diff --git a/cloudbuild_dynamic.yaml b/cloudbuild_dynamic.yaml index 846ee11f..ebb677b2 100644 --- a/cloudbuild_dynamic.yaml +++ b/cloudbuild_dynamic.yaml @@ -41,6 +41,6 @@ steps: images: - gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT} -timeout: 900s +timeout: 1200s options: machineType: 'E2_HIGHCPU_8' diff --git a/docs/index.docs.js b/docs/index.docs.js new file mode 100644 index 00000000..40ded82a --- /dev/null +++ b/docs/index.docs.js @@ -0,0 +1,89 @@ +import auth from './resources/auth.docs'; +import datarequest from './resources/datarequest.docs'; +import publisher from './resources/publisher.docs'; +import person from './resources/person.docs'; +import search from './resources/search.docs'; +import stats from './resources/stats.docs'; +import message from './resources/message.docs'; +import topic from './resources/topic.docs'; +import dataset from './resources/dataset.docs'; +import project from './resources/project.docs'; +import paper from './resources/paper.docs'; +import tool from './resources/tool.docs'; +import course from './resources/course.docs'; +import collection from './resources/collections.docs'; +import activitylog from './resources/activitylog.docs'; + +import collectionsSchema from './schemas/collections.schema'; + +module.exports = { + openapi: '3.0.1', + info: { + title: 'HDR UK API', + description: 'API for Tools and artefacts repository.', + version: '1.0.0', + }, + servers: [ + { + url: 'https://api.www.healthdatagateway.org/', + }, + { + url: 'http://localhost:3001/', + }, + { + url: 'https://api.{environment}.healthdatagateway.org:{port}/', + variables: { + environment: { + default: 'latest', + description: 'The Environment name.', + }, + port: { + enum: ['443'], + default: '443', + }, + }, + }, + ], + security: [ + { + oauth2: [], + }, + ], + paths: { + ...auth, + ...datarequest, + ...publisher, + ...person, + ...search, + ...stats, + ...message, + ...topic, + ...dataset, + ...project, + ...paper, + ...tool, + ...course, + ...collection, + ...activitylog, + }, + components: { + securitySchemes: { + oauth2: { + type: 'oauth2', + flows: { + clientCredentials: { + tokenUrl: 'https://api.www.healthdatagateway.org/oauth/token', + scopes: {}, + }, + }, + }, + cookieAuth: { + type: 'http', + scheme: 'bearer', + }, + }, + schemas: { + Collections: { ...collectionsSchema }, + }, + }, +}; diff --git a/docs/resources/activitylog.docs.js b/docs/resources/activitylog.docs.js new file mode 100644 index 00000000..af427301 --- /dev/null +++ b/docs/resources/activitylog.docs.js @@ -0,0 +1,160 @@ +module.exports = { + '/api/v2/activitylog': { + post: { + summary: 'Search activity logs for a given dataset or data access request', + security: [ + { + cookieAuth: [], + }, + ], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + required: ['versionIds', 'type'], + properties: { + versionIds: { + type: 'array', + }, + type: { + type: 'array', + }, + }, + example: { + versionIds: ['618cd6170d111006c0550fa3', '618cd556f19753063504a492'], + type: 'dataset', + }, + }, + }, + }, + }, + description: + 'Returns a list of activity logs for a given set of versionIds sorted into thier respective versions. Activity logs can either be for datasets or data access requests. 
The requesting user must be an admin user or a member of the custodian team to which the version IDs relate.',
+			tags: ['Activity Logs'],
+			responses: {
+				200: {
+					description: 'Successful response including the JSON payload.',
+				},
+				401: {
+					description: 'Unauthorised.',
+				},
+			},
+		},
+	},
+	'/api/v2/activitylog/{type}': {
+		post: {
+			summary: 'Create a manual activity log for a data access request',
+			security: [
+				{
+					cookieAuth: [],
+				},
+			],
+			parameters: [
+				{
+					in: 'path',
+					name: 'type',
+					required: true,
+					description: 'The type of activity log. Functionality only exists in current API for data access requests.',
+					schema: {
+						type: 'string',
+						example: 'data_request',
+					},
+				},
+			],
+			requestBody: {
+				required: true,
+				content: {
+					'application/json': {
+						schema: {
+							type: 'object',
+							required: ['description', 'timestamp', 'versionId'],
+							properties: {
+								description: {
+									type: 'string',
+									description: 'The text associated with the manual log.',
+								},
+								timestamp: {
+									type: 'string',
+									format: 'date-time',
+									description: 'Timestamp of when the log was created.',
+								},
+								versionId: {
+									type: 'string',
+									description: 'The versionId of the data access request version the activity log relates to.',
+								},
+							},
+							example: { description: 'Test', timestamp: '2021-11-11T12:03:49.714Z', versionId: '615b2ba0e33a38453bcf306b' },
+						},
+					},
+				},
+			},
+			description:
+				'Creates a manual activity log for a data access request version. The user must be an admin user or a member of the custodian team to which the log relates.',
+			tags: ['Activity Logs'],
+			responses: {
+				200: {
+					description: 'Successful response including the updated JSON payload for the associated data access request version.',
+				},
+				400: {
+					description: 'Bad request, including missing information in request body.',
+				},
+				401: {
+					description: 'Unauthorised.',
+				},
+				404: {
+					description: 'Data access request not found for submitted version ID.',
+				},
+			},
+		},
+	},
+	'/api/v2/activitylog/{type}/{id}': {
+		delete: {
+			summary: 'Delete a manually created activity log for a data access request',
+			security: [
+				{
+					cookieAuth: [],
+				},
+			],
+			parameters: [
+				{
+					in: 'path',
+					name: 'type',
+					required: true,
+					description: 'The type of activity log. Functionality only exists in current API for data access requests.',
+					schema: {
+						type: 'string',
+						example: 'data_request',
+					},
+				},
+				{
+					in: 'path',
+					name: 'id',
+					required: true,
+					description: 'The id of the manually created activity log.',
+					schema: {
+						type: 'string',
+					},
+				},
+			],
+			description:
+				'Deletes a manually created activity log for a data access request version. The user must be a member of the relevant custodian team or an admin user.',
+			tags: ['Activity Logs'],
+			responses: {
+				200: {
+					description: 'Successful deletion, including payload for updated version.',
+				},
+				400: {
+					description: 'Bad request - only manually created logs can be deleted.',
+				},
+				401: {
+					description: 'Unauthorised.',
+				},
+				404: {
+					description: 'Log not found for submitted version ID.',
+				},
+			},
+		},
+	},
+};
diff --git a/docs/resources/auth.docs.js b/docs/resources/auth.docs.js
new file mode 100644
index 00000000..b0ce0a67
--- /dev/null
+++ b/docs/resources/auth.docs.js
@@ -0,0 +1,122 @@
+module.exports = {
+	'/oauth/token': {
+		post: {
+			tags: ['Authorization'],
+			description:
+				'OAuth2.0 token endpoint responsible for issuing short-lived json web tokens (JWT) for access to secure Gateway APIs. 
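// Usage sketch for the activity log search endpoint documented above (POST /api/v2/activitylog).
// Assumptions: Node 18+ (global fetch); a JWT previously issued by POST /oauth/token is supplied as a
// Bearer token, matching the bearer-based cookieAuth scheme declared in index.docs.js; the versionIds
// below are placeholders in the documented format, not real identifiers.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function searchActivityLogs(accessToken) {
	const response = await fetch(`${GATEWAY_URL}/api/v2/activitylog`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` },
		body: JSON.stringify({
			versionIds: ['618cd6170d111006c0550fa3', '618cd556f19753063504a492'],
			type: 'dataset',
		}),
	});
	if (!response.ok) throw new Error(`Activity log search failed with status ${response.status}`);
	return response.json(); // logs grouped by version, as described in the 200 response above
}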
For client credentials grant flow, a valid client id and secret must be provided to identify your application and provide the expected permissions. This type of authentication is reserved for team based connectivity through client applications and is not provided for human user access. For more information, contact the HDR-UK team.', + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + grant_type: { + type: 'string', + description: 'The OAuth2.0 grant type that will be used to provide authentication.', + }, + client_id: { + type: 'string', + description: + 'A unique identifer provided to your team by the HDR-UK team at the time of onboarding to the Gateway. Contact the HDR-UK team for issue of new credentials.', + }, + client_secret: { + type: 'string', + description: + 'A long (50 character) string provided by the HDR-UK team at the time of onboarding to the Gateway. Contact the HDR-UK team for issue of new credentials.', + }, + }, + required: ['grant_type', 'client_secret', 'client_id'], + }, + examples: { + 'Client Credentials Grant Flow': { + value: { + grant_type: 'client_credentials', + client_id: '2ca1f61a90e3547', + client_secret: '3f80fecbf781b6da280a8d17aa1a22066fb66daa415d8befc1', + }, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'Successful response containing json web token (JWT) that will authorize an HTTP request against secured resources.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + access_token: { + type: 'string', + description: + 'The encoded json web token (JWT) that must be appended to the Authorization of subsequent API HTTP requests in order to access secured resources.', + }, + token_type: { + type: 'string', + description: 'The type of token issued, in this case, a json web token (JWT).', + }, + expires_in: { + type: 'integer', + description: 'The length of time in seconds before the issued JWT expires, defaulted to 900 seconds (15 minutes).', + }, + }, + }, + examples: { + 'Client Credentials Grant Flow': { + value: { + access_token: + 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJkYXRhIjp7Il9pZCI6IjYwMGJmYzk5YzhiZjcwMGYyYzdkNWMzNiIsInRpbWVTdGFtcCI2MTYxMjM4MzkwMzE5Nn0sImlhdCI6MTYxMjM4MzkwMywiZXhwIjoxNjEyMzg0ODAzfQ.-YvUBdjtJvdrRacz6E8-cYPQlum4TrEmiCFl8jO5a-M', + token_type: 'jwt', + expires_in: 900, + }, + }, + }, + }, + }, + }, + 400: { + description: 'Failure response caused by incomplete or invalid client credentials being passed to the endpoint.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + success: { + type: 'boolean', + description: 'A field that indicates the API request failed.', + }, + message: { + type: 'string', + description: 'A message indicating that the request failed for a given reason.', + }, + }, + }, + examples: { + 'Invalid Client Credentials': { + value: { + success: false, + message: 'Invalid client credentials were provided for the authorisation attempt', + }, + }, + 'Incomplete Client Credentials': { + value: { + success: false, + message: 'Incomplete client credentials were provided for the authorisation attempt', + }, + }, + 'Invalid Grant Type': { + value: { + success: false, + message: 'An invalid grant type has been specified', + }, + }, + }, + }, + }, + }, + }, + }, + }, +}; diff --git a/docs/resources/collections.docs.js b/docs/resources/collections.docs.js new file mode 100644 index 00000000..2cff0111 --- /dev/null +++ b/docs/resources/collections.docs.js @@ -0,0 
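// Usage sketch for the client credentials flow documented above (POST /oauth/token). Assumptions:
// Node 18+ (global fetch); GATEWAY_CLIENT_ID and GATEWAY_CLIENT_SECRET are the credentials issued by the
// HDR-UK team at onboarding, read here from environment variable names chosen for illustration only.
const TOKEN_URL = 'https://api.www.healthdatagateway.org/oauth/token';

async function requestAccessToken() {
	const response = await fetch(TOKEN_URL, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({
			grant_type: 'client_credentials',
			client_id: process.env.GATEWAY_CLIENT_ID,
			client_secret: process.env.GATEWAY_CLIENT_SECRET,
		}),
	});
	if (!response.ok) throw new Error(`Token request failed with status ${response.status}`);
	const { access_token, expires_in } = await response.json();
	// The JWT is short-lived (expires_in defaults to 900 seconds) and is attached to the Authorization
	// header of subsequent requests to secured resources.
	return { access_token, expires_in };
}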
+1,227 @@ +module.exports = { + '/api/v1/collections/getList': { + get: { + summary: 'Returns a list of collections', + security: [ + { + cookieAuth: [], + }, + ], + parameters: [], + description: 'Returns a list of collections', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response containing a list of collections', + }, + 401: { + description: 'Unauthorized', + }, + }, + }, + }, + '/api/v1/collections/{id}': { + get: { + summary: 'Returns a specific collection', + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the collection', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 2181307729084665, + }, + }, + ], + description: 'Returns a single, public collection including its related resource(s)', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response containing a single collection object', + }, + 404: { + description: 'Collection not found for ID: {id}', + }, + }, + }, + }, + '/api/v1/collections/relatedobjects/{id}': { + get: { + summary: 'Returns related resource(s) of a collection', + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the collection', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 5968326934600661, + }, + }, + ], + description: 'Returns an array of the related resource(s) of a given collection', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response containing the related resource(s)', + }, + }, + }, + }, + '/api/v1/collections/entityid/{id}': { + get: { + summary: 'Returns collection array for a given entity', + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the entity', + schema: { + type: 'string', + format: 'uuid', + example: 'c1f4b16c-9dfa-48e5-94ee-f0aa58c270e4', + }, + }, + ], + description: 'Returns an array of the collection(s) in which a given entity (e.g., dataset or paper) can be found', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response containing the collection(s)', + }, + }, + }, + }, + '/api/v1/collections/edit/{id}': { + put: { + summary: 'Edit a collection', + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the collection', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 5968326934600661, + }, + }, + ], + description: + 'Edit a collection by posting the updated collection object. 
This JSON body is validated server-side for structure and field type', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response detailing whether the update was successful or not', + }, + }, + }, + }, + '/api/v1/collections/status/{id}': { + put: { + summary: 'Change the status of a collection', + security: [ + { + cookieAuth: [], + }, + ], + parameters: [], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + activeflag: { + type: 'string', + enum: ['active', 'archive'], + }, + }, + }, + }, + }, + }, + description: 'Change the status of a collection', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response detailing whether the change of status was successful or not', + }, + }, + }, + }, + '/api/v1/collections/add': { + post: { + summary: 'Add a new collection', + security: [ + { + cookieAuth: [], + }, + ], + parameters: [], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/Collections', + }, + }, + }, + }, + description: + 'Add a collection by posting a new collection object conforming to the schema. This JSON body is validated server-side for structure and field type', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response detailing whether the new collection addition was successful or not', + }, + }, + }, + }, + '/api/v1/collections/delete/{id}': { + delete: { + summary: 'Delete a collection', + security: [ + { + cookieAuth: [], + }, + ], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the collection', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 5968326934600661, + }, + }, + ], + description: 'Delete a collection', + tags: ['Collections'], + responses: { + 200: { + description: 'Successful response detailing whether deleting the collection was a success', + }, + 401: { + description: 'Unauthorized', + }, + }, + }, + }, +}; diff --git a/docs/resources/course.docs.js b/docs/resources/course.docs.js new file mode 100644 index 00000000..a745b132 --- /dev/null +++ b/docs/resources/course.docs.js @@ -0,0 +1,102 @@ +module.exports = { + '/api/v2/courses': { + get: { + summary: 'Returns a list of courses', + parameters: [ + { + name: 'search', + in: 'query', + description: + 'Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match, by default results are sorted by the most relevant first unless a manual sort query parameter has been added.', + schema: { + type: 'string', + }, + example: 'Research', + }, + { + name: 'page', + in: 'query', + description: 'A specific page of results to retrieve', + schema: { + type: 'number', + }, + example: 1, + }, + { + name: 'limit', + in: 'query', + description: 'Maximum number of results returned per page', + schema: { + type: 'number', + }, + example: 10, + }, + { + name: 'sort', + in: 'query', + description: + 'Fields to apply sort operations to. Accepts multiple fields in ascending and descending. E.g. provider for ascending or -provider for descending. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'provider,-counter', + }, + { + name: 'fields', + in: 'query', + description: + 'Limit the size of the response by requesting only certain fields. 
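// Usage sketch for the public collection endpoints documented above (GET /api/v1/collections/{id} and
// GET /api/v1/collections/relatedobjects/{id}). Assumptions: Node 18+ (global fetch); the numeric ID is a
// placeholder in the same int64 format as the documented examples.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function getCollectionWithRelatedObjects(collectionId) {
	const collection = await (await fetch(`${GATEWAY_URL}/api/v1/collections/${collectionId}`)).json();
	const relatedObjects = await (await fetch(`${GATEWAY_URL}/api/v1/collections/relatedobjects/${collectionId}`)).json();
	return { collection, relatedObjects };
}

// Example call with a placeholder ID:
// getCollectionWithRelatedObjects(2181307729084665).then(console.log);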
Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below.',
+				schema: {
+					type: 'string',
+				},
+				example: 'provider,counter,description',
+			},
+			{
+				name: 'count',
+				in: 'query',
+				description: 'Returns the number of entities matching the query parameters provided instead of the result payload',
+				schema: {
+					type: 'boolean',
+				},
+				example: true,
+			},
+		],
+		description:
+			"Version 2.0 of the courses API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried, please see some examples below. Note - This response is limited to 100 records by default. Please use the 'page' query parameter to access records beyond the first 100. The 'limit' query parameter can therefore only be specified up to a maximum of 100.",
+		tags: ['Courses v2.0'],
+		responses: {
+			200: {
+				description: 'Successful response containing a list of course objects matching query parameters',
+			},
+		},
+	},
+},
+	'/api/v2/courses/{id}': {
+		summary: 'summary',
+		get: {
+			summary: 'Returns a course object',
+			description: 'Returns a course object by matching unique identifier in the default format that is stored within the Gateway',
+			tags: ['Courses v2.0'],
+			parameters: [
+				{
+					in: 'path',
+					name: 'id',
+					required: true,
+					description: 'The ID of the course',
+					schema: {
+						type: 'number',
+						example: 5540794872521069,
+					},
+				},
+			],
+			responses: {
+				200: {
+					description: 'Successful response containing a single course object',
+				},
+				404: {
+					description: 'A course could not be found by the provided course identifier',
+				},
+			},
+		},
+	},
+};
diff --git a/docs/resources/datarequest.docs.js b/docs/resources/datarequest.docs.js
new file mode 100644
index 00000000..f67f1d55
--- /dev/null
+++ b/docs/resources/datarequest.docs.js
@@ -0,0 +1,882 @@
+module.exports = {
+	'/api/v1/data-access-request/{id}': {
+		get: {
+			tags: ['Data Access Request'],
+			parameters: [
+				{
+					in: 'path',
+					name: 'id',
+					required: true,
+					description: 'The unique identifier for a single data access request application.',
+					schema: {
+						type: 'string',
+						example: '5ee249426136805fbf094eef',
+					},
+				},
+			],
+			description: 'Retrieve a single Data Access Request application using a supplied identifier',
+			responses: {
+				200: {
+					description: 'Successful response containing a full data access request application.',
+					content: {
+						'application/json': {
+							schema: {
+								type: 'object',
+								properties: {
+									status: {
+										type: 'string',
+									},
+									data: {
+										type: 'object',
+										properties: {
+											id: {
+												type: 'string',
+												description: 'The unique identifier for the application.',
+											},
+											aboutApplication: {
+												description:
+													"An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not.",
+												type: 'object',
+												properties: {
+													isNationalCoreStudies: {
+														type: 'boolean',
+														description: 'A flag to indicate if this application is in relation to a National Core Studies Project.',
+													},
+													nationalCoreStudiesProjectId: {
+														type: 'integer',
+														description:
+															'The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project.',
+													},
+													projectName: {
+														type: 'string',
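// Usage sketch for the parameterised v2 courses listing documented above (GET /api/v2/courses).
// Assumptions: Node 18+ (global fetch); the search, sort and field selections are placeholders built from
// the documented parameter examples.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function searchCourses() {
	const query = new URLSearchParams({
		search: 'Research',
		sort: 'provider,-counter', // ascending provider, then descending counter
		fields: 'provider,counter,description',
		page: '1',
		limit: '10', // capped at 100 per the endpoint description
	});
	const response = await fetch(`${GATEWAY_URL}/api/v2/courses?${query.toString()}`);
	return response.json();
}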
description: 'The project name that has been assigned to the application by the applicant(s).', + }, + }, + }, + authorIds: { + type: 'array', + items: { + type: 'integer', + }, + description: + "An array of values correlating to specific user's via their numeric identifiers. An author is also known as a contributor to an application and can view, edit or submit.", + }, + datasetIds: { + type: 'array', + items: { + type: 'string', + }, + description: + 'An array of values correlating to datasets selected for the application via their identifier, which is unique per version.', + }, + datasetTitles: { + type: 'array', + items: { + type: 'string', + }, + description: 'An array of strings correlating to the dataset titles that have been selected for the application.', + }, + applicationStatus: { + type: 'string', + enum: ['inProgress', 'submitted', 'inReview', 'approved', 'rejected', 'approved with conditions'], + description: 'The current status of the application.', + }, + jsonSchema: { + type: 'object', + description: + 'The object containing the json definition that renders the application form using the Winterfell library. This contains the details of questions, questions sets, question panels, headings and navigation items that appear.', + }, + questionAnswers: { + type: 'object', + description: + 'The object containing the answers provided on the application form. This consists of a series of key pairs, where the key is the unqiue question Id, and the value the is the answer provided to the question. In the case of a multi select on the form, the value may be an array.', + }, + publisher: { + type: 'string', + description: 'The name of the Custodian that holds the dataset and is processing the application.', + }, + publisherObj: { + type: 'object', + description: 'The object containing details regarding the Custodian/publisher relating to the application.', + }, + userId: { + type: 'integer', + description: + 'The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application.', + }, + schemaId: { + type: 'string', + description: 'The unique identifier that correlates to the schema from which the application form was generated.', + }, + files: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem.', + }, + amendmentIterations: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. 
The object contains dates that the application was returned, and resubmitted as well as reference to any questions that were highlighted for amendment.', + }, + createdAt: { + type: 'string', + description: 'The date and time that the application was started.', + }, + updatedAt: { + type: 'string', + description: 'The date and time that the application was last updated by any party.', + }, + projectId: { + type: 'string', + description: + 'The unique identifier for the application converted to a more human friendly format in uppercase and hypenated.', + }, + dateSubmitted: { + type: 'string', + description: + 'The date and time that the application was originally submitted by the applicant(s) to the Custodian for review.', + }, + dateReviewStart: { + type: 'string', + description: + 'The date and time that the review process was commenced by a Custodian manager. The review starts from the moment the manager opens the application to triage it.', + }, + dateFinalStatus: { + type: 'string', + description: + 'The date and time that the Custodian triggered a status change to the application once a final decision was made. E.g. when application was approved. This date can be used in conjunction with the dateReviewStart date to calculate the length of time the Custodian took to make a decision through their review process.', + }, + datasets: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the full metadata for each of the datasets that have been applied for through this application.', + }, + mainApplicant: { + type: 'object', + description: + 'An object containing the details of the main applicant of the application as referenced by the userId field.', + }, + authors: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the details of the contributors of the application as referenced by the authorIds field.', + }, + readOnly: { + type: 'boolean', + description: + 'A value to indicate if the requesting party is able to modify the application in its present state. For example, this will be false for a Custodian, but true for applicants if the applicant(s) are working on resubmitting the application following a request for amendments.', + }, + unansweredAmendments: { + type: 'integer', + description: + 'The number of amendments that have been requested by the Custodian in the current amendment iteration.', + }, + answeredAmendments: { + type: 'integer', + description: + 'The number of requested amendments that the applicant(s) have fixed in the current amendment iteration.', + }, + userType: { + type: 'string', + enum: ['custodian', 'applicant'], + description: + 'The type of user that has requested the Data Access Request application based on their permissions. It is either an applicant or a Custodian user.', + }, + activeParty: { + type: 'string', + enum: ['custodian', 'applicant'], + description: + 'The party that is currently handling the application. This is the applicant during presubmission, then the Custodian following submission. The active party then fluctuates between parties during amendment iterations.', + }, + inReviewMode: { + type: 'boolean', + description: + 'A flag to indicate if the current user is a reviewer of the application. This value will be false unless the requesting user is an assigned reviewer to a currently active workflow step. 
When this value is true, the requesting user is able to recommend approval or rejection of the application.', + }, + reviewSections: { + type: 'array', + items: { + type: 'string', + }, + description: + "An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data']", + }, + hasRecommended: { + type: 'boolean', + description: + 'A flag to indicate if the current user as a reviewer of the current workflow phase has submitted their recommendation for approval or rejection based on their review of the review sections assigned to them.', + }, + workflow: { + type: 'object', + description: + 'The full details of the workflow that has been assigned to the Data Access Request application. This includes information such as the review phases that the application will pass through and associated metadata.', + }, + }, + }, + }, + }, + examples: { + 'Approved Application': { + value: { + status: 'success', + data: { + aboutApplication: { + selectedDatasets: [ + { + _id: '5fc31a18d98e4f4cff7e9315', + datasetId: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + name: 'HDR UK Papers & Preprints', + description: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint', + abstract: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + contactPoint: 'hdr.hdr@hdruk.ac.uk', + publisherObj: { + dataRequestModalContent: { + header: ' ', + body: '{omitted for brevity...}', + footer: '', + }, + active: true, + allowsMessaging: true, + workflowEnabled: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + name: 'OTHER > HEALTH DATA RESEARCH UK', + imageURL: '', + team: { + active: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + members: [ + { + roles: ['manager'], + memberid: '5f1a98861a821b4a53e44d15', + }, + { + roles: ['manager'], + memberid: '600bfc99c8bf700f2c7d5c36', + }, + ], + type: 'publisher', + __v: 3, + createdAt: '2020-11-30T21:12:40.855Z', + updatedAt: '2020-12-02T13:33:45.232Z', + }, + }, + }, + ], + isNationalCoreStudies: true, + nationalCoreStudiesProjectId: '4324836585275824', + projectName: 'Test application title', + completedDatasetSelection: true, + completedInviteCollaborators: true, + completedReadAdvice: true, + completedCommunicateAdvice: true, + completedApprovalsAdvice: true, + completedSubmitAdvice: true, + }, + authorIds: [], + datasetIds: ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], + datasetTitles: [], + applicationStatus: 'approved', + jsonSchema: '{omitted for brevity...}', + questionAnswers: { + 'fullname-892140ec730145dc5a28b8fe139c2876': 'James Smith', + 'jobtitle-ff1d692a04b4bb9a2babe9093339136f': 'Consultant', + 'organisation-65c06905b8319ffa29919732a197d581': 'Consulting Inc.', + }, + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + _id: '60142c5b4316a0e0fcd47c56', + version: 1, + userId: 9190228196797084, + schemaId: '5f55e87e780ba204b0a98eb8', + files: [], + amendmentIterations: [], + createdAt: '2021-01-29T15:40:11.943Z', + updatedAt: '2021-02-03T14:38:22.688Z', + __v: 0, + projectId: '6014-2C5B-4316-A0E0-FCD4-7C56', + dateSubmitted: '2021-01-29T16:30:27.351Z', + dateReviewStart: '2021-02-03T14:36:22.341Z', + dateFinalStatus: '2021-02-03T14:38:22.680Z', + datasets: ['{omitted for brevity...}'], + dataset: null, + mainApplicant: { + 
_id: '5f1a98861a821b4a53e44d15', + firstname: 'James', + lastname: 'Smith', + }, + authors: [], + id: '60142c5b4316a0e0fcd47c56', + readOnly: true, + unansweredAmendments: 0, + answeredAmendments: 0, + userType: 'custodian', + activeParty: 'custodian', + inReviewMode: false, + reviewSections: [], + hasRecommended: false, + workflow: {}, + }, + }, + }, + }, + }, + }, + }, + 401: { + description: 'Unauthorised attempt to access an application.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + message: { + type: 'string', + }, + }, + }, + examples: { + Unauthorised: { + value: { + status: 'failure', + message: 'Unauthorised', + }, + }, + }, + }, + }, + }, + 404: { + description: 'Failed to find the application requested.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + message: { + type: 'string', + }, + }, + }, + examples: { + 'Not Found': { + value: { + status: 'error', + message: 'Application not found.', + }, + }, + }, + }, + }, + }, + }, + }, + put: { + tags: ['Data Access Request'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The unique identifier for a single Data Access Request application.', + schema: { + type: 'string', + example: '5ee249426136805fbf094eef', + }, + }, + ], + description: 'Update a single Data Access Request application.', + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + applicationStatus: { + type: 'string', + }, + applicationStatusDesc: { + type: 'string', + }, + }, + }, + examples: { + 'Update Application Status': { + value: { + applicationStatus: 'approved', + applicationStatusDesc: 'This application meets all the requirements.', + }, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'Successful response containing the full, updated data access request application.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + data: { + type: 'object', + properties: { + id: { + type: 'string', + description: 'The unique identifier for the application.', + }, + aboutApplication: { + description: + "An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not.", + type: 'object', + properties: { + isNationalCoreStudies: { + type: 'boolean', + description: 'A flag to indicate if this application is in relation to a National Core Studies Project.', + }, + nationalCoreStudiesProjectId: { + type: 'integer', + description: + 'The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project.', + }, + projectName: { + type: 'string', + description: 'The project name that has been assigned to the application by the applicant(s).', + }, + }, + }, + authorIds: { + type: 'array', + items: { + type: 'integer', + }, + description: + "An array of values correlating to specific user's via their numeric identifiers. 
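// Usage sketch for retrieving a single application via the endpoint documented above
// (GET /api/v1/data-access-request/{id}). Assumptions: Node 18+ (global fetch); the application ID is a
// placeholder in the documented format; a JWT from POST /oauth/token is supplied as a Bearer token.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function getDataAccessRequest(applicationId, accessToken) {
	const response = await fetch(`${GATEWAY_URL}/api/v1/data-access-request/${applicationId}`, {
		headers: { Authorization: `Bearer ${accessToken}` },
	});
	if (response.status === 401) throw new Error('Unauthorised to view this application');
	if (response.status === 404) throw new Error('Application not found');
	const { data } = await response.json();
	return data; // includes jsonSchema, questionAnswers, datasets and workflow, per the schema above
}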
An author is also known as a contributor to an application and can view, edit or submit.", + }, + datasetIds: { + type: 'array', + items: { + type: 'string', + }, + description: + 'An array of values correlating to datasets selected for the application via their identifier, which is unique per version.', + }, + datasetTitles: { + type: 'array', + items: { + type: 'string', + }, + description: 'An array of strings correlating to the dataset titles that have been selected for the application.', + }, + applicationStatus: { + type: 'string', + enum: ['inProgress', 'submitted', 'inReview', 'approved', 'rejected', 'approved with conditions'], + description: 'The current status of the application.', + }, + jsonSchema: { + type: 'object', + description: + 'The object containing the json definition that renders the application form using the Winterfell library. This contains the details of questions, questions sets, question panels, headings and navigation items that appear.', + }, + questionAnswers: { + type: 'object', + description: + 'The object containing the answers provided on the application form. This consists of a series of key pairs, where the key is the unqiue question Id, and the value the is the answer provided to the question. In the case of a multi select on the form, the value may be an array.', + }, + publisher: { + type: 'string', + description: 'The name of the Custodian that holds the dataset and is processing the application.', + }, + publisherObj: { + type: 'object', + description: 'The object containing details regarding the Custodian/publisher relating to the application.', + }, + userId: { + type: 'integer', + description: + 'The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application.', + }, + schemaId: { + type: 'string', + description: 'The unique identifier that correlates to the schema from which the application form was generated.', + }, + files: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem.', + }, + amendmentIterations: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. The object contains dates that the application was returned, and resubmitted as well as reference to any questions that were highlighted for amendment.', + }, + createdAt: { + type: 'string', + description: 'The date and time that the application was started.', + }, + updatedAt: { + type: 'string', + description: 'The date and time that the application was last updated by any party.', + }, + projectId: { + type: 'string', + description: + 'The unique identifier for the application converted to a more human friendly format in uppercase and hypenated.', + }, + dateSubmitted: { + type: 'string', + description: + 'The date and time that the application was originally submitted by the applicant(s) to the Custodian for review.', + }, + dateReviewStart: { + type: 'string', + description: + 'The date and time that the review process was commenced by a Custodian manager. 
The review starts from the moment the manager opens the application to triage it.', + }, + dateFinalStatus: { + type: 'string', + description: + 'The date and time that the Custodian triggered a status change to the application once a final decision was made. E.g. when application was approved. This date can be used in conjunction with the dateReviewStart date to calculate the length of time the Custodian took to make a decision through their review process.', + }, + datasets: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the full metadata for each of the datasets that have been applied for through this application.', + }, + mainApplicant: { + type: 'object', + description: + 'An object containing the details of the main applicant of the application as referenced by the userId field.', + }, + authors: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the details of the contributors of the application as referenced by the authorIds field.', + }, + readOnly: { + type: 'boolean', + description: + 'A value to indicate if the requesting party is able to modify the application in its present state. For example, this will be false for a Custodian, but true for applicants if the applicant(s) are working on resubmitting the application following a request for amendments.', + }, + unansweredAmendments: { + type: 'integer', + description: + 'The number of amendments that have been requested by the Custodian in the current amendment iteration.', + }, + answeredAmendments: { + type: 'integer', + description: + 'The number of requested amendments that the applicant(s) have fixed in the current amendment iteration.', + }, + userType: { + type: 'string', + enum: ['custodian', 'applicant'], + description: + 'The type of user that has requested the Data Access Request application based on their permissions. It is either an applicant or a Custodian user.', + }, + activeParty: { + type: 'string', + enum: ['custodian', 'applicant'], + description: + 'The party that is currently handling the application. This is the applicant during presubmission, then the Custodian following submission. The active party then fluctuates between parties during amendment iterations.', + }, + inReviewMode: { + type: 'boolean', + description: + 'A flag to indicate if the current user is a reviewer of the application. This value will be false unless the requesting user is an assigned reviewer to a currently active workflow step. When this value is true, the requesting user is able to recommend approval or rejection of the application.', + }, + reviewSections: { + type: 'array', + items: { + type: 'string', + }, + description: + "An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data']", + }, + hasRecommended: { + type: 'boolean', + description: + 'A flag to indicate if the current user as a reviewer of the current workflow phase has submitted their recommendation for approval or rejection based on their review of the review sections assigned to them.', + }, + workflow: { + type: 'object', + description: + 'The full details of the workflow that has been assigned to the Data Access Request application. 
This includes information such as the review phases that the application will pass through and associated metadata.', + }, + }, + }, + }, + }, + examples: { + 'Approved Application': { + value: { + status: 'success', + data: { + aboutApplication: { + selectedDatasets: [ + { + _id: '5fc31a18d98e4f4cff7e9315', + datasetId: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + name: 'HDR UK Papers & Preprints', + description: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint', + abstract: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + contactPoint: 'hdr.hdr@hdruk.ac.uk', + publisherObj: { + dataRequestModalContent: { + header: ' ', + body: '{omitted for brevity...}', + footer: '', + }, + active: true, + allowsMessaging: true, + workflowEnabled: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + name: 'OTHER > HEALTH DATA RESEARCH UK', + imageURL: '', + team: { + active: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + members: [ + { + roles: ['manager'], + memberid: '5f1a98861a821b4a53e44d15', + }, + { + roles: ['manager'], + memberid: '600bfc99c8bf700f2c7d5c36', + }, + ], + type: 'publisher', + __v: 3, + createdAt: '2020-11-30T21:12:40.855Z', + updatedAt: '2020-12-02T13:33:45.232Z', + }, + }, + }, + ], + isNationalCoreStudies: true, + nationalCoreStudiesProjectId: '4324836585275824', + projectName: 'Test application title', + completedDatasetSelection: true, + completedInviteCollaborators: true, + completedReadAdvice: true, + completedCommunicateAdvice: true, + completedApprovalsAdvice: true, + completedSubmitAdvice: true, + }, + authorIds: [], + datasetIds: ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], + datasetTitles: [], + applicationStatus: 'approved', + jsonSchema: '{omitted for brevity...}', + questionAnswers: { + 'fullname-892140ec730145dc5a28b8fe139c2876': 'James Smith', + 'jobtitle-ff1d692a04b4bb9a2babe9093339136f': 'Consultant', + 'organisation-65c06905b8319ffa29919732a197d581': 'Consulting Inc.', + }, + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + _id: '60142c5b4316a0e0fcd47c56', + version: 1, + userId: 9190228196797084, + schemaId: '5f55e87e780ba204b0a98eb8', + files: [], + amendmentIterations: [], + createdAt: '2021-01-29T15:40:11.943Z', + updatedAt: '2021-02-03T14:38:22.688Z', + __v: 0, + projectId: '6014-2C5B-4316-A0E0-FCD4-7C56', + dateSubmitted: '2021-01-29T16:30:27.351Z', + dateReviewStart: '2021-02-03T14:36:22.341Z', + dateFinalStatus: '2021-02-03T14:38:22.680Z', + datasets: ['{omitted for brevity...}'], + dataset: null, + mainApplicant: { + _id: '5f1a98861a821b4a53e44d15', + firstname: 'James', + lastname: 'Smith', + }, + authors: [], + id: '60142c5b4316a0e0fcd47c56', + readOnly: true, + unansweredAmendments: 0, + answeredAmendments: 0, + userType: 'custodian', + activeParty: 'custodian', + inReviewMode: false, + reviewSections: [], + hasRecommended: false, + workflow: {}, + }, + }, + }, + }, + }, + }, + }, + 401: { + description: 'Unauthorised attempt to update an application.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + message: { + type: 'string', + }, + }, + }, + examples: { + Unauthorised: { + value: { + status: 'error', + message: 'Unauthorised to perform this update.', + }, + }, + }, + }, + }, + }, + 404: { + description: 'Failed to find the application requested.', + content: { + 
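// Usage sketch for the status update documented above (PUT /api/v1/data-access-request/{id}).
// Assumptions: Node 18+ (global fetch); the caller holds a Custodian-scoped JWT supplied as a Bearer token;
// the application ID and decision text are placeholders based on the documented request body example.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function approveDataAccessRequest(applicationId, accessToken) {
	const response = await fetch(`${GATEWAY_URL}/api/v1/data-access-request/${applicationId}`, {
		method: 'PUT',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` },
		body: JSON.stringify({
			applicationStatus: 'approved',
			applicationStatusDesc: 'This application meets all the requirements.',
		}),
	});
	if (!response.ok) throw new Error(`Status update failed with status ${response.status}`);
	return response.json(); // full, updated application as described in the 200 response above
}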
'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + message: { + type: 'string', + }, + }, + }, + examples: { + 'Not Found': { + value: { + status: 'error', + message: 'Application not found.', + }, + }, + }, + }, + }, + }, + }, + }, + patch: { + summary: 'Update a users question answers for access request.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Data Access Request'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the datset', + schema: { + type: 'string', + example: '5ee249426136805fbf094eef', + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + questionAnswers: { + type: 'object', + }, + }, + }, + examples: { + 0: { + value: '{\n "firstName": "Roger"\n}', + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/data-access-request/{datasetID}': { + get: { + summary: 'Returns access request template.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Data Access Request'], + parameters: [ + { + in: 'path', + name: 'datasetID', + required: true, + description: 'The ID of the datset', + schema: { + type: 'string', + example: '6efbc62f-6ebb-4f18-959b-1ec6fd0cc6fb', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, +}; diff --git a/docs/resources/dataset.docs.js b/docs/resources/dataset.docs.js new file mode 100644 index 00000000..a2efbdee --- /dev/null +++ b/docs/resources/dataset.docs.js @@ -0,0 +1,255 @@ +module.exports = { + '/api/v1/datasets/{datasetID}': { + get: { + summary: 'Returns Dataset object.', + tags: ['Datasets'], + parameters: [ + { + in: 'path', + name: 'datasetID', + required: true, + description: 'The ID of the datset', + schema: { + type: 'string', + example: '756daeaa-6e47-4269-9df5-477c01cdd271', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/datasets': { + get: { + summary: 'Returns List of Dataset objects.', + tags: ['Datasets'], + parameters: [ + { + in: 'query', + name: 'limit', + required: false, + description: 'Limit the number of results', + schema: { + type: 'integer', + example: 3, + }, + }, + { + in: 'query', + name: 'offset', + required: false, + description: 'Index to offset the search results', + schema: { + type: 'integer', + example: 1, + }, + }, + { + in: 'query', + name: 'q', + required: false, + description: 'Filter using search query', + schema: { + type: 'string', + example: 'epilepsy', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v2/datasets': { + get: { + summary: 'Returns a list of dataset objects', + tags: ['Datasets v2.0'], + description: + "Version 2.0 of the datasets API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried, please see some examples below. Note - This response is limited to 100 records by default. Please use the 'page' query parameter to access records beyond the first 100. 
The 'limit' query parameter can therefore only be specified up to a maximum of 100.", + parameters: [ + { + name: 'search', + in: 'query', + description: + 'Full text index search function which searches for partial matches in various dataset fields including name, description and abstract. The response will contain a metascore indicating the relevancy of the match, by default results are sorted by the most relevant first unless a manual sort query parameter has been added.', + schema: { + type: 'string', + }, + example: 'COVID-19', + }, + { + name: 'page', + in: 'query', + description: 'A specific page of results to retrieve', + schema: { + type: 'number', + }, + example: 1, + }, + { + name: 'limit', + in: 'query', + description: 'Maximum number of results returned per page', + schema: { + type: 'number', + }, + example: 10, + }, + { + name: 'sort', + in: 'query', + description: + 'Fields to apply sort operations to. Accepts multiple fields in ascending and descending. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'datasetfields.publisher,name,-counter', + }, + { + name: 'fields', + in: 'query', + description: + 'Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separate as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,counter,datasetid', + }, + { + name: 'count', + in: 'query', + description: 'Returns the number of the number of entities matching the query parameters provided instead of the result payload', + schema: { + type: 'boolean', + }, + example: true, + }, + { + name: 'datasetid', + in: 'query', + description: 'Filter by the unique identifier for a single version of a dataset', + schema: { + type: 'string', + }, + example: '0cfe60cd-038d-4c03-9a95-894c52135922', + }, + { + name: 'pid', + in: 'query', + description: 'Filter by the identifier for a dataset that persists across versions', + schema: { + type: 'string', + }, + example: '621dd611-adcf-4434-b538-eecdbe5f72cf', + }, + { + name: 'name', + in: 'query', + description: 'Filter by dataset name', + schema: { + type: 'string', + }, + example: 'ARIA Dataset', + }, + { + name: 'activeflag', + in: 'query', + description: 'Filter by the status of a single dataset version', + schema: { + type: 'string', + enum: ['active', 'archive'], + }, + example: 'active', + }, + { + name: 'datasetfields.publisher', + in: 'query', + description: 'Filter by the name of the Custodian holding the dataset', + schema: { + type: 'string', + }, + example: 'ALLIANCE > BARTS HEALTH NHS TRUST', + }, + { + name: 'metadataquality.completeness_percent[gte]', + in: 'query', + description: + 'Filter by the metadata quality completeness percentage using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to.', + schema: { + type: 'number', + }, + example: 90.5, + }, + { + name: 'metadataquality.weighted_completeness_percent[gte]', + in: 'query', + description: + 'Filter by the metadata quality weighted completeness percentage using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to.', + schema: { + type: 'number', + }, + example: 71.2, + }, + { + name: 'metadataquality.weighted_quality_score[gte]', + in: 'query', + 
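// Usage sketch for the v2 dataset search documented above (GET /api/v2/datasets), combining full text
// search, a dot-notation filter and a [gte] comparison operator. Assumptions: Node 18+ (global fetch);
// the filter values are placeholders taken from the documented parameter examples.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function searchDatasets() {
	const query = new URLSearchParams({
		search: 'COVID-19',
		'datasetfields.publisher': 'ALLIANCE > BARTS HEALTH NHS TRUST',
		'metadataquality.completeness_percent[gte]': '90.5',
		fields: 'name,counter,datasetid',
		limit: '10', // capped at 100 per the endpoint description
	});
	const response = await fetch(`${GATEWAY_URL}/api/v2/datasets?${query.toString()}`);
	return response.json();
}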
description: + 'Filter by the metadata quality score using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to.', + schema: { + type: 'number', + }, + example: 35.3, + }, + ], + responses: { + 200: { + description: 'Successful response containing a list of datasets matching query parameters', + }, + }, + }, + }, + '/api/v2/datasets/{datasetid}': { + get: { + summary: 'Returns a dataset object.', + tags: ['Datasets v2.0'], + parameters: [ + { + in: 'path', + name: 'datasetid', + required: true, + description: 'The unqiue identifier for a specific version of a dataset', + schema: { + type: 'string', + example: 'af20ebb2-018a-4557-8ced-0bec75dba150', + }, + }, + { + in: 'query', + name: 'raw', + required: false, + description: + 'A flag which determines if the response triggered is the raw structure in which the data is stored rather than the dataset v2.0 standard', + schema: { + type: 'boolean', + example: false, + }, + }, + ], + description: + 'Version 2.0 of the datasets API introduces the agreed dataset v2.0 schema as defined at the following link - https://github.com/HDRUK/schemata/edit/master/schema/dataset/2.0.0/dataset.schema.json', + responses: { + 200: { + description: 'Successful response containing a single dataset object', + }, + 404: { + description: 'A dataset could not be found by the provided dataset identifier', + }, + }, + }, + }, +}; diff --git a/docs/resources/message.docs.js b/docs/resources/message.docs.js new file mode 100644 index 00000000..d0f63212 --- /dev/null +++ b/docs/resources/message.docs.js @@ -0,0 +1,134 @@ +module.exports = { + '/api/v1/messages/{id}': { + delete: { + summary: 'Delete a Message', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Messages'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the Message', + schema: { + type: 'string', + example: '5ee249426136805fbf094eef', + }, + }, + ], + responses: { + 204: { + description: 'Ok', + }, + }, + }, + put: { + summary: 'Update a single Message', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Messages'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the Message', + schema: { + type: 'string', + example: '5ee249426136805fbf094eef', + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + isRead: { + type: 'boolean', + }, + }, + }, + examples: { + 'Update message to read': { + value: '{\n "isRead": true\n}', + }, + }, + }, + }, + }, + responses: { + 204: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/messages/unread/count': { + get: { + summary: 'Returns the number of unread messages for the authenticated user', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Messages'], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/messages': { + post: { + summary: 'Returns a new Message object and creates an associated parent Topic if a Topic is not specified in request body', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Messages'], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + isRead: { + type: 'boolean', + }, + messageDescription: { + type: 'string', + }, + messageType: { + type: 'string', + }, + }, + required: ['isRead', 'messageDescription', 'messageType'], + }, + examples: { + 'Create new message': { + value: '{\n "isRead": 
false,\n "messageDescription": "this is an example",\n "messageType": "message"\n}', + }, + }, + }, + }, + }, + responses: { + 201: { + description: 'OK', + }, + }, + }, + }, +}; diff --git a/docs/resources/paper.docs.js b/docs/resources/paper.docs.js new file mode 100644 index 00000000..8134739d --- /dev/null +++ b/docs/resources/paper.docs.js @@ -0,0 +1,435 @@ +module.exports = { + '/api/v1/papers': { + post: { + summary: 'Returns a Paper object with ID.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Papers'], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + type: { + type: 'string', + }, + name: { + type: 'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + }, + example: { + type: 'paper', + name: 'Epilepsy data research', + link: 'http://epilepsy.org', + description: 'Epilespy data research description', + categories: { + category: 'API', + programmingLanguage: ['Javascript'], + programmingLanguageVersion: '0.0.0', + }, + licence: 'MIT licence', + authors: [4495285946631793], + tags: { + features: ['Arbitrage'], + topics: ['Epilepsy'], + }, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + get: { + summary: 'Return List of Paper objects.', + tags: ['Papers'], + parameters: [ + { + in: 'query', + name: 'limit', + required: false, + description: 'Limit the number of results', + schema: { + type: 'integer', + example: 3, + }, + }, + { + in: 'query', + name: 'offset', + required: false, + description: 'Index to offset the search results', + schema: { + type: 'integer', + example: 1, + }, + }, + { + in: 'query', + name: 'q', + required: false, + description: 'Filter using search query', + schema: { + type: 'string', + example: 'epilepsy', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/papers/{id}': { + get: { + summary: 'Returns Paper object.', + tags: ['Papers'], + parameters: [ + { + name: 'id', + in: 'path', + required: true, + description: 'The ID of the user', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 8370396016757367, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + patch: { + summary: 'Change status of the Paper object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Papers'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + schema: { + type: 'integer', + example: 7485531672584456, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + id: { + type: 'number', + }, + activeflag: { + type: 'string', + }, + }, + example: { + activeflag: 'active', + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + put: { + summary: 'Returns edited Paper object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Papers'], + parameters: [ + { + in: 'path', + name: 'id', + 
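// Usage sketch for creating a message via the endpoint documented above (POST /api/v1/messages); a parent
// Topic is created automatically when none is supplied. Assumptions: Node 18+ (global fetch); a JWT from
// POST /oauth/token is supplied as a Bearer token; the message content mirrors the documented example.
const GATEWAY_URL = 'https://api.www.healthdatagateway.org';

async function createMessage(accessToken) {
	const response = await fetch(`${GATEWAY_URL}/api/v1/messages`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` },
		body: JSON.stringify({
			isRead: false,
			messageDescription: 'this is an example',
			messageType: 'message',
		}),
	});
	if (response.status !== 201) throw new Error(`Message creation failed with status ${response.status}`);
	return response.json();
}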
required: true, + description: 'The ID of the paper', + schema: { + type: 'integer', + format: 'int64', + example: 7485531672584456, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + id: { + type: 'number', + }, + type: { + type: 'string', + }, + name: { + type: 'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + toolids: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + example: { + id: 7485531672584456, + type: 'paper', + name: 'Test Paper Title 2', + link: 'http://localhost:8080/epilepsy', + description: 'Test abstract 2', + categories: { + category: 'API', + programmingLanguage: ['Javascript'], + programmingLanguageVersion: '1.0.0', + }, + licence: 'MIT licence', + authors: [4495285946631793], + tags: { + features: ['Arbitrage'], + topics: ['Epilepsy'], + }, + toolids: [], + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v2/papers': { + get: { + summary: 'Returns a list of paper objects', + tags: ['Papers v2.0'], + parameters: [ + { + name: 'search', + in: 'query', + description: + 'Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match, by default results are sorted by the most relevant first unless a manual sort query parameter has been added.', + schema: { + type: 'string', + }, + example: 'Exploration', + }, + { + name: 'page', + in: 'query', + description: 'A specific page of results to retrieve', + schema: { + type: 'number', + }, + example: 1, + }, + { + name: 'limit', + in: 'query', + description: 'Maximum number of results returned per page', + schema: { + type: 'number', + }, + example: 10, + }, + { + name: 'sort', + in: 'query', + description: + 'Fields to apply sort operations to. Accepts multiple fields in ascending and descending. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,-counter', + }, + { + name: 'fields', + in: 'query', + description: + 'Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separate as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,counter,description', + }, + { + name: 'count', + in: 'query', + description: 'Returns the number of the number of entities matching the query parameters provided instead of the result payload', + schema: { + type: 'boolean', + }, + example: true, + }, + ], + description: + "Version 2.0 of the courses API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. 
The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried, please see some examples below. Note - This response is limited to 100 records by default. Please use the 'page' query parameter to access records beyond the first 100. The 'limit' query parameter can therefore only be specified up to a maximum of 100.", + responses: { + 200: { + description: 'Successful response containing a list of papers matching query parameters', + }, + }, + }, + }, + '/api/v2/papers/{id}': { + get: { + summary: 'Returns paper object', + tags: ['Papers v2.0'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the paper', + schema: { + type: 'number', + example: 13296138992670704, + }, + }, + ], + description: 'Returns a paper object by matching unique identifier in the default format that is stored as within the Gateway', + responses: { + 200: { + description: 'Successful response containing a single paper object', + }, + 404: { + description: 'A paper could not be found by the provided paper identifier', + }, + }, + }, + }, +}; diff --git a/docs/resources/person.docs.js b/docs/resources/person.docs.js new file mode 100644 index 00000000..4df4cf29 --- /dev/null +++ b/docs/resources/person.docs.js @@ -0,0 +1,134 @@ +module.exports = { + '/api/v1/person/{id}': { + get: { + summary: 'Returns details for a person.', + tags: ['Person'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the person', + schema: { + type: 'string', + example: 900000014, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/person': { + get: { + summary: 'Returns an array of person objects.', + tags: ['Person'], + responses: { + 200: { + description: 'OK', + }, + }, + }, + post: { + summary: 'Returns a new person object.', + tags: ['Person'], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['firstname', 'lastname', 'bio', 'link', 'orcid', 'emailNotifications', 'terms'], + properties: { + firstname: { + type: 'string', + }, + lastname: { + type: 'string', + }, + bio: { + type: 'string', + }, + link: { + type: 'string', + }, + orcid: { + type: 'string', + }, + emailNotifications: { + type: 'boolean', + }, + terms: { + type: 'boolean', + }, + }, + example: { + firstname: 'John', + lastname: 'Smith', + bio: 'Researcher', + link: 'http://google.com', + orcid: 'https://orcid.org/123456789', + emailNotifications: false, + terms: true, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + put: { + summary: 'Returns edited person object.', + tags: ['Person'], + responses: { + 200: { + description: 'OK', + }, + }, + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['id', 'bio', 'link', 'orcid', 'emailNotifications', 'terms'], + properties: { + id: { + type: 'string', + }, + bio: { + type: 'string', + }, + link: { + type: 'string', + }, + orcid: { + type: 'string', + }, + emailNotifications: { + type: 'boolean', + }, + terms: { + type: 'boolean', + }, + }, + example: { + id: '5268590523943617', + bio: 'Research assistant', + link: 'http://google.com', + orcid: 'https://orcid.org/123456789', + emailNotifications: false, + terms: true, + }, + }, + }, + }, + }, + }, + }, +}; diff --git a/docs/resources/project.docs.js b/docs/resources/project.docs.js new file mode 100644 index 
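As a concrete illustration of combining the v2 list parameters documented above (search, sort, fields and paging), a request might look like the sketch below; the host is assumed and the snippet is illustrative rather than part of the specification.

import axios from 'axios';

async function searchPapers() {
  // Search papers for 'Exploration', most relevant fields sorted by descending counter then name,
  // returning only selected fields and 10 results per page.
  const { data } = await axios.get('https://api.www.healthdatagateway.org/api/v2/papers', {
    params: { search: 'Exploration', sort: '-counter,name', fields: 'name,counter,description', page: 1, limit: 10 },
  });
  return data;
}
// Supplying count=true instead returns only the number of matching papers rather than the payload.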
00000000..abc37dd3 --- /dev/null +++ b/docs/resources/project.docs.js @@ -0,0 +1,429 @@ +module.exports = { + '/api/v1/projects': { + post: { + summary: 'Returns a Project object with ID.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Projects'], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + type: { + type: 'string', + }, + name: { + type: 'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + }, + example: { + type: 'project', + name: 'Epilepsy data research', + link: 'http://epilepsy.org', + description: 'Epilespy data research description', + categories: { + category: 'API', + programmingLanguage: ['Javascript'], + programmingLanguageVersion: '0.0.0', + }, + licence: 'MIT licence', + authors: [4495285946631793], + tags: { + features: ['Arbitrage'], + topics: ['Epilepsy'], + }, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + get: { + summary: 'Returns List of Project objects.', + tags: ['Projects'], + parameters: [ + { + in: 'query', + name: 'limit', + required: false, + description: 'Limit the number of results', + schema: { + type: 'integer', + example: 3, + }, + }, + { + in: 'query', + name: 'offset', + required: false, + description: 'Index to offset the search results', + schema: { + type: 'integer', + example: 1, + }, + }, + { + in: 'query', + name: 'q', + required: false, + description: 'Filter using search query', + schema: { + type: 'string', + example: 'epilepsy', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/projects/{id}': { + get: { + summary: 'Returns Project object.', + tags: ['Projects'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + schema: { + type: 'integer', + example: 441788967946948, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + patch: { + summary: 'Change status of the Project object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Projects'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + schema: { + type: 'integer', + example: 662346984100503, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + activeflag: { + type: 'string', + }, + }, + example: { + activeflag: 'active', + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + put: { + summary: 'Returns edited Project object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Projects'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the project', + schema: { + type: 'integer', + format: 'int64', + example: 26542005388306332, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + id: { + type: 'number', + }, + type: { + type: 'string', + }, + name: { + type: 
'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + toolids: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + example: { + id: 26542005388306332, + type: 'project', + name: 'Research Data TEST EPILEPSY', + link: 'http://localhost:8080/epilepsy', + description: 'Epilespy data research description', + categories: { + category: 'API', + programmingLanguage: ['Javascript'], + programmingLanguageVersion: '1.0.0', + }, + licence: 'MIT licence', + authors: [4495285946631793], + tags: { + features: ['Arbitrage'], + topics: ['Epilepsy'], + }, + toolids: [], + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v2/projects': { + get: { + summary: 'Returns a list of project objects', + tags: ['Projects v2.0'], + parameters: [ + { + name: 'search', + in: 'query', + description: + 'Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match, by default results are sorted by the most relevant first unless a manual sort query parameter has been added.', + schema: { + type: 'string', + }, + example: 'health service', + }, + { + name: 'page', + in: 'query', + description: 'A specific page of results to retrieve', + schema: { + type: 'number', + }, + example: 1, + }, + { + name: 'limit', + in: 'query', + description: 'Maximum number of results returned per page', + schema: { + type: 'number', + }, + example: 10, + }, + { + name: 'sort', + in: 'query', + description: + 'Fields to apply sort operations to. Accepts multiple fields in ascending and descending. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,-counter', + }, + { + name: 'fields', + in: 'query', + description: + 'Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separate as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,counter,description', + }, + { + name: 'count', + in: 'query', + description: 'Returns the number of the number of entities matching the query parameters provided instead of the result payload', + schema: { + type: 'boolean', + }, + example: true, + }, + ], + description: + "Version 2.0 of the courses API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried, please see some examples below. Note - This response is limited to 100 records by default. Please use the 'page' query parameter to access records beyond the first 100. 
The 'limit' query parameter can therefore only be specified up to a maximum of 100.", + responses: { + 200: { + description: 'Successful response containing a list of projects matching query parameters', + }, + }, + }, + }, + '/api/v2/projects/{id}': { + get: { + summary: 'Returns a project object', + tags: ['Projects v2.0'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the project', + schema: { + type: 'number', + example: 100000001, + }, + }, + ], + description: 'Returns a project object by matching unique identifier in the default format that is stored as within the Gateway', + responses: { + 200: { + description: 'Successful response containing a single project object', + }, + 404: { + description: 'A project could not be found by the provided project identifier', + }, + }, + }, + }, +}; diff --git a/docs/resources/publisher.docs.js b/docs/resources/publisher.docs.js new file mode 100644 index 00000000..e15fb371 --- /dev/null +++ b/docs/resources/publisher.docs.js @@ -0,0 +1,717 @@ +module.exports = { + '/api/v1/publishers/{publisher}/dataaccessrequests': { + get: { + tags: ['Publishers'], + parameters: [ + { + in: 'path', + name: 'publisher', + required: true, + description: 'The full name of the Custodian/Publisher, as registered on the Gateway.', + schema: { + type: 'string', + example: 'OTHER > HEALTH DATA RESEARCH UK', + }, + }, + ], + description: 'Returns a collection of all Data Access Requests that have been submitted to the Custodian team for review.', + responses: { + 200: { + description: 'Successful response containing a collection of Data Access Request applications.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + avgDecisionTime: { + type: 'string', + description: 'The average number of days the Custodian has taken to process applications from submission to decision.', + }, + canViewSubmitted: { + type: 'boolean', + description: + 'A flag to indicate if the requesting user has permissions to view submitted applications, which are visible only to managers of the Custodian team. Using OAuth2.0 client credentials will return this value as true.', + }, + status: { + type: 'string', + }, + data: { + type: 'array', + items: { + type: 'object', + properties: { + aboutApplication: { + description: + "An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not.", + type: 'object', + properties: { + isNationalCoreStudies: { + type: 'boolean', + description: 'A flag to indicate if this application is in relation to a National Core Studies Project.', + }, + nationalCoreStudiesProjectId: { + type: 'integer', + description: + 'The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project.', + }, + projectName: { + type: 'string', + description: 'The project name that has been assigned to the application by the applicant(s).', + }, + }, + }, + amendmentIterations: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. 
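To illustrate the dot-notation querying mentioned above for the v2 projects endpoint, any nested field can be supplied as a query key; the field path below is assumed purely for illustration.

GET /api/v2/projects?categories.category=API&fields=name,description&limit=10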
The object contains dates that the application was returned, and resubmitted as well as reference to any questions that were highlighted for amendment.', + }, + amendmentStatus: { + type: 'string', + description: + 'A textual indicator of what state the application is in relating to updates made by the Custodian e.g. if it is awaiting updates from the applicant or if new updates have been submitted by the applicant(s).', + }, + applicants: { + type: 'string', + description: 'Concatenated list of applicants names who are contributing to the application.', + }, + applicationStatus: { + type: 'string', + enum: ['inProgress', 'submitted', 'inReview', 'approved', 'rejected', 'approved with conditions'], + description: 'The current status of the application.', + }, + authorIds: { + type: 'array', + items: { + type: 'integer', + description: + "An array of values correlating to specific user's via their numeric identifiers. An author is also known as a contributor to an application and can view, edit or submit.", + }, + }, + createdAt: { + type: 'string', + description: 'The date and time that the application was started.', + }, + datasetIds: { + type: 'array', + items: { + type: 'string', + }, + description: + 'An array of values correlating to datasets selected for the application via their identifier, which is unique per version.', + }, + datasetTitles: { + type: 'array', + items: { + type: 'string', + }, + description: 'An array of strings correlating to the dataset titles that have been selected for the application.', + }, + datasets: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the full metadata for each of the datasets that have been applied for through this application.', + }, + dateSubmitted: { + type: 'string', + description: + 'The date and time that the application was originally submitted by the applicant(s) to the Custodian for review.', + }, + files: { + type: 'array', + items: { + type: 'object', + }, + description: + 'An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem.', + }, + id: { + type: 'string', + description: 'The unique identifier for the application.', + }, + jsonSchema: { + type: 'object', + description: + 'The object containing the json definition that renders the application form using the Winterfell library. This contains the details of questions, questions sets, question panels, headings and navigation items that appear.', + }, + questionAnswers: { + type: 'object', + description: + 'The object containing the answers provided on the application form. This consists of a series of key pairs, where the key is the unqiue question Id, and the value the is the answer provided to the question. 
In the case of a multi select on the form, the value may be an array.', + }, + mainApplicant: { + type: 'object', + description: + 'An object containing the details of the main applicant of the application as referenced by the userId field.', + }, + projectId: { + type: 'string', + description: + 'The unique identifier for the application converted to a more human friendly format in uppercase and hypenated.', + }, + projectName: { + type: 'string', + description: 'The project name that has been assigned to the application by the applicant(s).', + }, + publisher: { + type: 'string', + description: 'The name of the Custodian that holds the dataset and is processing the application.', + }, + publisherObj: { + type: 'object', + description: 'The object containing details regarding the Custodian/publisher relating to the application.', + }, + reviewPanels: { + type: 'array', + items: { + type: 'string', + }, + description: + "An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data']", + }, + schemaId: { + type: 'string', + description: 'The unique identifier that correlates to the schema from which the application form was generated.', + }, + updatedAt: { + type: 'string', + description: 'The date and time that the application was last updated by any party.', + }, + userId: { + type: 'integer', + description: + 'The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application.', + }, + deadlinePassed: { + type: 'boolean', + description: 'A flag to indicate if the deadline has passed for the current review phase for this application.', + }, + decisionApproved: { + type: 'boolean', + description: + 'A flag to indicate if the request users decision as a reviewer of the current workflow phase was positive or negative. i.e. correlating to approval or rejection recommendation.', + }, + decisionComments: { + type: 'string', + description: + 'A supporting note or comment made by the requesting user as context to their decision as a reviewer of the current workflow phase.', + }, + decisionDate: { + type: 'string', + description: 'The date that the requesting user made their decision as a reviewer of the current workflow phase.', + }, + decisionDuration: { + type: 'integer', + description: + "The number of days from submission until a final decision was made on the application. i.e. the application status was changed to a final status e.g. 
'Approved'.", + }, + decisionMade: { + type: 'boolean', + description: + 'A flag to indicate if the requesting user has made an expected decision as a reviewer of the current workflow phase.', + }, + decisionStatus: { + type: 'string', + description: + 'A message indicating if the requesting user as a reviewer of the application has made a decision or is still required to make a decision for the current work flow.', + }, + isReviewer: { + type: 'boolean', + description: + 'A flag to indicate if the requesting user is a reviewer of the current workflow step for the application.', + }, + remainingActioners: { + type: 'array', + items: { + type: 'string', + }, + description: + 'An array containing the names of Custodian team reviewers expected to complete a review for the current workflow phase, or a list of managers if the application is awaiting a final decision.', + }, + reviewStatus: { + type: 'string', + description: + "A message indicating the current status of the application review in relation to the assigned workflow. E.g. 'Final decision required' or 'Deadline is today'. This message changes based on the requesting user's relationship to the application. E.g. if they are a reviewer or manager.", + }, + stepName: { + type: 'string', + description: 'The name of the current workflow step that the application is in.', + }, + workflowCompleted: { + type: 'boolean', + description: 'A flag to indicate if the assigned workflow for the review process has been completed.', + }, + workflowName: { + type: 'string', + description: + 'The name of the workflow the Custodian team have assigned to the application for the review process.', + }, + }, + }, + }, + }, + }, + examples: { + 'Single Request Received': { + value: { + success: true, + data: [ + { + authorIds: [], + datasetIds: ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], + datasetTitles: [], + applicationStatus: 'submitted', + jsonSchema: '{omitted for brevity...}', + questionAnswers: '{omitted for brevity...}', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + _id: '601853db22dc004f9adfaa24', + version: 1, + userId: 7584453789581072, + schemaId: '5f55e87e780ba204b0a98eb8', + files: [ + { + error: '', + _id: '601aacf8ecdfa66e5cbc2742', + status: 'UPLOADED', + description: 'QuestionAnswers', + fileId: '9e76ee1a676f423b9b5c7aabf59c69db', + size: 509984, + name: 'QuestionAnswersFlags.png', + owner: '5ec7f1b39219d627e5cafae3', + }, + { + error: '', + _id: '601aadbcecdfa6c532bc2743', + status: 'UPLOADED', + description: 'Notifications', + fileId: 'adb1718dcc094b9cb4b0ab347ad2ee94', + size: 54346, + name: 'HQIP-Workflow-Assigned-Notification.png', + owner: '5ec7f1b39219d627e5cafae3', + }, + ], + amendmentIterations: [], + createdAt: '2021-02-01T19:17:47.470Z', + updatedAt: '2021-02-03T16:36:36.720Z', + __v: 2, + projectId: '6018-53DB-22DC-004F-9ADF-AA24', + aboutApplication: { + selectedDatasets: [ + { + _id: '5fc31a18d98e4f4cff7e9315', + datasetId: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + name: 'HDR UK Papers & Preprints', + description: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint', + abstract: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + contactPoint: 'hdr.hdr@hdruk.ac.uk', + publisherObj: { + dataRequestModalContent: { + header: ' ', + body: '{omitted for brevity...}', + footer: '', + }, + active: true, + 
allowsMessaging: true, + workflowEnabled: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + name: 'OTHER > HEALTH DATA RESEARCH UK', + imageURL: '', + team: { + active: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + members: [ + { + roles: ['manager'], + memberid: '5f1a98861a821b4a53e44d15', + }, + { + roles: ['manager'], + memberid: '600bfc99c8bf700f2c7d5c36', + }, + ], + type: 'publisher', + __v: 3, + createdAt: '2020-11-30T21:12:40.855Z', + updatedAt: '2020-12-02T13:33:45.232Z', + }, + }, + }, + ], + isNationalCoreStudies: true, + nationalCoreStudiesProjectId: '4324836585275824', + projectName: 'Test application title', + completedDatasetSelection: true, + completedInviteCollaborators: true, + completedReadAdvice: true, + completedCommunicateAdvice: true, + completedApprovalsAdvice: true, + completedSubmitAdvice: true, + }, + dateSubmitted: '2021-02-03T16:37:36.081Z', + datasets: [ + { + categories: { + programmingLanguage: [], + }, + tags: { + features: ['Preprints', 'Papers', 'HDR UK'], + topics: [], + }, + datasetfields: { + geographicCoverage: ['https://www.geonames.org/countries/GB/united-kingdom.html'], + physicalSampleAvailability: ['Not Available'], + technicaldetails: '{omitted for brevity...}', + versionLinks: [ + { + id: '142b1618-2691-4019-97b4-16b1e27c5f95', + linkType: 'Superseded By', + domainType: 'CatalogueSemanticLink', + source: { + id: '9e798632-442a-427b-8d0e-456f754d28dc', + domainType: 'DataModel', + label: 'HDR UK Papers & Preprints', + documentationVersion: '0.0.1', + }, + target: { + id: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + domainType: 'DataModel', + label: 'HDR UK Papers & Preprints', + documentationVersion: '1.0.0', + }, + }, + ], + phenotypes: [], + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + abstract: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', + releaseDate: '2020-11-27T00:00:00Z', + accessRequestDuration: 'Other', + conformsTo: 'OTHER', + accessRights: 'https://github.com/HDRUK/papers/blob/master/LICENSE', + jurisdiction: 'GB-ENG', + datasetStartDate: '2020-03-31', + datasetEndDate: '2022-04-30', + statisticalPopulation: '0', + ageBand: '0-0', + contactPoint: 'hdr.hdr@hdruk.ac.uk', + periodicity: 'Daily', + metadataquality: { + id: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + title: 'HDR UK Papers & Preprints', + completeness_percent: 95.24, + weighted_completeness_percent: 100, + error_percent: 11.63, + weighted_error_percent: 19.05, + quality_score: 91.81, + quality_rating: 'Gold', + weighted_quality_score: 90.47, + weighted_quality_rating: 'Gold', + }, + datautility: { + id: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + publisher: 'OTHER > HEALTH DATA RESEARCH UK', + title: 'HDR UK Papers & Preprints', + metadata_richness: 'Gold', + availability_of_additional_documentation_and_support: '', + data_model: '', + data_dictionary: '', + provenance: '', + data_quality_management_process: '', + dama_quality_dimensions: '', + pathway_coverage: '', + length_of_follow_up: '', + allowable_uses: '', + research_environment: '', + time_lag: '', + timeliness: '', + linkages: '', + data_enrichments: '', + }, + metadataschema: { + '@context': 'http://schema.org/', + '@type': 'Dataset', + identifier: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + url: 'https://healthdatagateway.org/detail/d5faf9c6-6c34-46d7-93c4-7706a5436ed9', + name: 'HDR UK Papers & Preprints', + description: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author 
Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint', + license: 'Open Access', + keywords: [ + 'Preprints,Papers,HDR UK', + 'OTHER > HEALTH DATA RESEARCH UK', + 'NOT APPLICABLE', + 'GB-ENG', + 'https://www.geonames.org/countries/GB/united-kingdom.html', + ], + includedinDataCatalog: [ + { + '@type': 'DataCatalog', + name: 'OTHER > HEALTH DATA RESEARCH UK', + url: 'hdr.hdr@hdruk.ac.uk', + }, + { + '@type': 'DataCatalog', + name: 'HDR UK Health Data Gateway', + url: 'http://healthdatagateway.org', + }, + ], + }, + }, + authors: [], + showOrganisation: false, + toolids: [], + datasetids: [], + _id: '5fc31a18d98e4f4cff7e9315', + relatedObjects: [], + programmingLanguage: [], + pid: 'b7a62c6d-ed00-4423-ad27-e90b71222d8e', + datasetVersion: '1.0.0', + id: 9816147066244124, + datasetid: 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', + type: 'dataset', + activeflag: 'active', + name: 'HDR UK Papers & Preprints', + description: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint', + license: 'Open Access', + datasetv2: { + identifier: '', + version: '', + issued: '', + modified: '', + revisions: [], + summary: { + title: '', + abstract: + 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', + publisher: { + identifier: '', + name: 'HEALTH DATA RESEARCH UK', + logo: '', + description: '', + contactPoint: 'hdr.hdr@hdruk.ac.uk', + memberOf: 'OTHER', + accessRights: [], + deliveryLeadTime: '', + accessService: '', + accessRequestCost: '', + dataUseLimitation: [], + dataUseRequirements: [], + }, + contactPoint: 'hdr.hdr@hdruk.ac.uk', + keywords: ['Preprints', 'Papers', 'HDR UK'], + alternateIdentifiers: [], + doiName: 'https://doi.org/10.5281/zenodo.326615', + }, + documentation: { + description: '', + associatedMedia: ['https://github.com/HDRUK/papers'], + isPartOf: 'NOT APPLICABLE', + }, + coverage: { + spatial: 'GB', + typicalAgeRange: '0-0', + physicalSampleAvailability: ['NOT AVAILABLE'], + followup: 'UNKNOWN', + pathway: 'NOT APPLICABLE', + }, + provenance: { + origin: { + purpose: 'OTHER', + source: 'MACHINE GENERATED', + collectionSituation: 'OTHER', + }, + temporal: { + accrualPeriodicity: 'DAILY', + distributionReleaseDate: '2020-11-27', + startDate: '2020-03-31', + endDate: '2022-04-30', + timeLag: 'NO TIMELAG', + }, + }, + accessibility: { + usage: { + dataUseLimitation: 'GENERAL RESEARCH USE', + dataUseRequirements: 'RETURN TO DATABASE OR RESOURCE', + resourceCreator: 'HDR UK Using Team', + investigations: ['https://github.com/HDRUK/papers'], + isReferencedBy: ['Not Available'], + }, + access: { + accessRights: ['Open Access'], + accessService: 'https://github.com/HDRUK/papers', + accessRequestCost: 'Free', + deliveryLeadTime: 'OTHER', + jurisdiction: 'GB-ENG', + dataProcessor: 'HDR UK', + dataController: 'HDR UK', + }, + formatAndStandards: { + vocabularyEncodingScheme: 'OTHER', + conformsTo: 'OTHER', + language: 'en', + format: ['csv', 'JSON'], + }, + }, + enrichmentAndLinkage: { + qualifiedRelation: ['Not Available'], + derivation: ['Not Available'], + tools: ['https://github.com/HDRUK/papers'], + }, + observations: [], + }, + createdAt: '2020-11-29T03:48:41.794Z', + updatedAt: '2021-02-02T10:09:57.030Z', + __v: 0, + counter: 20, + }, + ], + dataset: null, + mainApplicant: { + isServiceAccount: false, + _id: '5ec7f1b39219d627e5cafae3', + id: 7584453789581072, + providerId: 
'112563375053074694443', + provider: 'google', + firstname: 'Chris', + lastname: 'Marks', + email: 'chris.marks@paconsulting.com', + role: 'Admin', + __v: 0, + redirectURL: '/tool/100000012', + discourseKey: '2f52ecaa21a0d0223a119da5a09f8f8b09459e7b69ec3f981102d09f66488d99', + discourseUsername: 'chris.marks', + updatedAt: '2021-02-01T12:39:56.372Z', + }, + publisherObj: { + dataRequestModalContent: { + header: '', + body: '', + footer: '', + }, + active: true, + allowsMessaging: true, + workflowEnabled: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + name: 'OTHER > HEALTH DATA RESEARCH UK', + imageURL: '', + team: { + active: true, + _id: '5f7b1a2bce9f65e6ed83e7da', + members: [ + { + roles: ['manager'], + memberid: '5f1a98861a821b4a53e44d15', + }, + { + roles: ['manager'], + memberid: '600bfc99c8bf700f2c7d5c36', + }, + ], + type: 'publisher', + __v: 3, + createdAt: '2020-11-30T21:12:40.855Z', + updatedAt: '2020-12-02T13:33:45.232Z', + users: [ + { + _id: '5f1a98861a821b4a53e44d15', + firstname: 'Robin', + lastname: 'Kavanagh', + }, + { + _id: '600bfc99c8bf700f2c7d5c36', + firstname: 'HDR-UK', + lastname: 'Service Account', + }, + ], + }, + }, + id: '601853db22dc004f9adfaa24', + projectName: 'PA Paper', + applicants: 'Chris Marks', + workflowName: '', + workflowCompleted: false, + decisionDuration: '', + decisionMade: false, + decisionStatus: '', + decisionComments: '', + decisionDate: '', + decisionApproved: false, + remainingActioners: 'Robin Kavanagh (you), HDR-UK Service Account', + stepName: '', + deadlinePassed: '', + reviewStatus: '', + isReviewer: false, + reviewPanels: [], + amendmentStatus: '', + }, + ], + avgDecisionTime: 1, + canViewSubmitted: true, + }, + }, + }, + }, + }, + }, + 401: { + description: 'Unauthorised attempt to access an application.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string', + }, + message: { + type: 'string', + }, + }, + }, + examples: { + Unauthorised: { + value: { + status: 'failure', + message: 'Unauthorised', + }, + }, + }, + }, + }, + }, + 404: { + description: 'Failed to find the application requested.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + success: { + type: 'boolean', + }, + }, + }, + examples: { + 'Not Found': { + value: { + success: false, + }, + }, + }, + }, + }, + }, + }, + }, + }, +}; diff --git a/docs/resources/search.docs.js b/docs/resources/search.docs.js new file mode 100644 index 00000000..f75f3d3c --- /dev/null +++ b/docs/resources/search.docs.js @@ -0,0 +1,58 @@ +module.exports = { + '/api/v1/search': { + get: { + tags: ['Search'], + summary: 'Search for HDRUK /search?search', + parameters: [ + { + in: 'query', + name: 'params', + schema: { + type: 'object', + properties: { + search: { + type: 'string', + example: 'Epilepsy', + }, + type: { + type: 'string', + example: 'all', + }, + category: { + type: 'string', + example: 'API', + }, + programmingLanguage: { + type: 'string', + example: 'Javascript', + }, + features: { + type: 'string', + example: 'Arbitrage', + }, + topics: { + type: 'string', + example: 'Epilepsy', + }, + startIndex: { + type: 'string', + example: 0, + }, + maxResults: { + type: 'string', + example: 10, + }, + }, + }, + style: 'form', + explode: true, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, +}; diff --git a/docs/resources/stats.docs.js b/docs/resources/stats.docs.js new file mode 100644 index 00000000..28659afa --- /dev/null +++ b/docs/resources/stats.docs.js @@ -0,0 
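A minimal client sketch for the data access request listing endpoint documented above; it assumes a token already obtained via the OAuth2.0 client credentials flow and presented as a standard Bearer header, and the host is likewise an assumption.

import axios from 'axios';

async function listDataAccessRequests(accessToken) {
  const publisher = encodeURIComponent('OTHER > HEALTH DATA RESEARCH UK');
  const { data } = await axios.get(
    `https://api.www.healthdatagateway.org/api/v1/publishers/${publisher}/dataaccessrequests`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );
  // data.data holds the applications; avgDecisionTime and canViewSubmitted are returned alongside,
  // with canViewSubmitted being true for client credentials callers as described above.
  return data.data;
}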
+1,122 @@ +module.exports = { + '/api/v1/stats/topSearches': { + get: { + summary: 'Returns top searches for a given month and year.', + tags: ['Stats'], + parameters: [ + { + name: 'month', + in: 'query', + required: true, + description: 'Month number.', + schema: { + type: 'string', + example: 7, + }, + }, + { + name: 'year', + in: 'query', + required: true, + description: 'Year.', + schema: { + type: 'string', + example: 2020, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/stats': { + get: { + summary: + 'Returns the details on recent searches, popular objects, unmet demands or recently updated objects based on the rank query parameter.', + tags: ['Stats'], + parameters: [ + { + name: 'rank', + in: 'query', + required: true, + description: 'The type of stat.', + schema: { + type: 'string', + example: 'unmet', + }, + }, + { + name: 'type', + in: 'query', + required: true, + description: 'Resource type.', + schema: { + type: 'string', + example: 'Tools', + }, + }, + { + name: 'month', + in: 'query', + required: true, + description: 'Month number.', + schema: { + type: 'string', + example: 7, + }, + }, + { + name: 'year', + in: 'query', + required: true, + description: 'Year.', + schema: { + type: 'string', + example: 2020, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/kpis': { + get: { + summary: 'Returns information for KPIs, based on the KPI type and selectedDate parameters.', + tags: ['KPIs'], + parameters: [ + { + name: 'type', + in: 'query', + required: true, + description: 'The type of KPI.', + schema: { + type: 'string', + example: 'uptime', + }, + }, + { + name: 'selectedDate', + in: 'query', + required: true, + description: 'Full date time string.', + schema: { + type: 'string', + example: 'Wed Jul 01 2020 01:00:00 GMT 0100 (British Summer Time)', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, +}; diff --git a/docs/resources/tool.docs.js b/docs/resources/tool.docs.js new file mode 100644 index 00000000..e2c4d92e --- /dev/null +++ b/docs/resources/tool.docs.js @@ -0,0 +1,422 @@ +module.exports = { + '/api/v1/tools': { + get: { + summary: 'Return List of Tool objects.', + tags: ['Tools'], + parameters: [ + { + in: 'query', + name: 'limit', + required: false, + description: 'Limit the number of results', + schema: { + type: 'integer', + example: 3, + }, + }, + { + in: 'query', + name: 'offset', + required: false, + description: 'Index to offset the search results', + schema: { + type: 'integer', + example: 1, + }, + }, + { + in: 'query', + name: 'q', + required: false, + description: 'Filter using search query', + schema: { + type: 'string', + example: 'epilepsy', + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + post: { + summary: 'Returns new Tool object with ID.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Tools'], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + type: { + type: 'string', + }, + name: { + type: 'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 
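Putting the documented parameters together, typical requests to the stats endpoints above, using the example values given, would be:

GET /api/v1/stats/topSearches?month=7&year=2020
GET /api/v1/stats?rank=unmet&type=Tools&month=7&year=2020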
'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + }, + example: { + id: 26542005388306332, + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v1/tools/{id}': { + get: { + summary: 'Returns Tool object', + tags: ['Tools'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the tool', + schema: { + type: 'integer', + format: 'int64', + minimum: 1, + example: 19009, + }, + }, + ], + responses: { + 200: { + description: 'OK', + }, + }, + }, + put: { + summary: 'Returns edited Tool object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Tools'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + schema: { + type: 'integer', + example: 123, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + id: { + type: 'number', + }, + type: { + type: 'string', + }, + name: { + type: 'string', + }, + link: { + type: 'string', + }, + description: { + type: 'string', + }, + categories: { + type: 'object', + properties: { + category: { + type: 'string', + }, + programmingLanguage: { + type: 'array', + items: { + type: 'string', + }, + }, + programmingLanguageVersion: { + type: 'string', + }, + }, + }, + licence: { + type: 'string', + }, + authors: { + type: 'array', + items: { + type: 'number', + }, + }, + tags: { + type: 'object', + properties: { + features: { + type: 'array', + items: { + type: 'string', + }, + }, + topics: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + }, + toolids: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + example: { + id: 26542005388306332, + type: 'tool', + name: 'Research Data TEST EPILEPSY', + link: 'http://localhost:8080/epilepsy', + description: 'Epilespy data research description', + categories: { + category: 'API', + programmingLanguage: ['Javascript'], + programmingLanguageVersion: '1.0.0', + }, + licence: 'MIT licence', + authors: [4495285946631793], + tags: { + features: ['Arbitrage'], + topics: ['Epilepsy'], + }, + toolids: [], + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + patch: { + summary: 'Change status of Tool object.', + security: [ + { + cookieAuth: [], + }, + ], + tags: ['Tools'], + parameters: [ + { + name: 'id', + in: 'path', + required: true, + description: 'The ID of the tool', + schema: { + type: 'integer', + format: 'int64', + example: 5032687830560181, + }, + }, + ], + requestBody: { + content: { + 'application/json': { + schema: { + type: 'object', + required: ['name'], + properties: { + id: { + type: 'number', + }, + activeflag: { + type: 'string', + }, + }, + example: { + id: 662346984100503, + activeflag: 'active', + }, + }, + }, + }, + }, + responses: { + 200: { + description: 'OK', + }, + }, + }, + }, + '/api/v2/tools': { + get: { + summary: 'Returns a list of tool objects', + tags: ['Tools v2.0'], + parameters: [ + { + name: 'search', + in: 'query', + description: + 'Full text index search function which searches for partial matches in various fields including name and description. 
The response will contain a metascore indicating the relevancy of the match; by default, results are sorted with the most relevant first unless a manual sort query parameter has been added.', + schema: { + type: 'string', + }, + example: 'Regulation', + }, + { + name: 'page', + in: 'query', + description: 'A specific page of results to retrieve', + schema: { + type: 'number', + }, + example: 1, + }, + { + name: 'limit', + in: 'query', + description: 'Maximum number of results returned per page', + schema: { + type: 'number', + }, + example: 10, + }, + { + name: 'sort', + in: 'query', + description: + 'Fields to apply sort operations to. Accepts multiple fields in ascending and descending. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,-counter', + }, + { + name: 'fields', + in: 'query', + description: + 'Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below.', + schema: { + type: 'string', + }, + example: 'name,counter,description', + }, + { + name: 'count', + in: 'query', + description: 'Returns the number of entities matching the query parameters provided instead of the result payload', + schema: { + type: 'boolean', + }, + example: true, + }, + ], + description: + "Version 2.0 of the tools API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. Note - This response is limited to 100 records by default. Please use the 'page' query parameter to access records beyond the first 100. 
The 'limit' query parameter can therefore only be specified up to a maximum of 100.", + responses: { + 200: { + description: 'Successful response containing a list of tools matching query parameters', + }, + }, + }, + }, + '/api/v2/tools/{id}': { + get: { + summary: 'Returns a tool object', + tags: ['Tools v2.0'], + parameters: [ + { + in: 'path', + name: 'id', + required: true, + description: 'The ID of the tool', + schema: { + type: 'number', + example: 100000006, + }, + }, + ], + description: 'Returns a tool object, matched by its unique identifier, in the default format in which it is stored within the Gateway', + responses: { + 200: { + description: 'Successful response containing a single tool object', + }, + 404: { + description: 'A tool could not be found by the provided tool identifier', + }, + }, + }, + }, +}; diff --git 
a/docs/schemas/collections.schema.js b/docs/schemas/collections.schema.js new file mode 100644 index 00000000..ebdd5c3f --- /dev/null +++ b/docs/schemas/collections.schema.js @@ -0,0 +1,59 @@ +module.exports = { + $schema: 'http://json-schema.org/draft-07/schema', + title: 'Collections schema', + type: 'object', + properties: { + name: { + type: 'string', + }, + description: { + type: 'string', + }, + imageLink: { + type: 'string', + format: 'uri', + }, + authors: { + type: 'array', + minItems: 0, + items: { + type: 'integer', + }, + }, + relatedObjects: { + type: 'array', + minItems: 0, + items: { + type: 'object', + properties: { + reason: { + type: 'string', + }, + objectType: { + type: 'string', + }, + pid: { + type: 'string', + }, + user: { + type: 'string', + }, + updated: { + type: 'string', + }, + }, + }, + }, + publicflag: { + type: 'boolean', + }, + keywords: { + type: 'array', + minItems: 0, + items: { + type: 'integer', + }, + }, + }, + required: ['name', 'description', 'publicflag', 'authors'], +}; diff --git a/migrations/1627566998386-add_globals.js b/migrations/1627566998386-add_globals.js index 69129778..85bca95e 100644 --- a/migrations/1627566998386-add_globals.js +++ b/migrations/1627566998386-add_globals.js @@ -59,6 +59,12 @@ const globalData = { label: 'Commercial project', impliedValues: ['platinum', 'gold'], }, + { + id: mongoose.Types.ObjectId(), + displayOrder: 5, + label: 'N/A', + impliedValues: [], + }, ], }, { @@ -101,6 +107,13 @@ const globalData = { label: '1 year', impliedValues: ['platinum', 'gold', 'silver', 'bronze'], }, + { + id: mongoose.Types.ObjectId(), + displayOrder: 5, + definition: 'N/A', + label: 'N/A', + impliedValues: [], + }, ], }, { @@ -232,6 +245,13 @@ const globalData = { label: 'More than 10 years', impliedValues: ['platinum'], }, + { + id: mongoose.Types.ObjectId(), + displayOrder: 5, + definition: 'N/A', + label: 'N/A', + impliedValues: [], + }, ], }, { @@ -306,6 +326,13 @@ const globalData = { label: 'Model conforms to national standard and key fields coded to national/internal standard', impliedValues: ['platinum'], }, + { + id: mongoose.Types.ObjectId(), + displayOrder: 5, + definition: 'N/A', + label: 'N/A', + impliedValues: [], + }, ], }, { @@ -380,6 +407,13 @@ const globalData = { label: "Earlier and 'raw' versions and the impact of each stage of data cleaning", impliedValues: ['platinum'], }, + { + id: mongoose.Types.ObjectId(), + displayOrder: 5, + definition:'N/A', + label: "N/A", + impliedValues: [], + }, ], }, { diff --git a/migrations/1633525344331-Ig_2354_replace_hubs_with_hub.js b/migrations/1633525344331-Ig_2354_replace_hubs_with_hub.js new file mode 100644 index 00000000..b9b5854c --- /dev/null +++ b/migrations/1633525344331-Ig_2354_replace_hubs_with_hub.js @@ -0,0 +1,63 @@ +import { PublisherModel } from '../src/resources/publisher/publisher.model'; +import { Data as ToolModel } from '../src/resources/tool/data.model'; + +/** + * Make any changes you need to make to the database here + */ +async function up () { + await toolsUpdate(); + await publisherUpdate(); + +} + +async function toolsUpdate() { + const tools = await ToolModel.find({ type: "dataset", "datasetfields.publisher": { $regex: "HUBS" } }).lean(); + let tmpTool = []; + tools.forEach((tool => { + const { _id } = tool; + tmpTool.push({ + updateOne: { + filter: { _id }, + update: { + "datasetfields.publisher": replaceHubs(tool.datasetfields.publisher), + "datasetfields.metadataquality.publisher": replaceHubs(tool.datasetfields.metadataquality.publisher), + 
"datasetv2.summary.publisher.memberOf": replaceHubs(tool.datasetv2.summary.publisher.memberOf), + } + }, + }); + })); + await ToolModel.bulkWrite(tmpTool); +} + + +async function publisherUpdate() { + const publishers = await PublisherModel.find({ "publisherDetails.memberOf": "HUBS" }).lean(); + let tmpPub = []; + publishers.forEach((pub => { + const { _id } = pub; + tmpPub.push({ + updateOne: { + filter: { _id }, + update: { + "publisherDetails.memberOf": replaceHubs(pub.publisherDetails.memberOf), + "name" : replaceHubs(pub.name), + } + }, + }); + })); + await PublisherModel.bulkWrite(tmpPub); +} + + +function replaceHubs(input) { + return input.replace('HUBS','HUB') +} + +/** + * Make any changes that UNDO the up function side effects here (if possible) + */ +async function down () { + // Write migration here +} + +module.exports = { up, down }; diff --git a/migrations/1638716002879-remove_projects_from_related_resources.js b/migrations/1638716002879-remove_projects_from_related_resources.js new file mode 100644 index 00000000..1972a3ea --- /dev/null +++ b/migrations/1638716002879-remove_projects_from_related_resources.js @@ -0,0 +1,32 @@ +import { Data } from '../src/resources/tool/data.model'; +import { Collections } from '../src/resources/collections/collections.model'; +import { Course } from '../src/resources/course/course.model'; + +/** + * Make any changes you need to make to the database here + */ +async function up() { + //Remove projects that are in related resources for tools and papers + await Data.update({ 'relatedObjects.objectType': 'project' }, { $pull: { relatedObjects: { objectType: 'project' } } }, { multi: true }); + //Remove projects that are in related resources for collections + await Collections.update( + { 'relatedObjects.objectType': 'project' }, + { $pull: { relatedObjects: { objectType: 'project' } } }, + { multi: true } + ); + //Remove projects that are in related resources for courses + await Course.update( + { 'relatedObjects.objectType': 'project' }, + { $pull: { relatedObjects: { objectType: 'project' } } }, + { multi: true } + ); +} + +/** + * Make any changes that UNDO the up function side effects here (if possible) + */ +async function down() { + // Write migration here +} + +module.exports = { up, down }; diff --git a/migrations/README.md b/migrations/README.md index c31a180b..ce9cf4bb 100644 --- a/migrations/README.md +++ b/migrations/README.md @@ -1,4 +1,3 @@ - # HDR UK GATEWAY - Data Migrations The primary data source used by the Gateway Project is the noSQL solution provided by MongoDb. Data migration strategy is a fundamental part of software development and release cycles for a data intensive web application. The project team have chosen the NPM package Migrate-Mongoose - https://www.npmjs.com/package/migrate-mongoose to assist in the management of data migration scripts. This package allows developers to write versioned, reversible data migration scripts using the Mongoose library. @@ -27,11 +26,12 @@ Complete the scripts required for the UP process, and if possible, the DOWN proc #### Step 4 -With the scripts written, the functions can be tested by running the following command, replacing 'my_new_migration_script' with the name of the script you want to execute. 
+With the scripts written, the functions can be tested by running the following command, replacing 'my_new_migration_script' with the name of the script you want to execute, omitting the timestamp prefix. For example: +node -r esm migrations/migrate.js up add_globals node -r esm migrations/migrate.js up my_new_migration_script -When this process is completed, the connected database will have a new document representing your migration scripts inside the 'migrations' collection, which tracks the state of the migration. If you need to run your scripts multiple times for test purposes, you can change the state of the migration to 'Down'. +When this process is completed, the connected database will have a new document representing your migration scripts inside the 'migrations' collection, which tracks the state of the migration. If you need to run your scripts multiple times for test purposes, you can change the state of the migration to 'Down'. During this process, please ensure you are using a personal database. diff --git a/package.json b/package.json index 56d99e48..c4853af8 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "ajv-formats": "^2.0.2", "async": "^3.2.0", "await-to-js": "^2.1.1", - "axios": "0.21.1", + "axios": "0.21.3", "axios-retry": "^3.1.9", "base64url": "^3.0.1", "bcrypt": "^5.0.0", @@ -38,6 +38,7 @@ "express-session": "^1.17.1", "express-validator": "^6.6.1", "faker": "^5.3.1", + "flat": "^5.0.2", "form-data": "^3.0.0", "googleapis": "^55.0.0", "jose": "^2.0.2", @@ -57,6 +58,7 @@ "passport-jwt": "^4.0.0", "passport-linkedin-oauth2": "^2.0.0", "passport-openidconnect": "0.0.2", + "passport-orcid": "0.0.4", "prettier": "^2.2.1", "query-string": "^6.12.1", "randomstring": "^1.1.5", diff --git a/src/config/middleware.js b/src/config/middleware.js new file mode 100644 index 00000000..d2e915ef --- /dev/null +++ b/src/config/middleware.js @@ -0,0 +1,24 @@ +import { has, isNaN } from 'lodash'; + +export const resultLimit = (req, res, next, allowedLimit) => { + let error; + if(has(req.query, 'limit')) { + const requestedLimit = parseInt(req.query.limit); + + if(isNaN(requestedLimit)) { + error = `The result limit parameter provided must be a numeric value.`; + } + else if (requestedLimit > allowedLimit){ + error = `Maximum request limit exceeded. You may only request up to a maximum of ${allowedLimit} records per page. 
Please use the page query parameter to request further data.`; + } + } + + if (error) { + return res.status(400).json({ + success: false, + message: error, + }); + } + + next(); +}; diff --git a/src/config/server.js b/src/config/server.js index eacf8cdd..4cc51275 100644 --- a/src/config/server.js +++ b/src/config/server.js @@ -3,8 +3,6 @@ import express from 'express'; import Provider from 'oidc-provider'; import swaggerUi from 'swagger-ui-express'; -import YAML from 'yamljs'; -const swaggerDocument = YAML.load('./swagger.yaml'); import cors from 'cors'; import logger from 'morgan'; import passport from 'passport'; @@ -20,26 +18,29 @@ require('dotenv').config(); var app = express(); -Sentry.init({ - dsn: 'https://b6ea46f0fbe048c9974718d2c72e261b@o444579.ingest.sentry.io/5653683', - environment: helper.getEnvironment(), - integrations: [ - // enable HTTP calls tracing - new Sentry.Integrations.Http({ tracing: true }), - // enable Express.js middleware tracing - new Tracing.Integrations.Express({ - // trace all requests to the default router - app, - }), - ], - tracesSampleRate: 1.0, -}); -// RequestHandler creates a separate execution context using domains, so that every -// transaction/span/breadcrumb is attached to its own Hub instance -app.use(Sentry.Handlers.requestHandler()); -// TracingHandler creates a trace for every incoming request -app.use(Sentry.Handlers.tracingHandler()); -app.use(Sentry.Handlers.errorHandler()); +const readEnv = process.env.ENV || 'prod'; +if (readEnv === 'test' || readEnv === 'prod') { + Sentry.init({ + dsn: 'https://b6ea46f0fbe048c9974718d2c72e261b@o444579.ingest.sentry.io/5653683', + environment: helper.getEnvironment(), + integrations: [ + // enable HTTP calls tracing + new Sentry.Integrations.Http({ tracing: true }), + // enable Express.js middleware tracing + new Tracing.Integrations.Express({ + // trace all requests to the default router + app, + }), + ], + tracesSampleRate: 1.0, + }); + // RequestHandler creates a separate execution context using domains, so that every + // transaction/span/breadcrumb is attached to its own Hub instance + app.use(Sentry.Handlers.requestHandler()); + // TracingHandler creates a trace for every incoming request + app.use(Sentry.Handlers.tracingHandler()); + app.use(Sentry.Handlers.errorHandler()); +} const Account = require('./account'); const configuration = require('./configuration'); @@ -180,7 +181,7 @@ app.get('/api/v1/openid/interaction/:uid', setNoCache, (req, res, next) => { app.use('/api/v1/openid', oidc.callback); app.use('/api', router); -app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument)); +app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(require('../../docs/index.docs'))); app.use('/oauth', require('../resources/auth/oauth.route')); app.use('/api/v1/auth/sso/discourse', require('../resources/auth/sso/sso.discourse.router')); @@ -253,6 +254,8 @@ app.use('/api/v1/global', require('../resources/global/global.route')); app.use('/api/v1/search-preferences', require('../resources/searchpreferences/searchpreferences.route')); +app.use('/api/v2/data-use-registers', require('../resources/dataUseRegister/dataUseRegister.route')); + initialiseAuthentication(app); // launch our backend into a port diff --git a/src/middlewares/__tests__/activitylog.middleware.test.js b/src/middlewares/__tests__/activitylog.middleware.test.js new file mode 100644 index 00000000..71d9cd3d --- /dev/null +++ b/src/middlewares/__tests__/activitylog.middleware.test.js @@ -0,0 +1,175 @@ +import sinon from 'sinon'; + 
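// Illustrative sketch (not part of the diff above): the resultLimit helper added in
// src/config/middleware.js is written to be partially applied with a maximum page size
// and mounted ahead of a route handler. The route path, the limit of 100 and the
// relative import path used here are assumptions for the purpose of the example only.
import express from 'express';
import { resultLimit } from '../config/middleware';

const exampleRouter = express.Router();

// Requests with a ?limit= greater than 100, or a non-numeric limit, are rejected with
// a 400 before the handler runs; anything else falls through to the handler via next().
exampleRouter.get(
	'/api/v2/example-resource',
	(req, res, next) => resultLimit(req, res, next, 100),
	(req, res) => res.json({ success: true, data: [] })
);

export default exampleRouter;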
+import { validateViewRequest, authoriseView } from '../activitylog.middleware'; +import { datasetService } from '../../resources/dataset/dependency'; +import { UserModel } from '../../resources/user/user.model'; + +afterEach(function () { + sinon.restore(); +}); + +describe('Testing the ActivityLog middleware', () => { + const mockedRequest = () => { + const req = {}; + return req; + }; + + const mockedResponse = () => { + const res = {}; + res.status = jest.fn().mockReturnValue(res); + res.json = jest.fn().mockReturnValue(res); + return res; + }; + + describe('Testing the validateViewRequest middleware', () => { + const expectedResponse = { + success: false, + message: 'You must provide a valid log category and array of version identifiers to retrieve corresponding logs', + }; + + it('Should return 400 when no versionIds are passed in request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: [], type: 'dataset' }; + const nextFunction = jest.fn(); + + validateViewRequest(req, res, nextFunction); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(nextFunction.mock.calls.length).toBe(0); + }); + + it('Should return 400 if activity log type "data_request" or "dataset"', () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: [123, 456], type: 'notARealType' }; + const nextFunction = jest.fn(); + + validateViewRequest(req, res, nextFunction); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(nextFunction.mock.calls.length).toBe(0); + }); + + it('Should invoke next() if conditions are satisfied', () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: [123, 456], type: 'dataset' }; + const nextFunction = jest.fn(); + + validateViewRequest(req, res, nextFunction); + + expect(nextFunction.mock.calls.length).toBe(1); + }); + }); + describe('Testing the authoriseView middleware', () => { + const expectedResponse = { + success: false, + message: 'You are not authorised to perform this action', + }; + it('Should return a 401 error if the user is not authorised', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: ['xyz', 'abc'], type: 'dataset' }; + req.user = undefined; + const nextFunction = jest.fn(); + + let versionsStub = sinon.stub(datasetService, 'getDatasets').returns([ + { + datasetv2: { + identifier: 'abc', + summary: { + publisher: { + identifier: 'pub1', + }, + }, + }, + }, + { + datasetv2: { + identifier: 'xyz', + summary: { + publisher: { + identifier: 'pub2', + }, + }, + }, + }, + ]); + + await authoriseView(req, res, nextFunction); + + expect(versionsStub.calledOnce).toBe(true); + expect(res.status).toHaveBeenCalledWith(401); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(nextFunction.mock.calls.length).toBe(0); + }); + it('Should invoke next() if the user is authorised against dataset(s)', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: ['xyz', 'abc'], type: 'dataset' }; + req.user = new UserModel({ + _id: '618a72fd5ec8f54772b7a17b', + firstname: 'John', + lastname: 'Smith', + teams: [ + { + publisher: { _id: 'fakeTeam', name: 'fakeTeam' }, + type: 'admin', + members: [{ memberid: '618a72fd5ec8f54772b7a17b', roles: ['admin_dataset'] }], + }, + ], + }); + const nextFunction = jest.fn(); + + let versionsStub = 
sinon.stub(datasetService, 'getDatasets').returns([ + { + datasetv2: { + identifier: 'abc', + summary: { + publisher: { + identifier: 'pub1', + }, + }, + }, + }, + { + datasetv2: { + identifier: 'xyz', + summary: { + publisher: { + identifier: 'pub2', + }, + }, + }, + }, + ]); + + await authoriseView(req, res, nextFunction); + + expect(versionsStub.calledOnce).toBe(true); + expect(nextFunction.mock.calls.length).toBe(1); + }); + + it('Should respond 401 if an error is thrown', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + req.body = { versionIds: ['xyz', 'abc'], type: 'dataset' }; + const nextFunction = jest.fn(); + + let versionsStub = sinon.stub(datasetService, 'getDatasets').throws(); + + let badCall = await authoriseView(req, res, nextFunction); + + try { + badCall(); + } catch { + expect(versionsStub.calledOnce).toBe(true); + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.status).toHaveBeenCalledWith(401); + } + }); + }); +}); diff --git a/src/middlewares/__tests__/checkIDMiddleware.test.js b/src/middlewares/__tests__/checkIDMiddleware.test.js new file mode 100644 index 00000000..b35d4a67 --- /dev/null +++ b/src/middlewares/__tests__/checkIDMiddleware.test.js @@ -0,0 +1,56 @@ +import { checkIDMiddleware } from '../index'; + +describe('checkIDMiddleware', () => { + + const nextFunction = jest.fn(); + + const mockedResponse = () => { + const res = {}; + res.status = jest.fn().mockReturnValue(res); + res.json = jest.fn().mockReturnValue(res); + return res; + }; + + it('should return 400 response code when we dont have id value into list of paramteres', () => { + const expectedResponse = { + success: false, + message: 'You must provide a dataset identifier' + }; + + const mockedRequest = () => { + const req = {}; + req.params = {}; + return req; + }; + + const mockedReq = mockedRequest(); + const mockedRes = mockedResponse(); + + checkIDMiddleware(mockedReq, mockedRes, nextFunction); + + expect(mockedRes.status).toHaveBeenCalledWith(400); + expect(mockedRes.json).toHaveBeenCalledWith(expectedResponse); + }); + + it('should pass the middleware when we have id value into list of paramteres', () => { + const expectedResponse = {}; + + const mockedRequest = () => { + const req = {}; + req.params = { id: 1 }; + return req; + }; + + const mockedReq = mockedRequest(); + const mockedRes = mockedResponse(); + + nextFunction.mockReturnValue(expectedResponse); + + checkIDMiddleware(mockedReq, mockedRes, nextFunction); + + expect(mockedRes.status.mock.calls.length).toBe(0); + expect(mockedRes.json.mock.calls.length).toBe(0); + expect(nextFunction.mock.calls.length).toBe(1); + }); + +}); \ No newline at end of file diff --git a/src/middlewares/activitylog.middleware.js b/src/middlewares/activitylog.middleware.js new file mode 100644 index 00000000..60141579 --- /dev/null +++ b/src/middlewares/activitylog.middleware.js @@ -0,0 +1,167 @@ +import { isEmpty } from 'lodash'; + +import { activityLogService } from '../resources/activitylog/dependency'; +import { dataRequestService } from '../resources//datarequest/dependency'; +import { datasetService } from '../resources/dataset/dependency'; +import datasetonboardingUtil from '../resources/dataset/utils/datasetonboarding.util'; +import constants from '../resources/utilities/constants.util'; + +const validateViewRequest = (req, res, next) => { + const { versionIds = [], type = '' } = req.body; + + if (isEmpty(versionIds) || !Object.values(constants.activityLogTypes).includes(type)) { + return res.status(400).json({ 
+ success: false, + message: 'You must provide a valid log category and array of version identifiers to retrieve corresponding logs', + }); + } + + next(); +}; + +const authoriseView = async (req, res, next) => { + const requestingUser = req.user; + const { versionIds = [] } = req.body; + let authorised, userType, accessRecords; + + try { + if (req.body.type === constants.activityLogTypes.DATA_ACCESS_REQUEST) { + ({ authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions(versionIds, requestingUser)); + if (!authorised) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + + req.body.userType = userType; + req.body.versions = accessRecords; + } else if (req.body.type === constants.activityLogTypes.DATASET) { + const datasetVersions = await datasetService.getDatasets({ _id: { $in: versionIds } }, { lean: true }); + let permissionsArray = []; + await datasetVersions.forEach(async version => { + ({ authorised } = await datasetonboardingUtil.getUserPermissionsForDataset( + version.datasetv2.identifier, + requestingUser, + version.datasetv2.summary.publisher.identifier + )); + permissionsArray.push(authorised); + }); + + if (!permissionsArray.includes(true)) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + req.body.userType = requestingUser.teams.map(team => team.type).includes(constants.userTypes.ADMIN) + ? constants.userTypes.ADMIN + : constants.userTypes.CUSTODIAN; + req.body.versions = datasetVersions; + } + } catch (error) { + return res.status(401).json({ + success: false, + message: 'Error authenticating the user against submitted versionIds. Please check the submitted dataset versionIds', + }); + } + + next(); +}; + +const validateCreateRequest = (req, res, next) => { + const { versionId, description, timestamp } = req.body; + const { type } = req.params; + + if (!versionId || !description || !timestamp || !Object.values(constants.activityLogTypes).includes(type)) { + return res.status(400).json({ + success: false, + message: 'You must provide a valid log category and the following event details: associated version, description and timestamp', + }); + } + + next(); +}; + +const authoriseCreate = async (req, res, next) => { + const requestingUser = req.user; + const { versionId } = req.body; + const { type } = req.params; + + const { authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions([versionId], requestingUser); + if (isEmpty(accessRecords)) { + return res.status(404).json({ + success: false, + message: 'The requested application version could not be found', + }); + } + if (!authorised || userType !== constants.userTypes.CUSTODIAN) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + + req.body.userType = userType; + req.body.accessRecord = accessRecords[0]; + req.body.versionTitle = accessRecords[0].getVersionById(versionId).detailedTitle; + req.body.type = type; + + next(); +}; + +const validateDeleteRequest = (req, res, next) => { + const { id } = req.params; + + if (!id) { + return res.status(400).json({ + success: false, + message: 'You must provide a log event identifier', + }); + } + + next(); +}; + +const authoriseDelete = async (req, res, next) => { + const requestingUser = req.user; + const { id, type } = req.params; + + const log = await activityLogService.getLog(id, type); + + if (!log) { + 
return res.status(404).json({ + success: false, + message: 'The requested application log entry could not be found', + }); + } + + const { authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions([log.versionId], requestingUser); + if (isEmpty(accessRecords)) { + return res.status(404).json({ + success: false, + message: 'The requested application version could not be found', + }); + } + if (!authorised || userType !== constants.userTypes.CUSTODIAN) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + if (log.eventType !== constants.activityLogEvents.data_access_request.MANUAL_EVENT) { + return res.status(400).json({ + success: false, + message: 'You cannot delete a system generated log entry', + }); + } + + req.body.userType = userType; + req.body.accessRecord = accessRecords[0]; + req.body.versionId = log.versionId; + req.body.type = type; + + next(); +}; + +export { validateViewRequest, authoriseView, authoriseCreate, validateCreateRequest, validateDeleteRequest, authoriseDelete }; diff --git a/src/middlewares/checkIDMiddleware.js b/src/middlewares/checkIDMiddleware.js new file mode 100644 index 00000000..ed9db761 --- /dev/null +++ b/src/middlewares/checkIDMiddleware.js @@ -0,0 +1,15 @@ +const checkIDMiddleware = (req, res, next) => { + + const { id } = req.params; + + if (!id) { + return res.status(400).json({ + success: false, + message: 'You must provide a dataset identifier', + }); + } + + next(); +} + +export { checkIDMiddleware } \ No newline at end of file diff --git a/src/middlewares/index.js b/src/middlewares/index.js new file mode 100644 index 00000000..f8d6fc4b --- /dev/null +++ b/src/middlewares/index.js @@ -0,0 +1,19 @@ +import { checkIDMiddleware } from './checkIDMiddleware'; +import { + validateViewRequest, + authoriseView, + authoriseCreate, + validateCreateRequest, + validateDeleteRequest, + authoriseDelete, +} from './activitylog.middleware'; + +export { + checkIDMiddleware, + validateViewRequest, + authoriseView, + authoriseCreate, + validateCreateRequest, + validateDeleteRequest, + authoriseDelete, +}; diff --git a/src/resources/activitylog/__mocks__/activitylogs.js b/src/resources/activitylog/__mocks__/activitylogs.dar.js similarity index 100% rename from src/resources/activitylog/__mocks__/activitylogs.js rename to src/resources/activitylog/__mocks__/activitylogs.dar.js diff --git a/src/resources/activitylog/__mocks__/activitylogs.dataset.js b/src/resources/activitylog/__mocks__/activitylogs.dataset.js new file mode 100644 index 00000000..7446eff2 --- /dev/null +++ b/src/resources/activitylog/__mocks__/activitylogs.dataset.js @@ -0,0 +1,104 @@ +export const datasetActivityLogMocks = [ + { + _id: '6189679675a82a0867ce55b9', + eventType: 'newDatasetVersionSubmitted', + logType: 'dataset', + timestamp: '12345', + user: '616993c3034a7d773064e208', + userDetails: { firstName: 'John', lastName: 'Smith', role: 'custodian' }, + version: '1.0.0', + versionId: '6189673475a82a0867ce54fa', + userTypes: ['admin', 'custodian'], + __v: 0, + }, + { + _id: '618967d475a82a0867ce56b0', + eventType: 'newDatasetVersionSubmitted', + logType: 'dataset', + timestamp: '12345', + user: '616993c3034a7d773064e208', + userDetails: { firstName: 'John', lastName: 'Smith', role: 'custodian' }, + version: '2.0.0', + versionId: '618967b075a82a0867ce5650', + userTypes: ['admin', 'custodian'], + __v: 0, + }, +]; + +export const formattedJSONResponseMock = [ + { + version: 'Version 2.0.0', + 
versionNumber: 2, + meta: { + dateSubmitted: '12345', + dateCreated: '12345', + applicationStatus: 'active', + }, + events: [ + { + _id: '618967d475a82a0867ce56b0', + eventType: 'newDatasetVersionSubmitted', + logType: 'dataset', + timestamp: '12345', + user: '616993c3034a7d773064e208', + userDetails: { + firstName: 'John', + lastName: 'Smith', + role: 'custodian', + }, + version: '2.0.0', + versionId: '618967b075a82a0867ce5650', + userTypes: ['admin', 'custodian'], + __v: 0, + }, + ], + }, + { + version: 'Version 1.0.0', + versionNumber: 1, + meta: { + dateSubmitted: '12345', + dateCreated: '12345', + applicationStatus: 'rejected', + }, + events: [ + { + _id: '6189679675a82a0867ce55b9', + eventType: 'newDatasetVersionSubmitted', + logType: 'dataset', + timestamp: '12345', + user: '616993c3034a7d773064e208', + userDetails: { + firstName: 'John', + lastName: 'Smith', + role: 'custodian', + }, + version: '1.0.0', + versionId: '6189673475a82a0867ce54fa', + userTypes: ['admin', 'custodian'], + __v: 0, + }, + ], + }, +]; + +export const datasetVersionsMock = [ + { + _id: '6189673475a82a0867ce54fa', + timestamps: { + created: '12345', + submitted: '12345', + }, + datasetVersion: '1.0.0', + activeflag: 'rejected', + }, + { + _id: '618967b075a82a0867ce5650', + timestamps: { + created: '12345', + submitted: '12345', + }, + datasetVersion: '2.0.0', + activeflag: 'active', + }, +]; diff --git a/src/resources/activitylog/__tests__/activitylog.service.test.js b/src/resources/activitylog/__tests__/activitylog.service.test.js index 735eea81..394ef82d 100644 --- a/src/resources/activitylog/__tests__/activitylog.service.test.js +++ b/src/resources/activitylog/__tests__/activitylog.service.test.js @@ -1,8 +1,15 @@ -import { activityLogService } from '../dependency'; -import { partyTimeRanges } from '../__mocks__/activitylogs'; import { cloneDeep } from 'lodash'; +import sinon from 'sinon'; + +import { activityLogService, activityLogRepository } from '../dependency'; +import { partyTimeRanges } from '../__mocks__/activitylogs.dar'; +import { datasetActivityLogMocks, formattedJSONResponseMock, datasetVersionsMock } from '../__mocks__/activitylogs.dataset'; import constants from '../../utilities/constants.util'; +afterEach(function () { + sinon.restore(); +}); + describe('ActivityLogService', function () { describe('calculateTimeWithParty', function () { // Arrange @@ -36,4 +43,78 @@ describe('ActivityLogService', function () { } ); }); + describe('logActivity', () => { + it('Should invoke the logDatasetActivity function when eventype is "dataset', async () => { + let serviceStub = sinon.stub(activityLogService, 'logDatasetActivity'); + + await activityLogService.logActivity('mockEventType', { type: constants.activityLogTypes.DATASET }); + + expect(serviceStub.calledOnce).toBe(true); + }); + }); + + describe('logDatasetActivity', () => { + const datasetLoggingActivities = Object.keys(constants.activityLogEvents.dataset); + const context = { + updatedDataset: { datasetVersion: '1.0.0', _id: '618a72fd5ec8f54772b7a17a', applicationStatusDesc: 'Some admin comment!' 
}, + user: { + firstname: 'John', + lastname: 'Smith', + _id: '618a72fd5ec8f54772b7a17b', + teams: [ + { + publisher: { _id: 'fakeTeam', name: 'fakeTeam' }, + type: 'admin', + members: [{ memberid: '618a72fd5ec8f54772b7a17b', roles: ['admin_dataset'] }], + }, + ], + }, + differences: [{ 'summary/title': 'VERSION 2' }], + }; + + test.each(datasetLoggingActivities)('Each event type creates a valid log', async event => { + let createActivityStub = sinon.stub(activityLogRepository, 'createActivityLog'); + sinon.stub(Date, 'now').returns('123456'); + + let log = { + eventType: constants.activityLogEvents.dataset[event], + logType: constants.activityLogTypes.DATASET, + timestamp: '123456', + user: context.user._id, + userDetails: { firstName: context.user.firstname, lastName: context.user.lastname, role: 'admin' }, + version: context.updatedDataset.datasetVersion, + versionId: context.updatedDataset._id, + userTypes: [constants.userTypes.ADMIN, constants.userTypes.CUSTODIAN], + }; + + await activityLogService.logDatasetActivity(constants.activityLogEvents.dataset[event], context); + + expect(createActivityStub.calledOnce).toBe(true); + if (event === 'DATASET_VERSION_SUBMITTED' || event === 'DATASET_VERSION_ARCHIVED' || event === 'DATASET_VERSION_UNARCHIVED') { + expect(createActivityStub.calledWith(log)).toBe(true); + } + if (event === 'DATASET_VERSION_APPROVED' || event === 'DATASET_VERSION_REJECTED') { + log.adminComment = 'Some admin comment!'; + expect(createActivityStub.calledWith(log)).toBe(true); + } + if (event === 'DATASET_UPDATES_SUBMITTED') { + log.datasetUpdates = [{ 'summary/title': 'VERSION 2' }]; + expect(createActivityStub.calledWith(log)).toBe(true); + } + }); + }); + describe('searchLogs and formatLogs', () => { + it('Should returned correctly formatted logs', async () => { + const formatLogsStub = sinon.stub(activityLogRepository, 'searchLogs').returns(datasetActivityLogMocks); + const versionIds = [datasetVersionsMock[0]._id, datasetVersionsMock[1]._id]; + const type = 'dataset'; + const userType = 'admin'; + const versions = datasetVersionsMock; + + const formattedResponse = await activityLogService.searchLogs(versionIds, type, userType, versions); + + expect(formatLogsStub.calledOnce).toBe(true); + expect(formattedResponse).toStrictEqual(formattedJSONResponseMock); + }); + }); }); diff --git a/src/resources/activitylog/activitylog.controller.js b/src/resources/activitylog/activitylog.controller.js index d08ac4ab..207750a7 100644 --- a/src/resources/activitylog/activitylog.controller.js +++ b/src/resources/activitylog/activitylog.controller.js @@ -16,10 +16,10 @@ export default class ActivityLogController extends Controller { async searchLogs(req, res) { try { // Extract required log params - const { versionIds = [], type = '', userType, accessRecords } = req.body; + const { versionIds = [], type = '', userType, versions } = req.body; // Find the logs - const logs = await this.activityLogService.searchLogs(versionIds, type, userType, accessRecords); + const logs = await this.activityLogService.searchLogs(versionIds, type, userType, versions); // Return the logs return res.status(200).json({ @@ -42,7 +42,7 @@ export default class ActivityLogController extends Controller { const { versionId, description, timestamp, type, userType, accessRecord, versionTitle } = req.body; // Create new event log - await this.activityLogService.logActivity(constants.activityLogEvents.MANUAL_EVENT, { + await 
this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.MANUAL_EVENT, { versionId, versionTitle, description, @@ -52,7 +52,12 @@ export default class ActivityLogController extends Controller { }); // Send notifications - await this.createNotifications(constants.activityLogNotifications.MANUALEVENTADDED, { description, timestamp }, accessRecord, req.user); + await this.createNotifications( + constants.activityLogNotifications.MANUALEVENTADDED, + { description, timestamp }, + accessRecord, + req.user + ); // Get logs for version that was updated const [affectedVersion] = await this.activityLogService.searchLogs([versionId], type, userType, [accessRecord], false); @@ -85,7 +90,12 @@ export default class ActivityLogController extends Controller { await this.activityLogService.deleteLog(id); // Send notifications - await this.createNotifications(constants.activityLogNotifications.MANUALEVENTREMOVED, { description: log.plainText, timestamp: log.timestamp }, accessRecord, req.user); + await this.createNotifications( + constants.activityLogNotifications.MANUALEVENTREMOVED, + { description: log.plainText, timestamp: log.timestamp }, + accessRecord, + req.user + ); // Get logs for version that was updated const [affectedVersion] = await this.activityLogService.searchLogs([versionId], type, userType, [accessRecord], false); @@ -119,7 +129,9 @@ export default class ActivityLogController extends Controller { // Create in-app notifications await notificationBuilder.triggerNotificationMessage( teamMembersIds, - `${user.firstname} ${user.lastname} (${publisher}) has added an event to the activity log of '${projectName || `No project name set`}' data access request application`, + `${user.firstname} ${user.lastname} (${publisher}) has added an event to the activity log of '${ + projectName || `No project name set` + }' data access request application`, 'data access request log updated', _id, publisher @@ -150,7 +162,9 @@ export default class ActivityLogController extends Controller { // Create in-app notifications await notificationBuilder.triggerNotificationMessage( teamMembersIds, - `${user.firstname} ${user.lastname} (${publisher}) has deleted an event from the activity log of '${projectName || `No project name set`}' data access request application`, + `${user.firstname} ${user.lastname} (${publisher}) has deleted an event from the activity log of '${ + projectName || `No project name set` + }' data access request application`, 'data access request log updated', _id, publisher diff --git a/src/resources/activitylog/activitylog.model.js b/src/resources/activitylog/activitylog.model.js index 95b34797..f50a27a0 100644 --- a/src/resources/activitylog/activitylog.model.js +++ b/src/resources/activitylog/activitylog.model.js @@ -2,18 +2,29 @@ import { model, Schema } from 'mongoose'; import constants from '../utilities/constants.util'; const ActivityLogSchema = new Schema({ - eventType: { type: String, required: true, enum: Object.values(constants.activityLogEvents) }, + eventType: { + type: String, + required: true, + enum: Object.values({ ...constants.activityLogEvents.dataset, ...constants.activityLogEvents.data_access_request }), + }, logType: { type: String, required: true, enum: Object.values(constants.activityLogTypes) }, - userTypes: [], + userTypes: { type: Array, required: false, default: void 0 }, timestamp: { type: Date, required: true }, user: { type: Schema.Types.ObjectId, ref: 'User', required: true }, + userDetails: { + firstName: { type: String }, + lastName: { 
type: String }, + role: { type: String }, + }, versionId: { type: Schema.Types.ObjectId, required: true }, version: { type: String, required: true }, - plainText: { type: String, required: true }, + plainText: { type: String, required: false }, detailedText: String, - html: { type: String, required: true }, + html: { type: String, required: false }, detailedHtml: String, - isPresubmission: Boolean + isPresubmission: Boolean, + datasetUpdates: {}, + adminComment: String, }); export const ActivityLog = model('ActivityLog', ActivityLogSchema); diff --git a/src/resources/activitylog/activitylog.route.js b/src/resources/activitylog/activitylog.route.js index f13d02a3..00f048cf 100644 --- a/src/resources/activitylog/activitylog.route.js +++ b/src/resources/activitylog/activitylog.route.js @@ -1,161 +1,56 @@ import express from 'express'; import passport from 'passport'; +import { + validateViewRequest, + authoriseView, + authoriseCreate, + validateCreateRequest, + validateDeleteRequest, + authoriseDelete, +} from '../../middlewares/index'; import ActivityLogController from './activitylog.controller'; import { activityLogService } from './dependency'; -import { dataRequestService } from '../datarequest/dependency'; import { logger } from '../utilities/logger'; -import { isEmpty } from 'lodash'; -import constants from '../utilities/constants.util'; const router = express.Router(); const activityLogController = new ActivityLogController(activityLogService); const logCategory = 'Activity Log'; -const validateViewRequest = (req, res, next) => { - const { versionIds = [], type = '' } = req.body; - - if (isEmpty(versionIds) || !Object.values(constants.activityLogTypes).includes(type)) { - return res.status(400).json({ - success: false, - message: 'You must provide a valid log category and array of version identifiers to retrieve corresponding logs', - }); - } - - next(); -}; - -const authoriseView = async (req, res, next) => { - const requestingUser = req.user; - const { versionIds = [] } = req.body; - - const { authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions(versionIds, requestingUser); - if (!authorised) { - return res.status(401).json({ - success: false, - message: 'You are not authorised to perform this action', - }); - } - req.body.userType = userType; - req.body.accessRecords = accessRecords; - - next(); -}; - -const validateCreateRequest = (req, res, next) => { - const { versionId, description, timestamp } = req.body; - const { type } = req.params; - - if (!versionId || !description || !timestamp || !Object.values(constants.activityLogTypes).includes(type)) { - return res.status(400).json({ - success: false, - message: 'You must provide a valid log category and the following event details: associated version, description and timestamp', - }); - } - - next(); -}; - -const authoriseCreate = async (req, res, next) => { - const requestingUser = req.user; - const { versionId } = req.body; - const { type } = req.params; - - const { authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions([versionId], requestingUser); - if(isEmpty(accessRecords)) { - return res.status(404).json({ - success: false, - message: 'The requested application version could not be found', - }); - } - if (!authorised || userType !== constants.userTypes.CUSTODIAN) { - return res.status(401).json({ - success: false, - message: 'You are not authorised to perform this action', - }); - } - - req.body.userType = userType; - req.body.accessRecord = 
accessRecords[0]; - req.body.versionTitle = accessRecords[0].getVersionById(versionId).detailedTitle; - req.body.type = type; - - next(); -}; - -const validateDeleteRequest = (req, res, next) => { - const { id } = req.params; - - if (!id) { - return res.status(400).json({ - success: false, - message: 'You must provide a log event identifier', - }); - } - - next(); -}; - -const authoriseDelete = async (req, res, next) => { - const requestingUser = req.user; - const { id, type } = req.params; - - const log = await activityLogService.getLog(id, type); - - if(!log) { - return res.status(404).json({ - success: false, - message: 'The requested application log entry could not be found', - }); - } - - const { authorised, userType, accessRecords } = await dataRequestService.checkUserAuthForVersions([log.versionId], requestingUser); - if(isEmpty(accessRecords)) { - return res.status(404).json({ - success: false, - message: 'The requested application version could not be found', - }); - } - if (!authorised || userType !== constants.userTypes.CUSTODIAN) { - return res.status(401).json({ - success: false, - message: 'You are not authorised to perform this action', - }); - } - if (log.eventType !== constants.activityLogEvents.MANUAL_EVENT) { - return res.status(400).json({ - success: false, - message: 'You cannot delete a system generated log entry', - }); - } - - req.body.userType = userType; - req.body.accessRecord = accessRecords[0]; - req.body.versionId = log.versionId; - req.body.type = type; - - next(); -}; - // @route POST /api/v2/activitylog // @desc Returns a collection of logs based on supplied query parameters // @access Private -router.post('/', passport.authenticate('jwt'), validateViewRequest, authoriseView, logger.logRequestMiddleware({ logCategory, action: 'Viewed activity logs' }), (req, res) => - activityLogController.searchLogs(req, res) +router.post( + '/', + passport.authenticate('jwt'), + validateViewRequest, + authoriseView, + logger.logRequestMiddleware({ logCategory, action: 'Viewed activity logs' }), + (req, res) => activityLogController.searchLogs(req, res) ); // @route POST /api/v2/activitylog/event // @desc Creates a new manual event in the activity log identified in the payload // @access Private -router.post('/:type', passport.authenticate('jwt'), validateCreateRequest, authoriseCreate, logger.logRequestMiddleware({ logCategory, action: 'Created manual event' }), (req, res) => - activityLogController.createLog(req, res) +router.post( + '/:type', + passport.authenticate('jwt'), + validateCreateRequest, + authoriseCreate, + logger.logRequestMiddleware({ logCategory, action: 'Created manual event' }), + (req, res) => activityLogController.createLog(req, res) ); // @route DELETE /api/v2/activitylog/id // @desc Delete a manual event from the activity log // @access Private -router.delete('/:type/:id', passport.authenticate('jwt'), validateDeleteRequest, authoriseDelete, logger.logRequestMiddleware({ logCategory, action: 'Deleted manual event' }), (req, res) => - activityLogController.deleteLog(req, res) +router.delete( + '/:type/:id', + passport.authenticate('jwt'), + validateDeleteRequest, + authoriseDelete, + logger.logRequestMiddleware({ logCategory, action: 'Deleted manual event' }), + (req, res) => activityLogController.deleteLog(req, res) ); module.exports = router; diff --git a/src/resources/activitylog/activitylog.service.js b/src/resources/activitylog/activitylog.service.js index f3b735b0..9b825d5f 100644 --- a/src/resources/activitylog/activitylog.service.js +++ 
b/src/resources/activitylog/activitylog.service.js @@ -10,7 +10,7 @@ export default class activityLogService { async searchLogs(versionIds, type, userType, versions, includePresubmission) { const logs = await this.activityLogRepository.searchLogs(versionIds, type, userType); - return this.formatLogs(logs, versions, includePresubmission); + return this.formatLogs(logs, type, versions, includePresubmission); } getLog(id, type) { @@ -21,6 +21,207 @@ export default class activityLogService { return this.activityLogRepository.deleteLog(id); } + formatLogs(logs, type, versions, includePresubmission = true) { + let formattedVersionEvents; + switch (type) { + case constants.activityLogTypes.DATA_ACCESS_REQUEST: + let presubmissionEvents = []; + if (includePresubmission) { + presubmissionEvents = this.buildPresubmissionEvents(logs); + } + + formattedVersionEvents = versions.reduce((arr, version) => { + const { + majorVersion: majorVersionNumber, + dateSubmitted, + dateCreated, + applicationType, + applicationStatus, + _id: majorVersionId, + amendmentIterations = [], + } = version; + + const partyDurations = this.getPartyTimeDistribution(version); + + const majorVersion = this.buildVersionEvents( + `${majorVersionNumber}`, + dateSubmitted, + dateCreated, + null, + applicationType, + applicationStatus, + () => this.getEventsForVersion(logs, majorVersionId), + () => this.calculateTimeWithParty(partyDurations, constants.userTypes.APPLICANT) + ); + + if (majorVersion.events.length > 0) { + arr.push(majorVersion); + } + + amendmentIterations.forEach((iterationMinorVersion, index) => { + const { + dateSubmitted: minorVersionDateSubmitted, + dateCreated: minorVersionDateCreated, + dateReturned: minorVersionDateReturned, + _id: minorVersionId, + } = iterationMinorVersion; + const partyDurations = this.getPartyTimeDistribution(iterationMinorVersion); + const minorVersion = this.buildVersionEvents( + `${majorVersionNumber}.${index + 1}`, + minorVersionDateSubmitted, + minorVersionDateCreated, + minorVersionDateReturned, + 'Update', + applicationStatus, + () => this.getEventsForVersion(logs, minorVersionId), + () => this.calculateTimeWithParty(partyDurations, constants.userTypes.APPLICANT) + ); + if (minorVersion.events.length > 0) { + arr.push(minorVersion); + } + }); + + return arr; + }, []); + + if (!isEmpty(presubmissionEvents)) { + formattedVersionEvents.push(presubmissionEvents); + } + break; + + case constants.activityLogTypes.DATASET: + formattedVersionEvents = versions.reduce((arr, version) => { + const { + datasetVersion, + timestamps: { submitted: dateSubmitted, created: dateCreated }, + activeflag, + _id, + } = version; + + const formattedVersion = { + version: `Version ${datasetVersion}`, + versionNumber: parseFloat(datasetVersion), + meta: { + ...(dateSubmitted && { dateSubmitted }), + ...(dateCreated && { dateCreated }), + applicationStatus: activeflag, + }, + events: this.getEventsForVersion(logs, _id), + }; + + arr.push(formattedVersion); + + return arr; + }, []); + break; + } + const orderedVersionEvents = orderBy(formattedVersionEvents, ['versionNumber'], ['desc']); + return orderedVersionEvents; + } + + logActivity(eventType, context) { + const logType = context.type; + switch (logType) { + case constants.activityLogTypes.DATA_ACCESS_REQUEST: + this.logDataAccessRequestActivity(eventType, context); + break; + case constants.activityLogTypes.DATASET: + this.logDatasetActivity(eventType, context); + break; + } + } + + async logDatasetActivity(eventType, context) { + const { updatedDataset, 
user, differences } = context; + const userRole = user.teams.map(team => team.type).includes(constants.userTypes.ADMIN) + ? constants.userTypes.ADMIN + : constants.userTypes.CUSTODIAN; + let log = { + eventType: eventType, + logType: constants.activityLogTypes.DATASET, + timestamp: Date.now(), + user: user._id, + userDetails: { firstName: user.firstname, lastName: user.lastname, role: userRole }, + version: updatedDataset.datasetVersion, + versionId: updatedDataset._id, + userTypes: [constants.userTypes.ADMIN, constants.userTypes.CUSTODIAN], + }; + + if ( + eventType === constants.activityLogEvents.dataset.DATASET_VERSION_REJECTED || + eventType === constants.activityLogEvents.dataset.DATASET_VERSION_APPROVED + ) + log['adminComment'] = updatedDataset.applicationStatusDesc; + + if (eventType === constants.activityLogEvents.dataset.DATASET_UPDATES_SUBMITTED && differences) log['datasetUpdates'] = differences; + + await this.activityLogRepository.createActivityLog(log); + } + + logDataAccessRequestActivity(eventType, context) { + switch (eventType) { + case constants.activityLogEvents.data_access_request.APPLICATION_SUBMITTED: + this.logApplicationSubmittedEvent(context); + break; + case constants.activityLogEvents.data_access_request.REVIEW_PROCESS_STARTED: + this.logReviewProcessStartedEvent(context); + break; + case constants.activityLogEvents.data_access_request.UPDATES_SUBMITTED: + this.logUpdatesSubmittedEvent(context); + break; + case constants.activityLogEvents.data_access_request.AMENDMENT_SUBMITTED: + this.logAmendmentSubmittedEvent(context); + break; + case constants.activityLogEvents.data_access_request.APPLICATION_APPROVED: + this.logApplicationApprovedEvent(context); + break; + case constants.activityLogEvents.data_access_request.APPLICATION_APPROVED_WITH_CONDITIONS: + this.logApplicationApprovedWithConditionsEvent(context); + break; + case constants.activityLogEvents.data_access_request.APPLICATION_REJECTED: + this.logApplicationRejectedEvent(context); + break; + case constants.activityLogEvents.data_access_request.COLLABORATOR_ADDEDD: + this.logCollaboratorAddedEvent(context); + break; + case constants.activityLogEvents.data_access_request.COLLABORATOR_REMOVED: + this.logCollaboratorRemovedEvent(context); + break; + case constants.activityLogEvents.data_access_request.PRESUBMISSION_MESSAGE: + this.logPresubmissionMessages(context); + break; + case constants.activityLogEvents.data_access_request.CONTEXTUAL_MESSAGE: + this.logContextualMessage(context); + break; + case constants.activityLogEvents.data_access_request.NOTE: + this.logNote(context); + break; + case constants.activityLogEvents.data_access_request.UPDATE_REQUESTED: + this.logUpdateRequestedEvent(context); + break; + case constants.activityLogEvents.data_access_request.WORKFLOW_ASSIGNED: + this.logWorkflowAssignedEvent(context); + break; + case constants.activityLogEvents.data_access_request.REVIEW_PHASE_STARTED: + this.logReviewPhaseStartedEvent(context); + break; + case constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_ISSUE: + this.logReccomendationWithIssueEvent(context); + break; + case constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_NO_ISSUE: + this.logReccomendationWithNoIssueEvent(context); + break; + case constants.activityLogEvents.data_access_request.DEADLINE_PASSED: + this.logDeadlinePassedEvent(context); + break; + case constants.activityLogEvents.data_access_request.FINAL_DECISION_REQUIRED: + this.logFinalDecisionRequiredEvent(context); + break; + case 
constants.activityLogEvents.data_access_request.MANUAL_EVENT: + this.logManualEvent(context); + } + } + getActiveQuestion(questionsArr, questionId) { let child; @@ -45,75 +246,6 @@ export default class activityLogService { } } - formatLogs(logs, versions, includePresubmission = true) { - let presubmissionEvents = []; - if (includePresubmission) { - presubmissionEvents = this.buildPresubmissionEvents(logs); - } - - const formattedVersionEvents = versions.reduce((arr, version) => { - const { - majorVersion: majorVersionNumber, - dateSubmitted, - dateCreated, - applicationType, - applicationStatus, - _id: majorVersionId, - amendmentIterations = [], - } = version; - - const partyDurations = this.getPartyTimeDistribution(version); - - const majorVersion = this.buildVersionEvents( - `${majorVersionNumber}`, - dateSubmitted, - dateCreated, - null, - applicationType, - applicationStatus, - () => this.getEventsForVersion(logs, majorVersionId), - () => this.calculateTimeWithParty(partyDurations, constants.userTypes.APPLICANT) - ); - - if (majorVersion.events.length > 0) { - arr.push(majorVersion); - } - - amendmentIterations.forEach((iterationMinorVersion, index) => { - const { - dateSubmitted: minorVersionDateSubmitted, - dateCreated: minorVersionDateCreated, - dateReturned: minorVersionDateReturned, - _id: minorVersionId, - } = iterationMinorVersion; - const partyDurations = this.getPartyTimeDistribution(iterationMinorVersion); - const minorVersion = this.buildVersionEvents( - `${majorVersionNumber}.${index + 1}`, - minorVersionDateSubmitted, - minorVersionDateCreated, - minorVersionDateReturned, - 'Update', - applicationStatus, - () => this.getEventsForVersion(logs, minorVersionId), - () => this.calculateTimeWithParty(partyDurations, constants.userTypes.APPLICANT) - ); - if (minorVersion.events.length > 0) { - arr.push(minorVersion); - } - }); - - return arr; - }, []); - - if (!isEmpty(presubmissionEvents)) { - formattedVersionEvents.push(presubmissionEvents); - } - - const orderedVersionEvents = orderBy(formattedVersionEvents, ['versionNumber'], ['desc']); - - return orderedVersionEvents; - } - buildPresubmissionEvents(logs) { const presubmissionEvents = this.getEventsForVersion(logs); @@ -293,76 +425,12 @@ export default class activityLogService { return partyDurations; } - async logActivity(eventType, context) { - switch (eventType) { - case constants.activityLogEvents.APPLICATION_SUBMITTED: - this.logApplicationSubmittedEvent(context); - break; - case constants.activityLogEvents.REVIEW_PROCESS_STARTED: - this.logReviewProcessStartedEvent(context); - break; - case constants.activityLogEvents.UPDATES_SUBMITTED: - this.logUpdatesSubmittedEvent(context); - break; - case constants.activityLogEvents.AMENDMENT_SUBMITTED: - this.logAmendmentSubmittedEvent(context); - break; - case constants.activityLogEvents.APPLICATION_APPROVED: - this.logApplicationApprovedEvent(context); - break; - case constants.activityLogEvents.APPLICATION_APPROVED_WITH_CONDITIONS: - this.logApplicationApprovedWithConditionsEvent(context); - break; - case constants.activityLogEvents.APPLICATION_REJECTED: - this.logApplicationRejectedEvent(context); - break; - case constants.activityLogEvents.COLLABORATOR_ADDEDD: - this.logCollaboratorAddedEvent(context); - break; - case constants.activityLogEvents.COLLABORATOR_REMOVED: - this.logCollaboratorRemovedEvent(context); - break; - case constants.activityLogEvents.PRESUBMISSION_MESSAGE: - this.logPresubmissionMessages(context); - break; - case 
constants.activityLogEvents.CONTEXTUAL_MESSAGE: - this.logContextualMessage(context); - break; - case constants.activityLogEvents.NOTE: - this.logNote(context); - break; - case constants.activityLogEvents.UPDATE_REQUESTED: - this.logUpdateRequestedEvent(context); - break; - case constants.activityLogEvents.WORKFLOW_ASSIGNED: - this.logWorkflowAssignedEvent(context); - break; - case constants.activityLogEvents.REVIEW_PHASE_STARTED: - this.logReviewPhaseStartedEvent(context); - break; - case constants.activityLogEvents.RECOMMENDATION_WITH_ISSUE: - this.logReccomendationWithIssueEvent(context); - break; - case constants.activityLogEvents.RECOMMENDATION_WITH_NO_ISSUE: - this.logReccomendationWithNoIssueEvent(context); - break; - case constants.activityLogEvents.DEADLINE_PASSED: - this.logDeadlinePassedEvent(context); - break; - case constants.activityLogEvents.FINAL_DECISION_REQUIRED: - this.logFinalDecisionRequiredEvent(context); - break; - case constants.activityLogEvents.MANUAL_EVENT: - this.logManualEvent(context); - } - } - async logReviewProcessStartedEvent(context) { const { accessRequest, user } = context; const version = accessRequest.versionTree[`${accessRequest.majorVersion}.0`]; const log = { - eventType: constants.activityLogEvents.REVIEW_PROCESS_STARTED, + eventType: constants.activityLogEvents.data_access_request.REVIEW_PROCESS_STARTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Review process started by custodian manager ${user.firstname} ${user.lastname}`, @@ -381,7 +449,7 @@ export default class activityLogService { const version = accessRequest.versionTree[`${accessRequest.majorVersion}.${accessRequest.amendmentIterations.length}`]; const log = { - eventType: constants.activityLogEvents.APPLICATION_APPROVED, + eventType: constants.activityLogEvents.data_access_request.APPLICATION_APPROVED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Application approved by custodian manager ${user.firstname} ${user.lastname}`, @@ -406,7 +474,7 @@ export default class activityLogService { ``; const log = { - eventType: constants.activityLogEvents.APPLICATION_APPROVED_WITH_CONDITIONS, + eventType: constants.activityLogEvents.data_access_request.APPLICATION_APPROVED_WITH_CONDITIONS, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Application approved with conditions by custodian manager ${user.firstname} ${user.lastname}`, @@ -433,7 +501,7 @@ export default class activityLogService { ``; const log = { - eventType: constants.activityLogEvents.APPLICATION_REJECTED, + eventType: constants.activityLogEvents.data_access_request.APPLICATION_REJECTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Application rejected by custodian manager ${user.firstname} ${user.lastname}`, @@ -454,7 +522,7 @@ export default class activityLogService { const version = accessRequest.versionTree[`${accessRequest.majorVersion}.0`]; const log = { - eventType: constants.activityLogEvents.APPLICATION_SUBMITTED, + eventType: constants.activityLogEvents.data_access_request.APPLICATION_SUBMITTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Version 1 application has been submitted by applicant ${user.firstname} ${user.lastname}`, @@ -479,7 +547,7 @@ export default class activityLogService { ``; const log = { - eventType: constants.activityLogEvents.AMENDMENT_SUBMITTED, + eventType: 
constants.activityLogEvents.data_access_request.AMENDMENT_SUBMITTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Amendment submitted by applicant ${user.firstname} ${user.lastname}. ${version.displayTitle} of this application has been created.`, @@ -545,7 +613,7 @@ export default class activityLogService { }); const logUpdate = { - eventType: constants.activityLogEvents.UPDATE_SUBMITTED, + eventType: constants.activityLogEvents.data_access_request.UPDATE_SUBMITTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), detailedText: detText, @@ -565,7 +633,7 @@ export default class activityLogService { await this.activityLogRepository.createActivityLog(logUpdate); const logUpdates = { - eventType: constants.activityLogEvents.UPDATES_SUBMITTED, + eventType: constants.activityLogEvents.data_access_request.UPDATES_SUBMITTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Updates submitted by applicant ${user.firstname} ${user.lastname}. ${currentVersion.displayTitle} of this application has been created.`, @@ -619,7 +687,7 @@ export default class activityLogService { }); const log = { - eventType: constants.activityLogEvents.UPDATE_REQUESTED, + eventType: constants.activityLogEvents.data_access_request.UPDATE_REQUESTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), @@ -649,7 +717,7 @@ export default class activityLogService { }); const log = { - eventType: constants.activityLogEvents.COLLABORATOR_ADDEDD, + eventType: constants.activityLogEvents.data_access_request.COLLABORATOR_ADDEDD, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Applicant ${user.firstname} ${user.lastname} added ${collaborator.firstname} ${collaborator.lastname} as a collaborator`, @@ -672,7 +740,7 @@ export default class activityLogService { }); const log = { - eventType: constants.activityLogEvents.COLLABORATOR_REMOVED, + eventType: constants.activityLogEvents.data_access_request.COLLABORATOR_REMOVED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Applicant ${user.firstname} ${user.lastname} removed ${collaborator.firstname} ${collaborator.lastname} as a collaborator`, @@ -715,7 +783,7 @@ export default class activityLogService { .join('')}`; const log = { - eventType: constants.activityLogEvents.WORKFLOW_ASSIGNED, + eventType: constants.activityLogEvents.data_access_request.WORKFLOW_ASSIGNED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `${workflow.workflowName} has been assigned by custodian manager ${user.firstname} ${user.lastname}`, @@ -740,7 +808,7 @@ export default class activityLogService { const step = workflow.steps.find(step => step.active); const log = { - eventType: constants.activityLogEvents.REVIEW_PHASE_STARTED, + eventType: constants.activityLogEvents.data_access_request.REVIEW_PHASE_STARTED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `${step.stepName} has started. 
${workflow.steps.findIndex(step => step.active) + 1} out of ${ @@ -771,7 +839,7 @@ export default class activityLogService { const detText = `Recommendation: Issues found\n${comments}`; const log = { - eventType: constants.activityLogEvents.RECOMMENDATION_WITH_ISSUE, + eventType: constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_ISSUE, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Recommendation with issues found sent by reviewer ${user.firstname} ${user.lastname}`, @@ -800,7 +868,7 @@ export default class activityLogService { const detText = `Recommendation: No issues found\n${comments}`; const log = { - eventType: constants.activityLogEvents.RECOMMENDATION_WITH_NO_ISSUE, + eventType: constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_NO_ISSUE, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Recommendation with no issues found sent by reviewer ${user.firstname} ${user.lastname}`, @@ -821,7 +889,7 @@ export default class activityLogService { const version = accessRequest.versionTree[`${accessRequest.majorVersion}.${accessRequest.amendmentIterations.length}`]; const log = { - eventType: constants.activityLogEvents.FINAL_DECISION_REQUIRED, + eventType: constants.activityLogEvents.data_access_request.FINAL_DECISION_REQUIRED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Final decision required by custodian by custodian manager ${user.firstname} ${user.lastname}. All review phases completed`, @@ -846,7 +914,7 @@ export default class activityLogService { if (!userType) return; const log = { - eventType: constants.activityLogEvents.PRESUBMISSION_MESSAGE, + eventType: constants.activityLogEvents.data_access_request.PRESUBMISSION_MESSAGE, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: createdDate, user: createdBy._id, @@ -905,7 +973,7 @@ export default class activityLogService { .join(''); const log = { - eventType: constants.activityLogEvents.DEADLINE_PASSED, + eventType: constants.activityLogEvents.data_access_request.DEADLINE_PASSED, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), plainText: `Deadline was ${daysSinceDeadlinePassed} ${daysSinceDeadlinePassed > 1 ? 
'days' : 'day'} ago for ${step.stepName} ${ @@ -931,7 +999,7 @@ export default class activityLogService { const { versionId, versionTitle, description, timestamp, user = {} } = context; const log = { - eventType: constants.activityLogEvents.MANUAL_EVENT, + eventType: constants.activityLogEvents.data_access_request.MANUAL_EVENT, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp, user: user._id, @@ -984,7 +1052,7 @@ export default class activityLogService { : `Message sent from applicant ${user.firstname} ${user.lastname}`; const log = { - eventType: constants.activityLogEvents.CONTEXTUAL_MESSAGE, + eventType: constants.activityLogEvents.data_access_request.CONTEXTUAL_MESSAGE, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), user: user._id, @@ -1039,7 +1107,7 @@ export default class activityLogService { : `Note added by applicant ${user.firstname} ${user.lastname}`; const log = { - eventType: constants.activityLogEvents.NOTE, + eventType: constants.activityLogEvents.data_access_request.NOTE, logType: constants.activityLogTypes.DATA_ACCESS_REQUEST, timestamp: Date.now(), user: user._id, @@ -1056,6 +1124,54 @@ export default class activityLogService { await this.activityLogRepository.createActivityLog(log); } + async logDataUseRegisterUpdated(context) { + const { dataUseRegister, updateObj, user } = context; + + let detHtml = ''; + let detText = ''; + + Object.keys(updateObj).forEach(updatedField => { + const oldValue = dataUseRegister[updatedField]; + const newValue = updateObj[updatedField]; + + detHtml = detHtml.concat( + `
<div>` +
					`<div>${dataUseRegister.projectTitle}</div>` +
					`<div>` +
						`<div>Field</div>` +
						`<div>${updatedField}</div>` +
					`</div>` +
					`<div>` +
						`<div>Previous Value</div>` +
						`<div>${oldValue ? oldValue : ''}</div>` +
					`</div>` +
					`<div>` +
						`<div>Updated Value</div>` +
						`<div>${newValue ? newValue : ''}</div>` +
					`</div>` +
				`</div>
` + ); + + detText = detText.concat( + `${dataUseRegister.projectTitle}\nField: ${updatedField}\nPrevious Value: ${oldValue}\nUpdated Value: ${newValue}\n\n` + ); + }); + + const logUpdate = { + eventType: constants.activityLogEvents.DATA_USE_REGISTER_UPDATED, + logType: constants.activityLogTypes.DATA_USE_REGISTER, + timestamp: Date.now(), + detailedText: detText, + plainText: `updates submitted by custodian ${user.firstname} ${user.lastname}.`, + html: `updates submitted by custodian ${user.firstname} ${user.lastname}.`, + detailedHtml: detHtml, + user: user._id, + userTypes: [constants.userTypes.APPLICANT, constants.userTypes.CUSTODIAN], + }; + + await this.activityLogRepository.createActivityLog(logUpdate); + } + getQuestionInfo(accessRequest, questionId) { const questionSet = accessRequest.jsonSchema.questionSets.find(qs => qs.questions.find(question => question.questionId === questionId)); diff --git a/src/resources/auth/__tests__/auth.utilities.test.js b/src/resources/auth/__tests__/auth.utilities.test.js new file mode 100644 index 00000000..75da1a4f --- /dev/null +++ b/src/resources/auth/__tests__/auth.utilities.test.js @@ -0,0 +1,72 @@ +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; + +describe('Utilities', () => { + describe('catchErrorAndRedirect middleware', () => { + it('should be a function', () => { + expect(typeof catchLoginErrorAndRedirect).toBe('function'); + }); + + it('should call next once when ( req.auth.err || !req.auth.user ) == false', () => { + let res = {}; + let req = { + auth: { + user: 'someUser', + err: null, + }, + }; + const next = jest.fn(); + + catchLoginErrorAndRedirect(req, res, next); + + // assert + expect(next.mock.calls.length).toBe(1); + }); + + it('should not call next when ( req.auth.err || !req.auth.user ) == true', () => { + let res = {}; + res.status = jest.fn().mockReturnValue(res); + res.redirect = jest.fn().mockReturnValue(res); + let req = { + auth: { + user: {}, + err: 'someErr', + }, + param: { + returnpage: 'somePage', + }, + }; + const next = jest.fn(); + + catchLoginErrorAndRedirect(req, res, next); + + // assert + expect(next.mock.calls.length).toBe(0); + expect(res.status.mock.calls.length).toBe(1); + expect(res.redirect.mock.calls.length).toBe(1); + }); + }); + + describe('loginAndSignToken middleware', () => { + it('should be a function', () => { + expect(typeof loginAndSignToken).toBe('function'); + }); + + it('should call res.login once', () => { + let res = {}; + res.status = jest.fn().mockReturnValue(res); + res.redirect = jest.fn().mockReturnValue(res); + let req = { + auth: { + user: 'someUser', + }, + }; + req.login = jest.fn().mockReturnValue(req); + const next = jest.fn(); + + loginAndSignToken(req, res, next); + + // assert + expect(req.login.mock.calls.length).toBe(1); + }); + }); +}); diff --git a/src/resources/auth/auth.route.js b/src/resources/auth/auth.route.js index cfb72157..fdc342f3 100644 --- a/src/resources/auth/auth.route.js +++ b/src/resources/auth/auth.route.js @@ -64,6 +64,27 @@ router.get('/status', function (req, res, next) { } }); } + if (adminArray[0].roles.includes(constants.roleTypes.ADMIN_DATA_USE)) { + const allTeams = await getTeams(); + allTeams.forEach(newTeam => { + const foundTeam = teams.find(team => team._id && team._id.toString() === newTeam._id.toString()); + if (!isEmpty(foundTeam)) { + const foundRole = foundTeam.roles.find(role => role === constants.roleTypes.REVIEWER); + if (isEmpty(foundRole)) { + foundTeam.roles.push(constants.roleTypes.REVIEWER); + } + 
foundTeam.isAdmin = true; + } else { + teams.push({ + _id: newTeam._id, + name: newTeam.publisher.name, + roles: [constants.roleTypes.REVIEWER], + type: newTeam.type, + isAdmin: true, + }); + } + }); + } } //Remove admin team and then sort teams alphabetically diff --git a/src/resources/auth/index.js b/src/resources/auth/index.js index 3edecf4a..d51545b3 100644 --- a/src/resources/auth/index.js +++ b/src/resources/auth/index.js @@ -11,6 +11,7 @@ const initialiseAuthentication = app => { strategies.LinkedinStrategy, strategies.GoogleStrategy, strategies.AzureStrategy, + strategies.OrcidStrategy, strategies.JWTStrategy )(app); }; diff --git a/src/resources/auth/strategies/azure.js b/src/resources/auth/strategies/azure.js index 9787d0cc..b0b2be03 100644 --- a/src/resources/auth/strategies/azure.js +++ b/src/resources/auth/strategies/azure.js @@ -3,151 +3,81 @@ import passportAzure from 'passport-azure-ad-oauth2'; import { to } from 'await-to-js'; import jwt from 'jsonwebtoken'; +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; import { getUserByProviderId } from '../../user/user.repository'; -import { updateRedirectURL } from '../../user/user.service'; -import { getObjectById } from '../../tool/data.repository'; import { createUser } from '../../user/user.service'; -import { signToken } from '../utils'; import { ROLES } from '../../user/user.roles'; -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; -const eventLogController = require('../../eventlog/eventlog.controller'); const AzureStrategy = passportAzure.Strategy; const strategy = app => { - const strategyOptions = { - clientID: process.env.AZURE_SSO_CLIENT_ID, - clientSecret: process.env.AZURE_SSO_CLIENT_SECRET, - callbackURL: `/auth/azure/callback`, - proxy: true - }; - - const verifyCallback = async (accessToken, refreshToken, params, profile, done) => { - - let decodedToken; - - try { - decodedToken = jwt.decode(params.id_token); - } catch(err) { - return done('loginError'); - }; - - if ( !decodedToken.oid || decodedToken.oid === '' ) return done('loginError'); - - let [err, user] = await to(getUserByProviderId(decodedToken.oid)); - if (err || user) { - return done(err, user) - }; - - const [createdError, createdUser] = await to( - createUser({ - provider: 'azure', - providerId: decodedToken.oid, - firstname: decodedToken.given_name, - lastname: decodedToken.family_name, - password: null, - email: decodedToken.email, - role: ROLES.Creator - }) - ); - - return done(createdError, createdUser); - }; - - passport.use('azure_ad_oauth2', new AzureStrategy(strategyOptions, verifyCallback)); - - app.get( - `/auth/azure`, - (req, res, next) => { - // Save the url of the user's current page so the app can redirect back to it after authorization - if (req.headers.referer) { - req.param.returnpage = req.headers.referer; - } - next(); - }, - passport.authenticate('azure_ad_oauth2') - ); - - app.get( - `/auth/azure/callback`, (req, res, next) => { - passport.authenticate('azure_ad_oauth2', (err, user, info) => { - - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); - - // failureRedirect - var redirect = '/'; - let returnPage = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - }; - - let redirectUrl = process.env.homeURL + redirect; - - return 
res.status(200).redirect(redirectUrl); - }; - - req.login(user, async err => { - if (err) { - return next(err); - } - - var redirect = '/'; - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - }; - - let [profileErr, profile] = await to(getObjectById(req.user.id)); - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); - }; - - if (req.param.returnpage) { - delete req.param.returnpage; - }; - - let redirectUrl = process.env.homeURL + redirect; - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - console.log(req.user) - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err.message); - return res.status(500).send('Error authenticating the user.'); - } - }; - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now(), - }; - - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { - httpOnly: true, - secure: process.env.api_url ? true : false, - }) - .redirect(redirectUrl); - }) - })(req, res, next); - } - ) - return app; + const strategyOptions = { + clientID: process.env.AZURE_SSO_CLIENT_ID, + clientSecret: process.env.AZURE_SSO_CLIENT_SECRET, + callbackURL: `/auth/azure/callback`, + proxy: true, + }; + + const verifyCallback = async (accessToken, refreshToken, params, profile, done) => { + let decodedToken; + + try { + decodedToken = jwt.decode(params.id_token); + } catch (err) { + return done('loginError'); + } + + if (!decodedToken.oid || decodedToken.oid === '') return done('loginError'); + + let [err, user] = await to(getUserByProviderId(decodedToken.oid)); + if (err || user) { + return done(err, user); + } + + const [createdError, createdUser] = await to( + createUser({ + provider: 'azure', + providerId: decodedToken.oid, + firstname: decodedToken.given_name, + lastname: decodedToken.family_name, + password: null, + email: decodedToken.email, + role: ROLES.Creator, + }) + ); + + return done(createdError, createdUser); + }; + + passport.use('azure_ad_oauth2', new AzureStrategy(strategyOptions, verifyCallback)); + + app.get( + `/auth/azure`, + (req, res, next) => { + // Save the url of the user's current page so the app can redirect back to it after authorization + if (req.headers.referer) { + req.param.returnpage = req.headers.referer; + } + next(); + }, + passport.authenticate('azure_ad_oauth2') + ); + + app.get( + `/auth/azure/callback`, + (req, res, next) => { + passport.authenticate('azure_ad_oauth2', (err, user) => { + req.auth = { + err: err, + user: user, + }; + next(); + })(req, res, next); + }, + catchLoginErrorAndRedirect, + loginAndSignToken + ); + return app; }; -export { strategy } \ No newline at end of file +export { strategy }; diff --git a/src/resources/auth/strategies/google.js b/src/resources/auth/strategies/google.js index 9104c3c5..1af2697b 100644 --- a/src/resources/auth/strategies/google.js +++ b/src/resources/auth/strategies/google.js @@ -2,17 +2,11 @@ import passport from 'passport'; import passportGoogle from 'passport-google-oauth'; import { to } from 
'await-to-js'; +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; import { getUserByProviderId } from '../../user/user.repository'; -import { updateRedirectURL } from '../../user/user.service'; -import { getObjectById } from '../../tool/data.repository'; import { createUser } from '../../user/user.service'; -import { signToken } from '../utils'; import { ROLES } from '../../user/user.roles'; -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; -const eventLogController = require('../../eventlog/eventlog.controller'); const GoogleStrategy = passportGoogle.OAuth2Strategy; const strategy = app => { @@ -64,83 +58,20 @@ const strategy = app => { }) ); - app.get('/auth/google/callback', (req, res, next) => { - passport.authenticate('google', (err, user) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - return res.status(200).redirect(redirectUrl); - } - - req.login(user, async err => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [, profile] = await to(getObjectById(req.user.id)); - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err.message); - return res.status(500).send('Error authenticating the user.'); - } - } - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now(), + app.get( + '/auth/google/callback', + (req, res, next) => { + passport.authenticate('google', (err, user) => { + req.auth = { + err: err, + user: user, }; - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { - httpOnly: true, - secure: process.env.api_url ? 
true : false, - }) - .redirect(redirectUrl); - }); - })(req, res, next); - }); - + next(); + })(req, res, next); + }, + catchLoginErrorAndRedirect, + loginAndSignToken + ); return app; }; diff --git a/src/resources/auth/strategies/index.js b/src/resources/auth/strategies/index.js index 28c09cc7..527dcf85 100644 --- a/src/resources/auth/strategies/index.js +++ b/src/resources/auth/strategies/index.js @@ -3,5 +3,6 @@ import { strategy as GoogleStrategy } from './google'; import { strategy as LinkedinStrategy } from './linkedin'; import { strategy as OdicStrategy } from './oidc'; import { strategy as AzureStrategy } from './azure'; +import { strategy as OrcidStrategy } from './orcid'; -export { JWTStrategy, GoogleStrategy, LinkedinStrategy, OdicStrategy, AzureStrategy }; +export { JWTStrategy, GoogleStrategy, LinkedinStrategy, OdicStrategy, AzureStrategy, OrcidStrategy }; diff --git a/src/resources/auth/strategies/linkedin.js b/src/resources/auth/strategies/linkedin.js index 80cabb0b..584d639c 100644 --- a/src/resources/auth/strategies/linkedin.js +++ b/src/resources/auth/strategies/linkedin.js @@ -2,17 +2,11 @@ import passport from 'passport'; import passportLinkedin from 'passport-linkedin-oauth2'; import { to } from 'await-to-js'; +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; import { getUserByProviderId } from '../../user/user.repository'; -import { getObjectById } from '../../tool/data.repository'; -import { updateRedirectURL } from '../../user/user.service'; import { createUser } from '../../user/user.service'; -import { signToken } from '../utils'; import { ROLES } from '../../user/user.roles'; -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; -const eventLogController = require('../../eventlog/eventlog.controller'); const LinkedinStrategy = passportLinkedin.OAuth2Strategy; const strategy = app => { @@ -62,83 +56,20 @@ const strategy = app => { }) ); - app.get('/auth/linkedin/callback', (req, res, next) => { - passport.authenticate('linkedin', (err, user) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - return res.status(200).redirect(redirectUrl); - } - - req.login(user, async err => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [, profile] = await to(getObjectById(req.user.id)); - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err.message); - return res.status(500).send('Error authenticating the user.'); - } - } - 
- //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now(), + app.get( + '/auth/linkedin/callback', + (req, res, next) => { + passport.authenticate('linkedin', (err, user) => { + req.auth = { + err: err, + user: user, }; - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { - httpOnly: true, - secure: process.env.api_url ? true : false, - }) - .redirect(redirectUrl); - }); - })(req, res, next); - }); - + next(); + })(req, res, next); + }, + catchLoginErrorAndRedirect, + loginAndSignToken + ); return app; }; diff --git a/src/resources/auth/strategies/oidc.js b/src/resources/auth/strategies/oidc.js index 6493bb92..3748e8fd 100644 --- a/src/resources/auth/strategies/oidc.js +++ b/src/resources/auth/strategies/oidc.js @@ -2,20 +2,15 @@ import passport from 'passport'; import passportOidc from 'passport-openidconnect'; import { to } from 'await-to-js'; +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; import { getUserByProviderId } from '../../user/user.repository'; -import { getObjectById } from '../../tool/data.repository'; -import { updateRedirectURL } from '../../user/user.service'; import { createUser } from '../../user/user.service'; -import { signToken } from '../utils'; +import { UserModel } from '../../user/user.model'; import { ROLES } from '../../user/user.roles'; -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; import { isNil } from 'lodash'; + const OidcStrategy = passportOidc.Strategy; const baseAuthUrl = process.env.AUTH_PROVIDER_URI; -const eventLogController = require('../../eventlog/eventlog.controller'); -import { UserModel } from '../../user/user.model'; const strategy = app => { const strategyOptions = { @@ -71,83 +66,20 @@ const strategy = app => { passport.authenticate('oidc') ); - app.get('/auth/oidc/callback', (req, res, next) => { - passport.authenticate('oidc', (err, user) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - return res.status(200).redirect(redirectUrl); - } - - req.login(user, async err => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [, profile] = await to(getObjectById(req.user.id)); - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err.message); - return 
res.status(500).send('Error authenticating the user.'); - } - } - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now(), + app.get( + '/auth/oidc/callback', + (req, res, next) => { + passport.authenticate('oidc', (err, user) => { + req.auth = { + err: err, + user: user, }; - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { - httpOnly: true, - secure: process.env.api_url ? true : false, - }) - .redirect(redirectUrl); - }); - })(req, res, next); - }); - + next(); + })(req, res, next); + }, + catchLoginErrorAndRedirect, + loginAndSignToken + ); return app; }; diff --git a/src/resources/auth/strategies/orcid.js b/src/resources/auth/strategies/orcid.js new file mode 100644 index 00000000..38f8fa10 --- /dev/null +++ b/src/resources/auth/strategies/orcid.js @@ -0,0 +1,79 @@ +import passport from 'passport'; +import passportOrcid from 'passport-orcid'; +import { to } from 'await-to-js'; + +import { catchLoginErrorAndRedirect, loginAndSignToken } from '../utils'; +import { getUserByProviderId } from '../../user/user.repository'; +import { createUser } from '../../user/user.service'; +import { ROLES } from '../../user/user.roles'; + +const OrcidStrategy = passportOrcid.Strategy; + +const strategy = app => { + const strategyOptions = { + clientID: process.env.ORCID_SSO_CLIENT_ID, + clientSecret: process.env.ORCID_SSO_CLIENT_SECRET, + callbackURL: `/auth/orcid/callback`, + scope: `/authenticate`, + proxy: true, + }; + + if (process.env.ORCID_SSO_ENV) { + strategyOptions.sandbox = process.env.ORCID_SSO_ENV; + } + + const verifyCallback = async (accessToken, refreshToken, params, profile, done) => { + if (!params.orcid || params.orcid === '') return done('loginError'); + + let [err, user] = await to(getUserByProviderId(params.orcid)); + if (err || user) { + return done(err, user); + } + + const [createdError, createdUser] = await to( + createUser({ + provider: 'orcid', + providerId: params.orcid, + firstname: params.name.split(' ')[0], + lastname: params.name.split(' ')[1], + password: null, + email: '', + role: ROLES.Creator, + }) + ); + + return done(createdError, createdUser); + }; + + passport.use('orcid', new OrcidStrategy(strategyOptions, verifyCallback)); + + app.get( + `/auth/orcid`, + (req, res, next) => { + // Save the url of the user's current page so the app can redirect back to it after authorization + if (req.headers.referer) { + req.param.returnpage = req.headers.referer; + } + next(); + }, + passport.authenticate('orcid') + ); + + app.get( + '/auth/orcid/callback', + (req, res, next) => { + passport.authenticate('orcid', (err, user) => { + req.auth = { + err: err, + user: user, + }; + next(); + })(req, res, next); + }, + catchLoginErrorAndRedirect, + loginAndSignToken + ); + return app; +}; + +export { strategy }; diff --git a/src/resources/auth/utils.js b/src/resources/auth/utils.js index e6bc69cb..97cb301b 100644 --- a/src/resources/auth/utils.js +++ b/src/resources/auth/utils.js @@ -1,6 +1,11 @@ /* eslint-disable no-undef */ import passport from 'passport'; import jwt from 'jsonwebtoken'; +import { to } from 'await-to-js'; +import Url from 'url'; +import { isEmpty } from 'lodash'; +import queryString from 'query-string'; + import { ROLES } from '../user/user.roles'; import { UserModel } from '../user/user.model'; import { Course } from 
'../course/course.model'; @@ -8,7 +13,11 @@ import { Collections } from '../collections/collections.model'; import { Data } from '../tool/data.model'; import { TeamModel } from '../team/team.model'; import constants from '../utilities/constants.util'; -import { isEmpty } from 'lodash'; +import { discourseLogin } from './sso/sso.discourse.service'; +import { updateRedirectURL } from './../user/user.service'; +import { getObjectById } from './../tool/data.repository'; + +const eventLogController = require('./../eventlog/eventlog.controller'); const setup = () => { passport.serializeUser((user, done) => done(null, user._id)); @@ -46,18 +55,20 @@ const camundaToken = () => { ); }; -const checkIsInRole = (...roles) => (req, res, next) => { - if (!req.user) { - return res.redirect('/login'); - } +const checkIsInRole = + (...roles) => + (req, res, next) => { + if (!req.user) { + return res.redirect('/login'); + } - const hasRole = roles.find(role => req.user.role === role); - if (!hasRole) { - return res.redirect('/login'); - } + const hasRole = roles.find(role => req.user.role === role); + if (!hasRole) { + return res.redirect('/login'); + } - return next(); -}; + return next(); + }; const whatIsRole = req => { if (!req.user) { @@ -116,4 +127,90 @@ const getTeams = async () => { return teams; }; -export { setup, signToken, camundaToken, checkIsInRole, whatIsRole, checkIsUser, checkAllowedToAccess, getTeams }; +const catchLoginErrorAndRedirect = (req, res, next) => { + if (req.auth.err || !req.auth.user) { + if (req.auth.err === 'loginError') { + return res.status(200).redirect(process.env.homeURL + '/loginerror'); + } + + let redirect = '/'; + let returnPage = null; + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + return res.status(200).redirect(redirectUrl); + } + next(); +}; + +const loginAndSignToken = (req, res, next) => { + req.login(req.auth.user, async err => { + if (err) { + return next(err); + } + + let redirect = '/'; + let returnPage = null; + let queryStringParsed = null; + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + queryStringParsed = queryString.parse(returnPage.query); + } + + let [, profile] = await to(getObjectById(req.user.id)); + if (!profile) { + await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); + return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); + } + + if (req.param.returnpage) { + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { + try { + redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); + } catch (err) { + console.error(err.message); + return res.status(500).send('Error authenticating the user.'); + } + } + + //Build event object for user login and log it to DB + let eventObj = { + userId: req.user.id, + event: `user_login_${req.user.provider}`, + timestamp: Date.now(), + }; + + await eventLogController.logEvent(eventObj); + + return res + .status(200) + .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { + httpOnly: true, + secure: process.env.api_url ? 
true : false, + }) + .redirect(redirectUrl); + }); +}; + +export { + setup, + signToken, + camundaToken, + checkIsInRole, + whatIsRole, + checkIsUser, + checkAllowedToAccess, + getTeams, + catchLoginErrorAndRedirect, + loginAndSignToken, +}; diff --git a/src/resources/base/entity.js b/src/resources/base/entity.js index 2b21d3fe..7efee620 100644 --- a/src/resources/base/entity.js +++ b/src/resources/base/entity.js @@ -1,6 +1,8 @@ +import helper from '../utilities/helper.util'; + const transform = require('transformobject').transform; -class Entity { +export default class Entity { equals (other) { if (other instanceof Entity === false) { @@ -24,6 +26,8 @@ class Entity { transformTo(format, {strict} = {strict: false}) { return transform(this, format, { strict }); } -} -module.exports = Entity; \ No newline at end of file + generateId () { + return helper.generatedNumericId(); + } +} \ No newline at end of file diff --git a/src/resources/base/repository.js b/src/resources/base/repository.js index 3294ab52..ea73d430 100644 --- a/src/resources/base/repository.js +++ b/src/resources/base/repository.js @@ -48,7 +48,7 @@ export default class Repository { // Pagination const page = query.page * 1 || 1; - const limit = query.limit * 1 || null; + const limit = query.limit * 1 || 500; const skip = (page - 1) * limit; results = results.skip(skip).limit(limit); diff --git a/src/resources/cohort/cohort.model.js b/src/resources/cohort/cohort.model.js index 5c8676f8..6cd0a9fe 100644 --- a/src/resources/cohort/cohort.model.js +++ b/src/resources/cohort/cohort.model.js @@ -8,6 +8,7 @@ const cohortSchema = new Schema( pid: String, type: String, name: String, + description: String, activeflag: String, userId: Number, uploaders: [], @@ -16,6 +17,8 @@ const cohortSchema = new Schema( changeLog: String, updatedAt: Date, lastRefresh: Date, + datasetPids: [], + filterCriteria: [], // fields from RQuest request_id: String, diff --git a/src/resources/cohort/cohort.route.js b/src/resources/cohort/cohort.route.js index 6debae9f..705d578a 100644 --- a/src/resources/cohort/cohort.route.js +++ b/src/resources/cohort/cohort.route.js @@ -2,6 +2,7 @@ import express from 'express'; import CohortController from './cohort.controller'; import { cohortService } from './dependency'; import { logger } from '../utilities/logger'; +import { resultLimit } from '../../config/middleware'; const router = express.Router(); const cohortController = new CohortController(cohortService); @@ -17,7 +18,7 @@ router.get('/:id', logger.logRequestMiddleware({ logCategory, action: 'Viewed co // @route GET /api/v1/cohorts // @desc Returns a collection of cohorts based on supplied query parameters // @access Public -router.get('/', logger.logRequestMiddleware({ logCategory, action: 'Viewed cohorts data' }), (req, res) => +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), logger.logRequestMiddleware({ logCategory, action: 'Viewed cohorts data' }), (req, res) => cohortController.getCohorts(req, res) ); diff --git a/src/resources/cohort/cohort.service.js b/src/resources/cohort/cohort.service.js index 9429889b..fbb7d3b1 100644 --- a/src/resources/cohort/cohort.service.js +++ b/src/resources/cohort/cohort.service.js @@ -1,4 +1,5 @@ import { v4 as uuidv4 } from 'uuid'; +import { Data } from '../tool/data.model'; export default class CohortService { constructor(cohortRepository) { @@ -32,19 +33,34 @@ export default class CohortService { } // 3. 
Extract PIDs from cohort object so we can build up related objects - let pids = body.cohort.input.collections.map(collection => { - return collection.external_id; + let datasetIdentifiersPromises = await body.cohort.input.collections.map(async collection => { + let dataset = await Data.findOne({ pid: collection.external_id, activeflag: 'active' }, { datasetid: 1 }).lean(); + return { pid: collection.external_id, datasetId: dataset.datasetid }; }); + let datasetIdentifiers = await Promise.all(datasetIdentifiersPromises); let relatedObjects = []; - pids.forEach(pid => { + let datasetPids = []; + datasetIdentifiers.forEach(datasetIdentifier => { + datasetPids.push(datasetIdentifier.pid); relatedObjects.push({ objectType: 'dataset', - pid, + pid: datasetIdentifier.pid, + objectId: datasetIdentifier.datasetId, isLocked: true, }); }); - // 4. Build document object and save to DB + // 4. Extract filter criteria used in query + let filterCriteria = []; + body.cohort.input.cohorts.forEach(cohort => { + cohort.groups.forEach(group => { + group.rules.forEach(rule => { + filterCriteria.push(rule.value); + }); + }); + }); + + // 5. Build document object and save to DB const document = { id: uniqueId, pid: uuid, @@ -59,7 +75,11 @@ export default class CohortService { cohort: body.cohort, items: body.items, rquestRelatedObjects: body.relatedObjects, + datasetPids, + filterCriteria, relatedObjects, + description: '', + publicflag: true, }; return this.cohortRepository.addCohort(document); } diff --git a/src/resources/cohortprofiling/cohortprofiling.route.js b/src/resources/cohortprofiling/cohortprofiling.route.js index 49bb0563..ce7df438 100644 --- a/src/resources/cohortprofiling/cohortprofiling.route.js +++ b/src/resources/cohortprofiling/cohortprofiling.route.js @@ -1,7 +1,9 @@ import express from 'express'; import CohortProfilingController from './cohortprofiling.controller'; import { cohortProfilingService } from './dependency'; +import { resultLimit } from '../../config/middleware'; import multer from 'multer'; + const upload = multer(); const cohortProfilingController = new CohortProfilingController(cohortProfilingService); @@ -15,7 +17,7 @@ router.get('/:pid/:tableName/:variable', (req, res) => cohortProfilingController // @route GET api/v1/cohortprofiling // @desc Returns a collection of cohort profiling data based on supplied query parameters // @access Public -router.get('/', (req, res) => cohortProfilingController.getCohortProfiling(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => cohortProfilingController.getCohortProfiling(req, res)); // @route POST api/v1/cohortprofiling // @desc Consumes a JSON file containing cohort profiling data, transforms it and saves to MongoDB. 
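Note on the result limiting wired into the two routes above: both public list endpoints now call resultLimit(req, res, next, 100) before their controllers, with resultLimit imported from src/config/middleware. That middleware's implementation is not part of this diff, so the sketch below only illustrates the kind of guard the call sites imply; the maxLimit parameter name and the error payload are assumptions rather than the actual code.

// Sketch only: the real middleware lives in src/config/middleware.js and is not shown in this changeset.
// Rejects list requests whose ?limit query parameter exceeds the per-route maximum.
export const resultLimit = (req, res, next, maxLimit) => {
	const requestedLimit = parseInt(req.query.limit, 10);
	if (!isNaN(requestedLimit) && requestedLimit > maxLimit) {
		// Hypothetical error shape, for illustration only.
		return res.status(400).json({
			success: false,
			message: `Maximum request limit exceeded, only ${maxLimit} results may be requested per page`,
		});
	}
	return next();
};

Read together with the base repository change earlier in this diff, where a query with no limit now defaults to 500 results rather than no limit at all, this appears intended to stop the public cohort and cohort profiling endpoints from serving unbounded result sets.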
diff --git a/src/resources/collections/__mocks__/multi.collection.js b/src/resources/collections/__mocks__/multi.collection.js new file mode 100644 index 00000000..61375ed0 --- /dev/null +++ b/src/resources/collections/__mocks__/multi.collection.js @@ -0,0 +1,76 @@ +export const mock_collections = [ + { + _id: { + oid: '6168030b0e24c03595166261', + }, + authors: [12345], + keywords: [], + relatedObjects: [ + { + _id: { + $oid: '6168030b0e24c03595166262', + }, + objectId: 'af434b05-52a7-4ff1-92f5-e2dd38a574aa', + reason: '', + objectType: 'dataset', + pid: 'fdd9e5ab-442f-45d0-a004-f581a3ac809c', + user: 'John Doe', + updated: '14 Oct 2021', + }, + ], + id: 138879762298581, + name: 'Test collection 1', + description: 'A test collection', + imageLink: '', + activeflag: 'active', + publicflag: true, + updatedon: { + $date: '2021-10-14T12:10:13.817Z', + }, + createdAt: { + $date: '2021-10-14T10:14:35.308Z', + }, + updatedAt: { + $date: '2021-10-14T12:10:14.563Z', + }, + __v: 0, + counter: 1, + }, + { + _id: { + oid: '6168030b0e24c03595166262', + }, + authors: [12345], + keywords: [], + relatedObjects: [ + { + _id: { + $oid: '6168030b0e24c03595166262', + }, + objectId: 'af434b05-52a7-4ff1-92f5-e2dd38a574aa', + reason: '', + objectType: 'dataset', + pid: 'fdd9e5ab-442f-45d0-a004-f581a3ac809c', + user: 'John Doe', + updated: '14 Oct 2021', + }, + ], + id: 138879762298582, + name: 'Test collection 2', + description: 'A test collection', + imageLink: '', + activeflag: 'active', + publicflag: true, + updatedon: { + $date: '2021-10-14T12:10:13.817Z', + }, + createdAt: { + $date: '2021-10-14T10:14:35.308Z', + }, + updatedAt: { + $date: '2021-10-14T12:10:14.563Z', + }, + __v: 0, + counter: 1, + }, +]; diff --git a/src/resources/collections/__mocks__/single.collection.js b/src/resources/collections/__mocks__/single.collection.js new file mode 100644 index 00000000..ff799f0f --- /dev/null +++ b/src/resources/collections/__mocks__/single.collection.js @@ -0,0 +1,68 @@ +export const mock_collection = [ + { + _id: '612e0d035671f75be2461dfa', + authors: [8470291714590257], + keywords: [], + relatedObjects: [ + { + _id: '612e0d035671f75be2461dfb', + objectId: '6ec3a47b-447a-4b22-9b7a-43acae5d408f', + reason: '', + objectType: 'dataset', + pid: 'fce78329-0de1-45f2-9ff1-e1b4af50528e', + user: 'John Doe', + updated: '31 Aug 2021', + }, + ], + id: 20905331408744290, + name: 'Test', + description: 'TestTestTestTestTestTestTestTestTestTest', + imageLink: '', + activeflag: 'active', + publicflag: true, + updatedon: '2021-08-31T11:06:19.329Z', + createdAt: '2021-08-31T11:05:39.129Z', + updatedAt: '2021-10-14T14:38:21.800Z', + __v: 0, + counter: 3, + persons: [ + { + _id: '6128a6f9dd361d15499db644', + categories: { programmingLanguage: [] }, + tags: { features: [], topics: [] }, + document_links: { doi: [], pdf: [], html: [] }, + datasetfields: { geographicCoverage: [], physicalSampleAvailability: [], technicaldetails: [], versionLinks: [], phenotypes: [] }, + authors: [], + emailNotifications: true, + showOrganisation: true, + structuralMetadata: [], + datasetVersionIsV1: false, + toolids: [], + datasetids: [], + id: 8470291714590257, + type: 'person', + firstname: 'John', + lastname: 'Doe', + bio: '', + link: '', + orcid: 'https://orcid.org/', + activeflag: 'active', + terms: true, + sector: 'Academia', + organisation: '', + showSector: true, + showBio: true, + showLink: true, + showOrcid: true, + showDomain: true, + profileComplete: true, + relatedObjects: [], + programmingLanguage: [], + createdAt: 
'2021-08-27T08:48:57.710Z', + updatedAt: '2021-08-27T10:23:11.582Z', + __v: 0, + counter: 1, + }, + ], + }, +]; diff --git a/src/resources/collections/__tests__/collections.controller.test.js b/src/resources/collections/__tests__/collections.controller.test.js new file mode 100644 index 00000000..9048e336 --- /dev/null +++ b/src/resources/collections/__tests__/collections.controller.test.js @@ -0,0 +1,343 @@ +import sinon from 'sinon'; + +import { mock_collections } from '../__mocks__/multi.collection'; +import { mock_collection } from '../__mocks__/single.collection'; +import CollectionsController from '../collections.controller'; +import CollectionsService from '../collections.service'; +import { Data } from '../../tool/data.model'; +import { filtersService } from '../../filters/dependency'; +import { Collections } from '../collections.model'; + +afterEach(function () { + sinon.restore(); +}); + +describe('With the Collections controller class', () => { + const collectionsService = new CollectionsService(); + const collectionsController = new CollectionsController(collectionsService); + + describe('Using the getList method', () => { + describe('As an ADMIN user', () => { + let req = { + user: { + role: 'Admin', + }, + query: {}, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should return a list of collections for an Admin user', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionsAdmin').returns(mock_collections); + + await collectionsController.getList(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: true, data: mock_collections })).toBe(true); + }); + + it('Should return an error if the service call fails for an Admin user', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionsAdmin').returns(Promise.reject('error')); + + await collectionsController.getList(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('As a CREATOR user', () => { + let req = { + user: { + role: 'Creator', + id: 12345, + }, + query: {}, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should return a list of collections for a Creator user', async () => { + let stub = sinon.stub(collectionsService, 'getCollections').returns(mock_collections); + + await collectionsController.getList(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: true, data: mock_collections })).toBe(true); + }); + + it('Should return an error if the service call fails for a Creator user', async () => { + let stub = sinon.stub(collectionsService, 'getCollections').throws(); + + const badCall = async () => { + await collectionsController.getList(req, res); + }; + + try { + badCall(); + } catch (error) { + expect(stub.calledOnce).toBe(true); + expect(badCall).to.have.been.calledWith(error); + } + }); + }); + }); + + describe('Using the getCollection method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + id: 138879762298581, + }, + query: {}, + }; + let res, json, status; + json = sinon.spy(); + status = sinon.spy(); + res = { json, status }; + + it('Should call the getCollection service and return data, if data is exists', async () => { + let stub = sinon.stub(collectionsService, 'getCollection').returns(mock_collection); + + await collectionsController.getCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: true, data: 
mock_collection })).toBe(true); + }); + + it('Should return a 404 error if no data exists', async () => { + let stub = sinon.stub(collectionsService, 'getCollection').returns([]); + + await collectionsController.getCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(status.calledOnce).toBe(true); + expect(status.calledWith(404)).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'getCollection').returns(Promise.reject('error')); + + await collectionsController.getCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the getCollectionRelatedResources method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + collectionID: 138879762298581, + }, + query: {}, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the getCollectionsObject service and return data', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionObjects').returns(mock_collections[0].relatedObjects[0]); + + await collectionsController.getCollectionRelatedResources(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: true, data: mock_collections[0].relatedObjects[0] })).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionObjects').returns(Promise.reject('error')); + + await collectionsController.getCollectionRelatedResources(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the getCollectionByEntity method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + entityID: 12345, + }, + query: {}, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the getCollectionByEntity service and return data', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionByEntity').returns(mock_collection); + let dataStub = sinon.stub(Data, 'find').returns([]); + + await collectionsController.getCollectionByEntity(req, res); + expect(stub.calledOnce).toBe(true); + expect(dataStub.calledOnce).toBe(true); + expect(json.calledWith({ success: true, data: mock_collection })).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'getCollectionByEntity').returns(Promise.reject('error')); + let dataStub = sinon.stub(Data, 'find').returns([]); + + await collectionsController.getCollectionByEntity(req, res); + expect(stub.calledOnce).toBe(true); + expect(dataStub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the editCollection method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + id: 12345, + }, + query: {}, + body: { + publicflag: true, + previousPublicFlag: false, + }, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the editCollection service, return data, optimise filters and send notifications', async () => { + let stub = sinon.stub(collectionsService, 'editCollection'); + let collectionStub = sinon.stub(Collections, 'find').returns(mock_collections[0]); + let filterStub = sinon.stub(filtersService, 'optimiseFilters'); + + await 
collectionsController.editCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(collectionStub.calledOnce).toBe(true); + expect(filterStub.calledOnce).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'editCollection').returns(Promise.reject('error')); + + await collectionsController.editCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the addCollection method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + id: 12345, + }, + query: {}, + body: { + name: 'test', + description: 'test', + imageLink: '', + authors: [123, 456], + relatedObjects: [], + publicflag: true, + keywords: [], + }, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the addCollection service, return the ID and send notifications', async () => { + let stub = sinon.stub(collectionsService, 'addCollection'); + let messageStub = sinon.stub(collectionsController, 'createMessage'); + let emailStub = sinon.stub(collectionsService, 'sendEmailNotifications'); + + await collectionsController.addCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(messageStub.callCount).toBe(3); + expect(emailStub.calledOnce).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'addCollection').returns(Promise.reject('error')); + let messageStub = sinon.stub(collectionsController, 'createMessage'); + let emailStub = sinon.stub(collectionsService, 'sendEmailNotifications'); + + await collectionsController.addCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(messageStub.callCount).toBe(3); + expect(emailStub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the deleteCollection method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + id: 12345, + }, + query: {}, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the deleteCollection service', async () => { + let stub = sinon.stub(collectionsService, 'deleteCollection'); + + await collectionsController.deleteCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: true })).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = sinon.stub(collectionsService, 'deleteCollection').returns(Promise.reject('error')); + + await collectionsController.deleteCollection(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); + + describe('Using the changeStatus method', () => { + let req = { + user: { + role: 'Creator', + }, + params: { + id: 12345, + }, + query: {}, + body: { + activeflag: 'archive', + }, + }; + let res, json; + json = sinon.spy(); + res = { json }; + + it('Should call the changeStatus service', async () => { + let stub = sinon.stub(collectionsService, 'changeStatus'); + let filterStub = sinon.stub(filtersService, 'optimiseFilters'); + + await collectionsController.changeStatus(req, res); + expect(stub.calledOnce).toBe(true); + expect(filterStub.calledOnce).toBe(true); + expect(json.calledWith({ success: true })).toBe(true); + }); + + it('Should return an error if the service call fails', async () => { + let stub = 
sinon.stub(collectionsService, 'changeStatus').returns(Promise.reject('error')); + + await collectionsController.changeStatus(req, res); + expect(stub.calledOnce).toBe(true); + expect(json.calledWith({ success: false, error: 'error' })).toBe(true); + }); + }); +}); diff --git a/src/resources/collections/collections.controller.js b/src/resources/collections/collections.controller.js new file mode 100644 index 00000000..632cc4da --- /dev/null +++ b/src/resources/collections/collections.controller.js @@ -0,0 +1,228 @@ +import _ from 'lodash'; +import escape from 'escape-html'; + +import Controller from '../base/controller'; +import inputSanitizer from '../utilities/inputSanitizer'; +import urlValidator from '../utilities/urlValidator'; +import { filtersService } from '../filters/dependency'; +import helper from '../utilities/helper.util'; +import { Collections } from '../collections/collections.model'; +import { Data } from '../tool/data.model'; +import { ROLES } from '../user/user.roles'; +import { MessagesModel } from '../message/message.model'; +import { UserModel } from '../user/user.model'; + +export default class CollectionsController extends Controller { + constructor(collectionsService) { + super(collectionsService); + this.collectionsService = collectionsService; + } + + async getList(req, res) { + let role = req.user.role; + let startIndex = 0; + let limit = 40; + let searchString = ''; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + if (req.query.status) { + status = req.query.status; + } + + if (role === ROLES.Admin) { + try { + const data = await this.collectionsService.getCollectionsAdmin(searchString, status, startIndex, limit); + return res.json({ success: true, data: data }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } else if (role === ROLES.Creator) { + try { + let idString = req.user.id; + if (req.query.id) { + idString = req.query.id; + } + const data = await this.collectionsService.getCollections(idString, status, startIndex, limit); + return res.json({ success: true, data: data }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + } + + async getCollection(req, res) { + let collectionID = parseInt(req.params.collectionID); + + try { + const data = await this.collectionsService.getCollection(collectionID); + if (_.isEmpty(data)) { + return res.status(404).send(`Collection not found for ID: ${escape(collectionID)}`); + } + data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); + return res.json({ success: true, data: data }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + + async getCollectionRelatedResources(req, res) { + let collectionID = parseInt(req.params.collectionID); + + try { + const data = await this.collectionsService.getCollectionObjects(collectionID); + return res.json({ success: true, data: data }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + + async getCollectionByEntity(req, res) { + let entityID = req.params.entityID; + let dataVersions = await Data.find({ pid: entityID }, { _id: 0, datasetid: 1 }); + let dataVersionsArray = dataVersions.map(a => a.datasetid); + dataVersionsArray.push(entityID); + + try { + const data = await this.collectionsService.getCollectionByEntity(entityID, dataVersionsArray); + return res.json({ success: true, data: data }); + } catch 
(err) { + res.json({ success: false, error: err }); + } + } + + async editCollection(req, res) { + let collectionID = parseInt(req.params.id); + let { name, description, imageLink, authors, relatedObjects, publicflag, keywords, previousPublicFlag, collectionCreator } = req.body; + imageLink = urlValidator.validateURL(imageLink); + + let updatedCollection = { name, description, imageLink, authors, relatedObjects, publicflag, keywords }; + + try { + await this.collectionsService.editCollection(collectionID, updatedCollection); + filtersService.optimiseFilters('collection'); + await Collections.find({ id: collectionID }, { publicflag: 1, id: 1, activeflag: 1, authors: 1, name: 1 }).then(async res => { + if (previousPublicFlag === false && publicflag === true) { + await this.collectionsService.sendEmailNotifications(res[0], res[0].activeflag, collectionCreator, true); + + if (res[0].authors) { + res[0].authors.forEach(async authorId => { + await this.createMessage(authorId, res[0], res[0].activeflag, collectionCreator, true); + }); + } + + await this.createMessage(0, res[0], res[0].activeflag, collectionCreator, true); + } + }); + return res.json({ success: true }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + + async addCollection(req, res) { + let collections = new Collections(); + const collectionCreator = req.body.collectionCreator; + const { name, description, imageLink, authors, relatedObjects, publicflag, keywords } = req.body; + + collections.id = parseInt(Math.random().toString().replace('0.', '')); + collections.name = inputSanitizer.removeNonBreakingSpaces(name); + collections.description = inputSanitizer.removeNonBreakingSpaces(description); + collections.imageLink = imageLink; + collections.authors = authors; + collections.relatedObjects = relatedObjects; + collections.activeflag = 'active'; + collections.publicflag = publicflag; + collections.keywords = keywords; + collections.updatedon = Date.now(); + + if (collections.authors) { + collections.authors.forEach(async authorId => { + await this.createMessage(authorId, collections, collections.activeflag, collectionCreator); + }); + } + + await this.createMessage(0, collections, collections.activeflag, collectionCreator); + + await this.collectionsService.sendEmailNotifications(collections, collections.activeflag, collectionCreator); + + try { + await this.collectionsService.addCollection(collections); + res.json({ success: true, id: collections.id }); + } catch (err) { + res.json({ success: false, error: err }); + } + } + + async changeStatus(req, res) { + const collectionID = parseInt(req.params.id); + let { activeflag } = req.body; + activeflag = activeflag.toString(); + + try { + await this.collectionsService.changeStatus(collectionID, activeflag); + filtersService.optimiseFilters('collection'); + return res.json({ success: true }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + + async deleteCollection(req, res) { + const collectionID = parseInt(req.params.id); + try { + await this.collectionsService.deleteCollection(collectionID); + res.json({ success: true }); + } catch (err) { + res.json({ success: false, error: err }); + } + } + + async createMessage(authorId, collections, activeflag, collectionCreator, isEdit) { + let message = new MessagesModel(); + + const messageRecipients = await UserModel.find({ $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] }); + async function saveMessage() { + message.messageID = 
parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = authorId; + message.messageObjectID = collections.id; + message.messageSent = Date.now(); + message.isRead = false; + await message.save(); + } + + if (authorId === 0) { + message.messageType = 'added collection'; + message.messageDescription = this.collectionsService.generateCollectionEmailSubject( + 'Admin', + collections.publicflag, + collections.name, + false, + isEdit + ); + saveMessage(); + } + + for (let messageRecipient of messageRecipients) { + if (activeflag === 'active' && authorId === messageRecipient.id) { + message.messageType = 'added collection'; + message.messageDescription = this.collectionsService.generateCollectionEmailSubject( + 'Creator', + collections.publicflag, + collections.name, + authorId === collectionCreator.id ? true : false, + isEdit + ); + saveMessage(); + } + } + } +} diff --git a/src/resources/collections/collections.repository.js b/src/resources/collections/collections.repository.js index 2c9d2816..70841db8 100644 --- a/src/resources/collections/collections.repository.js +++ b/src/resources/collections/collections.repository.js @@ -1,496 +1,31 @@ -/* eslint-disable no-undef */ -import { Data } from '../tool/data.model'; -import { Course } from '../course/course.model'; +import Repository from '../base/repository'; import { Collections } from './collections.model'; -import { UserModel } from '../user/user.model'; -import emailGenerator from '../utilities/emailGenerator.util'; -import _ from 'lodash'; import helper from '../utilities/helper.util'; -const hdrukEmail = `enquiry@healthdatagateway.org`; - -const getCollectionObjects = async req => { - let relatedObjects = []; - await Collections.find( - { id: parseInt(req.params.collectionID) }, - { - 'relatedObjects._id': 1, - 'relatedObjects.objectId': 1, - 'relatedObjects.objectType': 1, - 'relatedObjects.pid': 1, - 'relatedObjects.updated': 1, - } - ).then(async res => { - await new Promise(async (resolve, reject) => { - if (_.isEmpty(res)) { - reject(`Collection not found for Id: ${req.params.collectionID}.`); - } else { - for (let object of res[0].relatedObjects) { - let relatedObject = await getCollectionObject(object.objectId, object.objectType, object.pid, object.updated); - if (!_.isUndefined(relatedObject)) { - relatedObjects.push(relatedObject); - } else { - await Collections.findOneAndUpdate( - { id: parseInt(req.params.collectionID) }, - { $pull: { relatedObjects: { _id: object._id } } } - ); - } - } - resolve(relatedObjects); - } - }); - }); - - return relatedObjects.sort((a, b) => b.updated - a.updated); -}; - -function getCollectionObject(objectId, objectType, pid, updated) { - let id = pid && pid.length > 0 ? 
pid : objectId; - - return new Promise(async resolve => { - let data; - if (objectType !== 'dataset' && objectType !== 'course') { - data = await Data.find( - { id: parseInt(id) }, - { - id: 1, - type: 1, - activeflag: 1, - tags: 1, - description: 1, - name: 1, - persons: 1, - categories: 1, - programmingLanguage: 1, - firstname: 1, - lastname: 1, - bio: 1, - authors: 1, - counter: { $ifNull: ['$counter', 0] }, - relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, - } - ) - .populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]) - .lean(); - } else if (!isNaN(id) && objectType === 'course') { - data = await Course.find( - { id: parseInt(id) }, - { - id: 1, - type: 1, - activeflag: 1, - title: 1, - provider: 1, - courseOptions: 1, - award: 1, - domains: 1, - tags: 1, - description: 1, - counter: { $ifNull: ['$counter', 0] }, - relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, - } - ).lean(); - } else { - const datasetRelatedResources = { - $lookup: { - from: 'tools', - let: { - pid: '$pid', - }, - pipeline: [ - { $unwind: '$relatedObjects' }, - { - $match: { - $expr: { - $and: [ - { - $eq: ['$relatedObjects.pid', '$$pid'], - }, - { - $eq: ['$activeflag', 'active'], - }, - ], - }, - }, - }, - { $group: { _id: null, count: { $sum: 1 } } }, - ], - as: 'relatedResourcesTools', - }, - }; - - const datasetRelatedCourses = { - $lookup: { - from: 'course', - let: { - pid: '$pid', - }, - pipeline: [ - { $unwind: '$relatedObjects' }, - { - $match: { - $expr: { - $and: [ - { - $eq: ['$relatedObjects.pid', '$$pid'], - }, - { - $eq: ['$activeflag', 'active'], - }, - ], - }, - }, - }, - { $group: { _id: null, count: { $sum: 1 } } }, - ], - as: 'relatedResourcesCourses', - }, - }; - - const datasetProjectFields = { - $project: { - id: 1, - datasetid: 1, - pid: 1, - type: 1, - activeflag: 1, - name: 1, - datasetv2: 1, - datasetfields: 1, - tags: 1, - description: 1, - counter: { $ifNull: ['$counter', 0] }, - relatedresources: { - $add: [ - { - $cond: { - if: { $eq: [{ $size: '$relatedResourcesTools' }, 0] }, - then: 0, - else: { $first: '$relatedResourcesTools.count' }, - }, - }, - { - $cond: { - if: { $eq: [{ $size: '$relatedResourcesCourses' }, 0] }, - then: 0, - else: { $first: '$relatedResourcesCourses.count' }, - }, - }, - ], - }, - }, - }; - - // 1. Search for a dataset based on pid - data = await Data.aggregate([ - { $match: { $and: [{ pid: id }, { activeflag: 'active' }] } }, - datasetRelatedResources, - datasetRelatedCourses, - datasetProjectFields, - ]); - - // 2. If dataset not found search for a dataset based on datasetID - if (!data || data.length <= 0) { - data = await Data.find({ datasetid: objectId }, { datasetid: 1, pid: 1 }).lean(); - // 3. Use retrieved dataset's pid to search by pid again - data = await Data.aggregate([ - { $match: { $and: [{ pid: data[0].pid }, { activeflag: 'active' }] } }, - datasetRelatedResources, - datasetRelatedCourses, - datasetProjectFields, - ]); - } - - // 4. 
If dataset still not found search for deleted dataset by pid - if (!data || data.length <= 0) { - data = await Data.aggregate([ - { $match: { $and: [{ pid: id }, { activeflag: 'archive' }] } }, - datasetRelatedResources, - datasetRelatedCourses, - datasetProjectFields, - ]); - } - } - - let relatedObject = { ...data[0], updated: Date.parse(updated) }; - resolve(relatedObject); - }); -} - -async function sendEmailNotifications(collections, activeflag, collectionCreator, isEdit) { - // Generate URL for linking collection in email - const collectionLink = process.env.homeURL + '/collection/' + collections.id; - - // Query Db for all admins or authors of the collection - var q = UserModel.aggregate([ - { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - { - $project: { - _id: 1, - firstname: 1, - lastname: 1, - email: 1, - role: 1, - id: 1, - }, - }, - ]); - - // Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } else { - let subject; - let html; - - emailRecipients.map(emailRecipient => { - if (collections.authors.includes(emailRecipient.id)) { - let author = Number(collections.authors.filter(author => author === emailRecipient.id)); - - if (activeflag === 'active') { - subject = generateCollectionEmailSubject( - 'Creator', - collections.publicflag, - collections.name, - author === collectionCreator.id ? true : false, - isEdit - ); - html = generateCollectionEmailContent( - 'Creator', - collections.publicflag, - collections.name, - collectionLink, - author === collectionCreator.id ? true : false, - isEdit - ); - } - } else if (activeflag === 'active' && emailRecipient.role === 'Admin') { - subject = generateCollectionEmailSubject('Admin', collections.publicflag, collections.name, false, isEdit); - html = generateCollectionEmailContent('Admin', collections.publicflag, collections.name, collectionLink, false, isEdit); - } - - emailGenerator.sendEmail([emailRecipient], `${hdrukEmail}`, subject, html, false); - }); - } - }); -} - -function generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit) { - let emailSubject; - - if (role !== 'Admin' && isCreator !== true) { - if (isEdit === true) { - emailSubject = `The ${ - publicflag === true ? 'public' : 'private' - } collection ${collectionName} that you are a collaborator on has been edited and is now live`; - } else { - emailSubject = `You have been added as a collaborator on the ${ - publicflag === true ? 'public' : 'private' - } collection ${collectionName}`; - } - } else { - emailSubject = `${role === 'Admin' ? 'A' : 'Your'} ${ - publicflag === true ? 'public' : 'private' - } collection ${collectionName} has been ${isEdit === true ? 'edited' : 'published'} and is now live`; +export default class CollectionsRepository extends Repository { + constructor() { + super(Collections); + this.collections = Collections; } - return emailSubject; -} - -function generateCollectionEmailContent(role, publicflag, collectionName, collectionLink, isCreator, isEdit) { - return `
-				<!-- HTML email table layout -->
-				${generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit)}
-				${
-					publicflag === true
-						? `${role === 'Admin' ? 'A' : 'Your'} public collection has been ${
-								isEdit === true ? 'edited on' : 'published to'
-						  } the Gateway. The collection is searchable on the Gateway and can be viewed by all users.`
-						: `${role === 'Admin' ? 'A' : 'Your'} private collection has been ${
-								isEdit === true ? 'edited on' : 'published to'
-						  } the Gateway. Only those who you share the collection link with will be able to view the collection.`
-				}
-				View Collection
`; -} - -const getCollectionsAdmin = async req => { - return new Promise(async resolve => { - let startIndex = 0; - let limit = 40; - let searchString = ''; - let status = 'all'; - - if (req.query.offset) { - startIndex = req.query.offset; - } - if (req.query.limit) { - limit = req.query.limit; - } - if (req.query.q) { - searchString = req.query.q || ''; - } - if (req.query.status) { - status = req.query.status; - } - - let searchQuery; - if (status === 'all') { - searchQuery = {}; - } else { - searchQuery = { $and: [{ activeflag: status }] }; - } - - let searchAll = false; - - if (searchString.length > 0) { - searchQuery['$and'].push({ $text: { $search: searchString } }); - } else { - searchAll = true; - } - - await Promise.all([getObjectResult(searchAll, searchQuery, startIndex, limit), getCountsByStatus()]).then(values => { - resolve(values); - }); - }); -}; - -const getCollections = async req => { - return new Promise(async resolve => { - let startIndex = 0; - let limit = 40; - let idString = req.user.id; - let status = 'all'; - - if (req.query.offset) { - startIndex = req.query.offset; - } - if (req.query.limit) { - limit = req.query.limit; - } - if (req.query.id) { - idString = req.query.id; - } - if (req.query.status) { - status = req.query.status; - } - - let searchQuery; - if (status === 'all') { - searchQuery = [{ authors: parseInt(idString) }]; - } else { - searchQuery = [{ authors: parseInt(idString) }, { activeflag: status }]; - } - - let query = Collections.aggregate([ - { $match: { $and: searchQuery } }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - { $sort: { updatedAt: -1, _id: 1 } }, - ]) - .skip(parseInt(startIndex)) - .limit(parseInt(limit)); - - await Promise.all([getUserCollections(query), getCountsByStatus(idString)]).then(values => { - resolve(values); - }); - - function getUserCollections(query) { - return new Promise(resolve => { - query.exec((err, data) => { - data && - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - if (typeof data === 'undefined') resolve([]); - else resolve(data); - }); - }); - } - }); -}; - -function getObjectResult(searchAll, searchQuery, startIndex, limit) { - let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - let q = ''; - - if (searchAll) { - q = Collections.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - ]) - .sort({ updatedAt: -1, _id: 1 }) - .skip(parseInt(startIndex)) - .limit(parseInt(limit)); - } else { - q = Collections.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - ]) - .sort({ score: { $meta: 'textScore' } }) - .skip(parseInt(startIndex)) - .limit(parseInt(limit)); + async getCollections(query, options) { + return this.find(query, options); } - return new Promise(resolve => { - q.exec((err, data) => { - if (typeof data === 'undefined') { - resolve([]); - } else { - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - resolve(data); - } - }); - }); -} -function getCountsByStatus(idString) { - let q; - - if (_.isUndefined(idString)) { - q = Collections.find({}, { id: 1, name: 1, activeflag: 1 }); - } else { - q = Collections.find({ authors: parseInt(idString) }, { id: 1, name: 1, activeflag: 1 }); + async updateCollection(query, options) { + return this.updateByQuery(query, options); } - return new Promise(resolve => { - 
q.exec((err, data) => { - const activeCount = data.filter(dat => dat.activeflag === 'active').length; - const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; - - let countSummary = { activeCount: activeCount, archiveCount: archiveCount }; - - resolve(countSummary); + async searchCollections(query) { + return new Promise(resolve => { + query.exec((err, data) => { + data && + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); }); - }); + } } - -export { getCollectionObjects, getCollectionsAdmin, getCollections, sendEmailNotifications, generateCollectionEmailSubject }; diff --git a/src/resources/collections/collections.route.js b/src/resources/collections/collections.route.js index f6b948c5..adcc6c2c 100644 --- a/src/resources/collections/collections.route.js +++ b/src/resources/collections/collections.route.js @@ -1,277 +1,59 @@ import express from 'express'; -import { ROLES } from '../user/user.roles'; import passport from 'passport'; -import { utils } from '../auth'; -import { Collections } from '../collections/collections.model'; -import { Data } from '../tool/data.model'; -import { MessagesModel } from '../message/message.model'; -import { UserModel } from '../user/user.model'; -import helper from '../utilities/helper.util'; import _ from 'lodash'; -import escape from 'escape-html'; -import { - getCollectionObjects, - getCollectionsAdmin, - getCollections, - sendEmailNotifications, - generateCollectionEmailSubject, -} from './collections.repository'; -import inputSanitizer from '../utilities/inputSanitizer'; -import urlValidator from '../utilities/urlValidator'; -import { filtersService } from '../filters/dependency'; +import { utils } from '../auth'; +import CollectionsController from './collections.controller'; +import { collectionsService } from './dependency'; + +const collectionsController = new CollectionsController(collectionsService); const router = express.Router(); // @router GET /api/v1/collections/getList // @desc Returns List of Collections // @access Private -router.get('/getList', passport.authenticate('jwt'), async (req, res) => { - let role = req.user.role; - - if (role === ROLES.Admin) { - await getCollectionsAdmin(req) - .then(data => { - return res.json({ success: true, data }); - }) - .catch(err => { - return res.json({ success: false, err }); - }); - } else if (role === ROLES.Creator) { - await getCollections(req) - .then(data => { - return res.json({ success: true, data }); - }) - .catch(err => { - return res.json({ success: false, err }); - }); - } -}); +router.get('/getList', passport.authenticate('jwt'), (req, res) => collectionsController.getList(req, res)); // @router GET /api/v1/collections/{collectionID} // @desc Returns collection based on id // @access Public -router.get('/:collectionID', async (req, res) => { - var q = Collections.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.collectionID) }] } }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - ]); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - - if (_.isEmpty(data)) return res.status(404).send(`Collection not found for Id: ${escape(req.params.collectionID)}`); - - data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); - return res.json({ success: true, data: data }); - }); -}); +router.get('/:collectionID', (req, res) => 
collectionsController.getCollection(req, res)); // @router GET /api/v1/collections/relatedobjects/{collectionID} // @desc Returns related resources for collection based on id // @access Public -router.get('/relatedobjects/:collectionID', async (req, res) => { - await getCollectionObjects(req) - .then(data => { - return res.json({ success: true, data }); - }) - .catch(err => { - return res.json({ success: false, err }); - }); -}); +router.get('/relatedobjects/:collectionID', (req, res) => collectionsController.getCollectionRelatedResources(req, res)); // @router GET /api/v1/collections/entityid/{entityID} // @desc Returns collections that contant the entity id // @access Public -router.get('/entityid/:entityID', async (req, res) => { - let entityID = req.params.entityID; - let dataVersions = await Data.find({ pid: entityID }, { _id: 0, datasetid: 1 }); - let dataVersionsArray = dataVersions.map(a => a.datasetid); - dataVersionsArray.push(entityID); - - var q = Collections.aggregate([ - { - $match: { - $and: [ - { - relatedObjects: { - $elemMatch: { - $or: [ - { - objectId: { $in: dataVersionsArray }, - }, - { - pid: entityID, - }, - ], - }, - }, - }, - { publicflag: true }, - { activeflag: 'active' }, - ], - }, - }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - { - $project: { _id: 1, id: 1, name: 1, description: 1, imageLink: 1, relatedObjects: 1, 'persons.firstname': 1, 'persons.lastname': 1 }, - }, - ]); - - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); -}); +router.get('/entityid/:entityID', (req, res) => collectionsController.getCollectionByEntity(req, res)); // @router PUT /api/v1/collections/edit/{id} // @desc Edit Collection // @access Private -router.put('/edit/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), async (req, res) => { - let id = req.params.id; - let { name, description, imageLink, authors, relatedObjects, publicflag, keywords, previousPublicFlag, collectionCreator } = req.body; - imageLink = urlValidator.validateURL(imageLink); - let updatedon = Date.now(); - - let collectionId = parseInt(id); - - await Collections.findOneAndUpdate( - { id: { $eq: collectionId } }, - { - name: inputSanitizer.removeNonBreakingSpaces(name), - description: inputSanitizer.removeNonBreakingSpaces(description), - imageLink, - authors, - relatedObjects, - publicflag, - keywords, - updatedon, - }, - err => { - if (err) { - return res.json({ success: false, error: err }); - } - } - ).then(() => { - filtersService.optimiseFilters('collection'); - return res.json({ success: true }); - }); - - await Collections.find({ id: collectionId }, { publicflag: 1, id: 1, activeflag: 1, authors: 1, name: 1 }).then(async res => { - if (previousPublicFlag === false && publicflag === true) { - await sendEmailNotifications(res[0], res[0].activeflag, collectionCreator, true); - - if (res[0].authors) { - res[0].authors.forEach(async authorId => { - await createMessage(authorId, res[0], res[0].activeflag, collectionCreator, true); - }); - } - - await createMessage(0, res[0], res[0].activeflag, collectionCreator, true); - } - }); -}); +router.put('/edit/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), (req, res) => + collectionsController.editCollection(req, res) +); // @router POST /api/v1/collections/add // @desc Add Collection // @access Private -router.post('/add', passport.authenticate('jwt'), async (req, res) => { 
- let collections = new Collections(); - - const collectionCreator = req.body.collectionCreator; - - const { name, description, imageLink, authors, relatedObjects, publicflag, keywords } = req.body; - - collections.id = parseInt(Math.random().toString().replace('0.', '')); - collections.name = inputSanitizer.removeNonBreakingSpaces(name); - collections.description = inputSanitizer.removeNonBreakingSpaces(description); - collections.imageLink = imageLink; - collections.authors = authors; - collections.relatedObjects = relatedObjects; - collections.activeflag = 'active'; - collections.publicflag = publicflag; - collections.keywords = keywords; - collections.updatedon = Date.now(); - - if (collections.authors) { - collections.authors.forEach(async authorId => { - await createMessage(authorId, collections, collections.activeflag, collectionCreator); - }); - } - - await createMessage(0, collections, collections.activeflag, collectionCreator); - - await sendEmailNotifications(collections, collections.activeflag, collectionCreator); - - collections.save(err => { - if (err) { - return res.json({ success: false, error: err }); - } else { - return res.json({ success: true, id: collections.id }); - } - }); -}); +router.post('/add', passport.authenticate('jwt'), (req, res) => collectionsController.addCollection(req, res)); // @router PUT /api/v1/collections/status/{id} // @desc Edit Collection // @access Private -router.put('/status/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), async (req, res) => { - const collectionId = parseInt(req.params.id); - let { activeflag } = req.body; - activeflag = activeflag.toString(); - - Collections.findOneAndUpdate({ id: collectionId }, { activeflag }, err => { - if (err) { - return res.json({ success: false, error: err }); - } - }).then(() => { - filtersService.optimiseFilters('collection'); - return res.json({ success: true }); - }); -}); +router.put('/status/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), (req, res) => + collectionsController.changeStatus(req, res) +); // @router DELETE /api/v1/collections/delete/{id} // @desc Delete Collection // @access Private -router.delete('/delete/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), async (req, res) => { - const id = parseInt(req.params.id); - Collections.findOneAndRemove({ id }, err => { - if (err) return res.send(err); - return res.json({ success: true }); - }); -}); +router.delete('/delete/:id', passport.authenticate('jwt'), utils.checkAllowedToAccess('collection'), (req, res) => + collectionsController.deleteCollection(req, res) +); // eslint-disable-next-line no-undef module.exports = router; - -async function createMessage(authorId, collections, activeflag, collectionCreator, isEdit) { - let message = new MessagesModel(); - - const messageRecipients = await UserModel.find({ $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] }); - async function saveMessage() { - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = authorId; - message.messageObjectID = collections.id; - message.messageSent = Date.now(); - message.isRead = false; - await message.save(); - } - - if (authorId === 0) { - message.messageType = 'added collection'; - message.messageDescription = generateCollectionEmailSubject('Admin', collections.publicflag, collections.name, false, isEdit); - saveMessage(); - } - - for (let messageRecipient of messageRecipients) { - if (activeflag === 'active' && 
authorId === messageRecipient.id) { - message.messageType = 'added collection'; - message.messageDescription = generateCollectionEmailSubject( - 'Creator', - collections.publicflag, - collections.name, - authorId === collectionCreator.id ? true : false, - isEdit - ); - saveMessage(); - } - } -} diff --git a/src/resources/collections/collections.service.js b/src/resources/collections/collections.service.js new file mode 100644 index 00000000..d495a596 --- /dev/null +++ b/src/resources/collections/collections.service.js @@ -0,0 +1,578 @@ +import { Data } from '../tool/data.model'; +import { Course } from '../course/course.model'; +import { Collections } from '../collections/collections.model'; +import { UserModel } from '../user/user.model'; +import { DataUseRegister } from '../dataUseRegister/dataUseRegister.model'; +import emailGenerator from '../utilities/emailGenerator.util'; +import inputSanitizer from '../utilities/inputSanitizer'; +import _ from 'lodash'; + +export default class CollectionsService { + constructor(collectionsRepository) { + this.collectionsRepository = collectionsRepository; + this.hdrukEmail = 'enquiry@healthdatagateway.org'; + } + + async getCollectionObjects(collectionID) { + let relatedObjects = []; + await this.collectionsRepository + .getCollections( + { id: collectionID }, + { + 'relatedObjects._id': 1, + 'relatedObjects.objectId': 1, + 'relatedObjects.objectType': 1, + 'relatedObjects.pid': 1, + 'relatedObjects.updated': 1, + } + ) + .then(async res => { + await new Promise(async (resolve, reject) => { + if (_.isEmpty(res)) { + reject(`Collection not found for ID: ${collectionID}.`); + } else { + for (let object of res[0].relatedObjects) { + let relatedObject = await this.getCollectionObject(object.objectId, object.objectType, object.pid, object.updated); + if (!_.isUndefined(relatedObject)) { + relatedObjects.push(relatedObject); + } else { + await this.collectionsRepository.updateCollection({ id: collectionID }, { $pull: { relatedObjects: { _id: object._id } } }); + } + } + resolve(relatedObjects); + } + }); + }); + + return relatedObjects.sort((a, b) => b.updated - a.updated); + } + + getCollectionObject(objectId, objectType, pid, updated) { + let id = pid && pid.length > 0 ? 
pid : objectId; + + return new Promise(async resolve => { + let data; + if (objectType !== 'dataset' && objectType !== 'course' && objectType !== 'dataUseRegister') { + data = await Data.find( + { id: parseInt(id) }, + { + id: 1, + type: 1, + activeflag: 1, + tags: 1, + description: 1, + name: 1, + persons: 1, + categories: 1, + programmingLanguage: 1, + firstname: 1, + lastname: 1, + bio: 1, + authors: 1, + counter: { $ifNull: ['$counter', 0] }, + relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, + } + ) + .populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]) + .lean(); + } else if (!isNaN(id) && objectType === 'course') { + data = await Course.find( + { id: parseInt(id) }, + { + id: 1, + type: 1, + activeflag: 1, + title: 1, + provider: 1, + courseOptions: 1, + award: 1, + domains: 1, + tags: 1, + description: 1, + counter: { $ifNull: ['$counter', 0] }, + relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, + } + ).lean(); + } else if (!isNaN(id) && objectType === 'dataUseRegister') { + data = await DataUseRegister.find( + { id: parseInt(id) }, + { + id: 1, + type: 1, + activeflag: 1, + projectTitle: 1, + organisationName: 1, + keywords: 1, + gatewayDatasets: 1, + nonGatewayDatasets: 1, + datasetTitles: 1, + publisher: 1, + counter: { $ifNull: ['$counter', 0] }, + relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, + } + ) + .populate([ + { path: 'gatewayDatasetsInfo', select: { name: 1 } }, + { + path: 'publisherInfo', + select: { name: 1, _id: 0 }, + }, + ]) + .lean(); + } else { + const datasetRelatedResources = { + $lookup: { + from: 'tools', + let: { + pid: '$pid', + }, + pipeline: [ + { $unwind: '$relatedObjects' }, + { + $match: { + $expr: { + $and: [ + { + $eq: ['$relatedObjects.pid', '$$pid'], + }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $group: { _id: null, count: { $sum: 1 } } }, + ], + as: 'relatedResourcesTools', + }, + }; + + const datasetRelatedCourses = { + $lookup: { + from: 'course', + let: { + pid: '$pid', + }, + pipeline: [ + { $unwind: '$relatedObjects' }, + { + $match: { + $expr: { + $and: [ + { + $eq: ['$relatedObjects.pid', '$$pid'], + }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $group: { _id: null, count: { $sum: 1 } } }, + ], + as: 'relatedResourcesCourses', + }, + }; + + const datasetProjectFields = { + $project: { + id: 1, + datasetid: 1, + pid: 1, + type: 1, + activeflag: 1, + name: 1, + datasetv2: 1, + datasetfields: 1, + tags: 1, + description: 1, + counter: { $ifNull: ['$counter', 0] }, + relatedresources: { + $add: [ + { + $cond: { + if: { $eq: [{ $size: '$relatedResourcesTools' }, 0] }, + then: 0, + else: { $first: '$relatedResourcesTools.count' }, + }, + }, + { + $cond: { + if: { $eq: [{ $size: '$relatedResourcesCourses' }, 0] }, + then: 0, + else: { $first: '$relatedResourcesCourses.count' }, + }, + }, + ], + }, + }, + }; + + // 1. Search for a dataset based on pid + data = await Data.aggregate([ + { $match: { $and: [{ pid: id }, { activeflag: 'active' }] } }, + datasetRelatedResources, + datasetRelatedCourses, + datasetProjectFields, + ]); + + // 2. If dataset not found search for a dataset based on datasetID + if (!data || data.length <= 0) { + data = await Data.find({ datasetid: objectId }, { datasetid: 1, pid: 1 }).lean(); + // 3. 
Use retrieved dataset's pid to search by pid again + data = await Data.aggregate([ + { $match: { $and: [{ pid: data[0].pid }, { activeflag: 'active' }] } }, + datasetRelatedResources, + datasetRelatedCourses, + datasetProjectFields, + ]); + } + + // 4. If dataset still not found search for deleted dataset by pid + if (!data || data.length <= 0) { + data = await Data.aggregate([ + { $match: { $and: [{ pid: id }, { activeflag: 'archive' }] } }, + datasetRelatedResources, + datasetRelatedCourses, + datasetProjectFields, + ]); + } + } + + let relatedObject = { ...data[0], updated: Date.parse(updated) }; + resolve(relatedObject); + }); + } + + getCollectionByEntity(entityID, dataVersionsArray) { + var q = Collections.aggregate([ + { + $match: { + $and: [ + { + relatedObjects: { + $elemMatch: { + $or: [ + { + objectId: { $in: dataVersionsArray }, + }, + { + pid: entityID, + }, + ], + }, + }, + }, + { publicflag: true }, + { activeflag: 'active' }, + ], + }, + }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { + $project: { + _id: 1, + id: 1, + name: 1, + description: 1, + imageLink: 1, + relatedObjects: 1, + 'persons.firstname': 1, + 'persons.lastname': 1, + }, + }, + ]); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (err) { + return reject(err); + } else { + return resolve(data); + } + }); + }); + } + + async editCollection(collectionID, updatedCollection) { + let { name, description, imageLink, authors, relatedObjects, publicflag, keywords } = updatedCollection; + let updatedon = Date.now(); + + return new Promise(async (resolve, reject) => { + await Collections.findOneAndUpdate( + { id: { $eq: collectionID } }, + { + name: inputSanitizer.removeNonBreakingSpaces(name), + description: inputSanitizer.removeNonBreakingSpaces(description), + imageLink, + authors, + relatedObjects, + publicflag, + keywords, + updatedon, + }, + err => { + err ? reject(err) : resolve(); + } + ); + }); + } + + addCollection(collections) { + return new Promise(async (resolve, reject) => { + try { + await collections.save(); + resolve(); + } catch (err) { + reject({ success: false, error: err }); + } + }); + } + + changeStatus(collectionID, activeflag) { + return new Promise(async (resolve, reject) => { + Collections.findOneAndUpdate({ id: collectionID }, { activeflag }, err => { + err ? reject(err) : resolve(); + }); + }); + } + + deleteCollection(id) { + return new Promise(async (resolve, reject) => { + await Collections.findOneAndRemove({ id }, err => { + err ? 
reject(err) : resolve(); + }); + }); + } + + async sendEmailNotifications(collections, activeflag, collectionCreator, isEdit) { + // Generate URL for linking collection in email + const collectionLink = process.env.homeURL + '/collection/' + collections.id; + + // Query Db for all admins or authors of the collection + var q = UserModel.aggregate([ + { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + { + $project: { + _id: 1, + firstname: 1, + lastname: 1, + email: 1, + role: 1, + id: 1, + }, + }, + ]); + + // Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } else { + let subject; + let html; + + emailRecipients.map(emailRecipient => { + if (collections.authors.includes(emailRecipient.id)) { + let author = Number(collections.authors.filter(author => author === emailRecipient.id)); + + if (activeflag === 'active') { + subject = this.generateCollectionEmailSubject( + 'Creator', + collections.publicflag, + collections.name, + author === collectionCreator.id ? true : false, + isEdit + ); + html = this.generateCollectionEmailContent( + 'Creator', + collections.publicflag, + collections.name, + collectionLink, + author === collectionCreator.id ? true : false, + isEdit + ); + } + } else if (activeflag === 'active' && emailRecipient.role === 'Admin') { + subject = this.generateCollectionEmailSubject('Admin', collections.publicflag, collections.name, false, isEdit); + html = this.generateCollectionEmailContent('Admin', collections.publicflag, collections.name, collectionLink, false, isEdit); + } + + emailGenerator.sendEmail([emailRecipient], `${this.hdrukEmail}`, subject, html, false); + }); + } + }); + } + + generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit) { + let emailSubject; + + if (role !== 'Admin' && isCreator !== true) { + if (isEdit === true) { + emailSubject = `The ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName} that you are a collaborator on has been edited and is now live`; + } else { + emailSubject = `You have been added as a collaborator on the ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName}`; + } + } else { + emailSubject = `${role === 'Admin' ? 'A' : 'Your'} ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName} has been ${isEdit === true ? 'edited' : 'published'} and is now live`; + } + + return emailSubject; + } + + generateCollectionEmailContent(role, publicflag, collectionName, collectionLink, isCreator, isEdit) { + return `
+				<!-- HTML email table layout -->
+				${this.generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit)}
+				${
+					publicflag === true
+						? `${role === 'Admin' ? 'A' : 'Your'} public collection has been ${
+								isEdit === true ? 'edited on' : 'published to'
+						  } the Gateway. The collection is searchable on the Gateway and can be viewed by all users.`
+						: `${role === 'Admin' ? 'A' : 'Your'} private collection has been ${
+								isEdit === true ? 'edited on' : 'published to'
+						  } the Gateway. Only those who you share the collection link with will be able to view the collection.`
+				}
+				View Collection
`; + } + + async getCollectionsAdmin(searchString, status, startIndex, limit) { + return new Promise(async resolve => { + let searchQuery; + if (status === 'all') { + searchQuery = {}; + } else { + searchQuery = { $and: [{ activeflag: status }] }; + } + + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + + await Promise.all([this.getObjectResult(searchAll, searchQuery, startIndex, limit), this.getCountsByStatus()]).then(values => { + resolve(values); + }); + }); + } + + async getCollections(idString, status, startIndex, limit) { + return new Promise(async resolve => { + let searchQuery; + if (status === 'all') { + searchQuery = [{ authors: parseInt(idString) }]; + } else { + searchQuery = [{ authors: parseInt(idString) }, { activeflag: status }]; + } + + let query = Collections.aggregate([ + { $match: { $and: searchQuery } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1, _id: 1 } }, + ]) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + + await Promise.all([this.collectionsRepository.searchCollections(query), this.getCountsByStatus(idString)]).then(values => { + resolve(values); + }); + }); + } + + async getCollection(collectionID) { + var q = Collections.aggregate([ + { $match: { $and: [{ id: collectionID }] } }, + + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]); + return new Promise((resolve, reject) => { + q.exec((err, data) => { + err ? reject(err) : resolve(data); + }); + }); + } + + getObjectResult(searchAll, searchQuery, startIndex, limit) { + let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + let q = ''; + + if (searchAll) { + q = Collections.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]) + .sort({ updatedAt: -1, _id: 1 }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } else { + q = Collections.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]) + .sort({ score: { $meta: 'textScore' } }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } + return this.collectionsRepository.searchCollections(q); + } + + getCountsByStatus(idString) { + let q; + + if (_.isUndefined(idString)) { + q = Collections.find({}, { id: 1, name: 1, activeflag: 1 }); + } else { + q = Collections.find({ authors: parseInt(idString) }, { id: 1, name: 1, activeflag: 1 }); + } + + return new Promise(resolve => { + q.exec((err, data) => { + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount: activeCount, archiveCount: archiveCount }; + + resolve(countSummary); + }); + }); + } +} diff --git a/src/resources/collections/dependency.js b/src/resources/collections/dependency.js new file mode 100644 index 00000000..ef9d18e4 --- /dev/null +++ b/src/resources/collections/dependency.js @@ -0,0 +1,6 @@ +import CollectionsService from './collections.service'; +import CollectionsRepository from './collections.repository'; + +const collectionsRepository = new CollectionsRepository(); + +export const collectionsService = new CollectionsService(collectionsRepository); diff --git a/src/resources/course/__tests__/course.repository.it.test.js 
b/src/resources/course/__tests__/course.repository.it.test.js index d8156ff3..ed57a444 100644 --- a/src/resources/course/__tests__/course.repository.it.test.js +++ b/src/resources/course/__tests__/course.repository.it.test.js @@ -35,7 +35,7 @@ describe('CourseRepository', function () { describe('getCourses', () => { it('should return an array of courses', async function () { const courseRepository = new CourseRepository(); - const courses = await courseRepository.getCourses(); + const courses = await courseRepository.getCourses({}, {}); expect(courses.length).toBeGreaterThan(0); }); }); diff --git a/src/resources/course/__tests__/course.repository.test.js b/src/resources/course/__tests__/course.repository.test.js index c07f07ec..1d38fbd7 100644 --- a/src/resources/course/__tests__/course.repository.test.js +++ b/src/resources/course/__tests__/course.repository.test.js @@ -41,7 +41,7 @@ describe('CourseRepository', function () { it('should return an array of courses', async function () { const courseRepository = new CourseRepository(); const stub = sinon.stub(courseRepository, 'find').returns(coursesStub); - const courses = await courseRepository.getCourses(); + const courses = await courseRepository.getCourses({}, {}); expect(stub.calledOnce).toBe(true); @@ -54,10 +54,10 @@ describe('CourseRepository', function () { const courseRepository = new CourseRepository(); const stub = sinon.stub(courseRepository, 'findCountOf').returns(1); const courseCount = await courseRepository.findCountOf({ name: 'Admitted Patient Care Course' }); - + expect(stub.calledOnce).toBe(true); expect(courseCount).toEqual(1); }); }); -}); \ No newline at end of file +}); diff --git a/src/resources/course/v2/course.route.js b/src/resources/course/v2/course.route.js index 8c6aab39..121e6739 100644 --- a/src/resources/course/v2/course.route.js +++ b/src/resources/course/v2/course.route.js @@ -1,6 +1,7 @@ import express from 'express'; import CourseController from './course.controller'; import { courseService } from './dependency'; +import { resultLimit } from '../../../config/middleware'; const router = express.Router(); const courseController = new CourseController(courseService); @@ -13,6 +14,6 @@ router.get('/:id', (req, res) => courseController.getCourse(req, res)); // @route GET /api/v2/courses // @desc Returns a collection of courses based on supplied query parameters // @access Public -router.get('/', (req, res) => courseController.getCourses(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => courseController.getCourses(req, res)); module.exports = router; diff --git a/src/resources/dataUseRegister/__mocks__/dataUseRegisterUsers.js b/src/resources/dataUseRegister/__mocks__/dataUseRegisterUsers.js new file mode 100644 index 00000000..93295172 --- /dev/null +++ b/src/resources/dataUseRegister/__mocks__/dataUseRegisterUsers.js @@ -0,0 +1,11 @@ +import mongoose from 'mongoose'; + +const uploader = { + _id: new mongoose.Types.ObjectId(), + firstname: 'James', + lastname: 'Smith', +}; + +export { + uploader +}; diff --git a/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js b/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js new file mode 100644 index 00000000..680caaa9 --- /dev/null +++ b/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js @@ -0,0 +1,406 @@ +export const dataUseRegisterUploads = [ + { + projectTitle: 'This a test data use register', + projectIdText: 'this is the project id', + datasetNames: [ + 'This is the dataset title', + 
'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This is another test data use register', + projectIdText: 'this is the other project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'other lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, +]; + +export const dataUseRegisterUploadsWithDuplicates = [ + { + projectTitle: 'This a test data use register', + projectIdText: 'this is the project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 
'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This a test data use register', + projectIdText: 'this is the project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This a test data use register', + projectIdText: 'this is the project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' 
Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This a test data use register', + projectIdText: 'this is the project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This a test data use register', + projectIdText: 'this is another project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'another organisation name', 
+ organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, + { + projectTitle: 'This a test data use register', + projectIdText: 'this is another project id', + datasetNames: [ + 'This is the dataset title', + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', + ], + applicantNames: [' Michael Donnelly', 'http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793'], + organisationName: 'another organisation name', + organisationSector: 'organisation sector', + applicantId: 'applicant id', + fundersAndSponsors: 'funder1 , funder2 , funder3 ', + accreditedResearcherStatus: 'accredited Researcher Status', + sublicenceArrangements: 'sublicence Arrangements', + laySummary: 'another lay Summary', + publicBenefitStatement: 'public Benefit Statement', + requestCategoryType: 'request Category Type', + technicalSummary: 'technical Summary', + otherApprovalCommittees: 'other Approval Committees', + projectStartDate: '2021-09-25', + projectEndDate: '2021-09-30', + latestApprovalDate: '2021-09-21', + dataSensitivityLevel: 'data Sensitivity Level', + legalBasisForDataArticle6: 'legal Basis For Data 6', + legalBasisForDataArticle9: 'legal Basis For Data 9', + dutyOfConfidentiality: 'duty Of Confidentiality', + nationalDataOptOut: 'national Data Opt Out', + requestFrequency: 'request Frequency', + datasetLinkageDescription: 'data Processing Description', + confidentialDataDescription: 'confidential Data Description', + accessDate: '2021-09-26', + accessType: 'accessType', + privacyEnhancements: 'privacy Enhancements', + researchOutputs: 'research Outputs', + }, +]; + +export const datasets = [ + { + id: '70b4d407-288a-4945-a4d5-506d60715110', + pid: 'e55df485-5acd-4606-bbb8-668d4c06380a', + }, + { + id: '82ef7d1a-98d8-48b6-9acd-461bf2a399c3', + pid: 'e55df485-5acd-4606-bbb8-668d4c06380a', + }, + { + id: '673626f3-bdac-4d32-9bb8-c890b727c0d1', + pid: '594d79a4-92b9-4a7f-b991-abf850bf2b67', + }, + { + id: '89e57932-ac48-48ac-a6e5-29795bc38b94', + pid: 'efbd4275-70e2-4887-8499-18b1fb24ce5b', + }, +]; + +export const relatedObjectDatasets = [ + { + objectId: '70b4d407-288a-4945-a4d5-506d60715110', + pid: 'e55df485-5acd-4606-bbb8-668d4c06380a', + objectType: 'dataset', + user: 'James Smith', + updated: '24 Sept 2021', + isLocked: true, + reason: 
'This dataset was added automatically during the manual upload of this data use register', + }, + { + objectId: '82ef7d1a-98d8-48b6-9acd-461bf2a399c3', + pid: 'e55df485-5acd-4606-bbb8-668d4c06380a', + objectType: 'dataset', + user: 'James Smith', + updated: '24 Sept 2021', + isLocked: true, + reason: 'This dataset was added automatically during the manual upload of this data use register', + }, + { + objectId: '673626f3-bdac-4d32-9bb8-c890b727c0d1', + pid: '594d79a4-92b9-4a7f-b991-abf850bf2b67', + objectType: 'dataset', + user: 'James Smith', + updated: '24 Sept 2021', + isLocked: true, + reason: 'This dataset was added automatically during the manual upload of this data use register', + }, + { + objectId: '89e57932-ac48-48ac-a6e5-29795bc38b94', + pid: 'efbd4275-70e2-4887-8499-18b1fb24ce5b', + objectType: 'dataset', + user: 'James Smith', + updated: '24 Sept 2021', + isLocked: true, + reason: 'This dataset was added automatically during the manual upload of this data use register', + }, +]; + +export const nonGatewayDatasetNames = ['dataset one', 'dataset two', ' dataset three', 'dataset four']; + +export const gatewayDatasetNames = [ + 'http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2', + 'http://localhost:3000/dataset/c6d6bbd3-74ed-46af-841d-ac5e05f4da41', + 'http://localhost:3000/dataset/e55df485-5acd-4606-bbb8-668d4c06380a', +]; + +export const expectedGatewayDatasets = [ + { datasetid: '1', name: 'dataset 1', pid: '111' }, + { datasetid: '2', name: 'dataset 2', pid: '222' }, + { datasetid: '3', name: 'dataset 3', pid: '333' }, +]; + +export const expectedGatewayDatasetsReturned = [ + { id: '1', name: 'dataset 1', pid: '111' }, + { id: '2', name: 'dataset 2', pid: '222' }, + { id: '3', name: 'dataset 3', pid: '333' }, +]; + +export const nonGatewayApplicantNames = ['applicant one', 'applicant two', 'applicant three', 'applicant four']; + +export const gatewayApplicantNames = ['http://localhost:3000/person/8495781222000176', 'http://localhost:3000/person/4495285946631793']; + +export const expectedGatewayApplicants = ['89e57932-ac48-48ac-a6e5-29795bc38b94', '0cfe60cd-038d-4c03-9a95-894c52135922']; + +export const applications = [ + { + questionAnswers: { + safepeopleprimaryapplicantfullname: 'applicant name', + safeprojectfunderinformationprojecthasfundername: 'funder 1', + safeprojectfunderinformationprojecthasfundername_gRvcG: 'funder 2', + safeprojectsponsorinformationprojecthassponsororganisationname: 'sponsor 1', + safeprojectsponsorinformationprojecthassponsororganisationname_2gixm: 'sponsor 2', + safepeopleprimaryapplicantfullname: 'James Smith', + safepeopleprimaryapplicantfullname_xRtvc: 'Michael Howard', + safepeopleotherindividualsfullname: 'Colin Devlin', + safepeopleotherindividualsfullname_3uGds: 'Graham Patterson', + }, + }, +]; + +export const authors = [ + { _id: '607db9c6e1f9d3704d570d93', firstname: 'James', lastname: 'Smith' }, + { _id: '5fb628de6f3f9767bd2d9281', firstname: 'Michael', lastname: 'Howard' }, +]; diff --git a/src/resources/dataUseRegister/__tests__/dataUseRegister.service.test.js b/src/resources/dataUseRegister/__tests__/dataUseRegister.service.test.js new file mode 100644 index 00000000..cc2c236a --- /dev/null +++ b/src/resources/dataUseRegister/__tests__/dataUseRegister.service.test.js @@ -0,0 +1,61 @@ +import sinon from 'sinon'; + +import DataUseRegisterService from '../dataUseRegister.service'; +import DataUseRegisterRepository from '../dataUseRegister.repository'; +import dataUseRegisterUtil from '../dataUseRegister.util'; 
+import { dataUseRegisterUploadsWithDuplicates, dataUseRegisterUploads } from '../__mocks__/dataUseRegisters'; + +describe('DataUseRegisterService', function () { + describe('filterDuplicateDataUseRegisters', function () { + it('filters out data uses that have matching project Ids', async function () { + // Arrange + const dataUseRegisterRepository = new DataUseRegisterRepository(); + const dataUseRegisterService = new DataUseRegisterService(dataUseRegisterRepository); + + // Act + const result = dataUseRegisterService.filterDuplicateDataUseRegisters(dataUseRegisterUploadsWithDuplicates); + + // Assert + expect(dataUseRegisterUploadsWithDuplicates.length).toEqual(6); + expect(result.length).toEqual(2); + expect(result[0].projectIdText).not.toEqual(result[1].projectIdText); + expect(result[0]).toEqual(dataUseRegisterUploadsWithDuplicates[0]); + }); + it('filters out duplicate data uses that match across the following fields: project title, lay summary, organisation name, dataset names and latest approval date', async function () { + // Arrange + const dataUseRegisterRepository = new DataUseRegisterRepository(); + const dataUseRegisterService = new DataUseRegisterService(dataUseRegisterRepository); + + // Act + const result = dataUseRegisterService.filterDuplicateDataUseRegisters(dataUseRegisterUploadsWithDuplicates); + + // Assert + expect(dataUseRegisterUploadsWithDuplicates.length).toEqual(6); + expect(result.length).toEqual(2); + expect(result[1]).toEqual(dataUseRegisterUploadsWithDuplicates[4]); + }); + }); + + describe('filterExistingDataUseRegisters', function () { + it('filters out data uses that are found to already exist in the database', async function () { + // Arrange + const dataUseRegisterRepository = new DataUseRegisterRepository(); + const dataUseRegisterService = new DataUseRegisterService(dataUseRegisterRepository); + + const checkDataUseRegisterExistsStub = sinon.stub(dataUseRegisterRepository, 'checkDataUseRegisterExists'); + checkDataUseRegisterExistsStub.onCall(0).returns(false); + checkDataUseRegisterExistsStub.onCall(1).returns(true); + const getLinkedDatasetsStub = sinon.stub(dataUseRegisterUtil, 'getLinkedDatasets'); + getLinkedDatasetsStub.returns({ linkedDatasets: [], namedDatasets: [] }); + + // Act + const result = await dataUseRegisterService.filterExistingDataUseRegisters(dataUseRegisterUploads); + + // Assert + expect(checkDataUseRegisterExistsStub.calledTwice).toBe(true); + expect(dataUseRegisterUploads.length).toBe(2); + expect(result.length).toBe(1); + expect(result[0].projectIdText).toEqual(dataUseRegisterUploads[0].projectIdText); + }); + }); +}); diff --git a/src/resources/dataUseRegister/__tests__/dataUseRegister.util.test.js b/src/resources/dataUseRegister/__tests__/dataUseRegister.util.test.js new file mode 100644 index 00000000..843ac425 --- /dev/null +++ b/src/resources/dataUseRegister/__tests__/dataUseRegister.util.test.js @@ -0,0 +1,155 @@ +import sinon from 'sinon'; +import { fn as momentProto } from 'moment'; +import { cloneDeep } from 'lodash'; + +import dataUseRegisterUtil from '../dataUseRegister.util'; +import { + datasets, + relatedObjectDatasets, + nonGatewayDatasetNames, + gatewayDatasetNames, + expectedGatewayDatasets, + expectedGatewayDatasetsReturned, + nonGatewayApplicantNames, + gatewayApplicantNames, + expectedGatewayApplicants, + applications, + authors, +} from '../__mocks__/dataUseRegisters'; +import { uploader } from '../__mocks__/dataUseRegisterUsers'; +import * as userRepository from '../../user/user.repository'; +import { 
datasetService } from '../../dataset/dependency'; + +const sandbox = sinon.createSandbox(); + +describe('DataUseRegisterUtil', function () { + beforeAll(function () { + process.env.homeURL = 'http://localhost:3000'; + }); + + describe('getLinkedDatasets', function () { + it('returns the names of the datasets that could not be found on the Gateway as named datasets', async function () { + // Arrange + const getDatasetsByNameStub = sinon.stub(datasetService, 'getDatasetsByName'); + getDatasetsByNameStub.returns(); + + // Act + const result = await dataUseRegisterUtil.getLinkedDatasets(nonGatewayDatasetNames); + + // Assert + expect(result).toEqual({ linkedDatasets: [], namedDatasets: nonGatewayDatasetNames }); + }); + it('returns the details of datasets that could be found on the Gateway when valid URLs are given', async function () { + // Arrange + const getDatasetsByPidsStub = sinon.stub(datasetService, 'getDatasetsByPids'); + getDatasetsByPidsStub.returns(expectedGatewayDatasets); + + // Act + const result = await dataUseRegisterUtil.getLinkedDatasets(gatewayDatasetNames); + + // Assert + expect(getDatasetsByPidsStub.calledOnce).toBe(true); + expect(result).toEqual({ linkedDatasets: expectedGatewayDatasetsReturned, namedDatasets: [] }); + }); + }); + + describe('getLinkedApplicants', function () { + it('returns the names of the applicants that could not be found on the Gateway', async function () { + // Act + const result = await dataUseRegisterUtil.getLinkedApplicants(nonGatewayApplicantNames); + + // Assert + expect(result).toEqual({ gatewayApplicants: [], nonGatewayApplicants: nonGatewayApplicantNames }); + }); + it('returns the details of applicants that could be found on the Gateway when valid profile URLs are given', async function () { + // Arrange + const getUsersByIdsStub = sinon.stub(userRepository, 'getUsersByIds'); + getUsersByIdsStub.returns([{ _id: '89e57932-ac48-48ac-a6e5-29795bc38b94' }, { _id: '0cfe60cd-038d-4c03-9a95-894c52135922' }]); + + // Act + const result = await dataUseRegisterUtil.getLinkedApplicants(gatewayApplicantNames); + + // Assert + expect(getUsersByIdsStub.calledOnce).toBe(true); + expect(result.gatewayApplicants[0]._id).toEqual(expectedGatewayApplicants[0]); + expect(result.gatewayApplicants[1]._id).toEqual(expectedGatewayApplicants[1]); + expect(result.nonGatewayApplicants).toEqual([]); + }); + }); + + describe('buildRelatedObjects', function () { + beforeEach(() => { + sandbox.stub(momentProto, 'format'); + momentProto.format.withArgs('DD MMM YYYY').returns('24 Sept 2021'); + }); + + it('filters out data uses that are found to already exist in the database', async function () { + // Arrange + const data = cloneDeep(datasets); + + // Act + const result = dataUseRegisterUtil.buildRelatedObjects(uploader, 'dataset', data); + + // Assert + expect(result.length).toBe(data.length); + expect(result).toEqual(relatedObjectDatasets); + }); + + afterEach(function () { + sinon.restore(); + sandbox.restore(); + }); + }); + + describe('extractFormApplicants', function () { + it('identifies and combines gateway and non gateway applicants in the correct format', function () { + // Arrange + const questionAnswersStub = cloneDeep(applications[0].questionAnswers); + const authorsStub = cloneDeep(authors); + + // Act + const result = dataUseRegisterUtil.extractFormApplicants(authorsStub, questionAnswersStub); + + // Assert + expect(result.gatewayApplicants.length).toBe(2); + expect(result.gatewayApplicants).toEqual(expect.arrayContaining(['607db9c6e1f9d3704d570d93', 
'5fb628de6f3f9767bd2d9281'])); + + expect(result.nonGatewayApplicants.length).toBe(2); + expect(result.nonGatewayApplicants).toEqual(expect.arrayContaining(['Colin Devlin', 'Graham Patterson'])); + }); + + it('removes duplicate applicants who are both authors of the application and named in the questions answers', function () { + // Arrange + const questionAnswersStub = cloneDeep(applications[0].questionAnswers); + const authorsStub = cloneDeep(authors); + + // Act + const result = dataUseRegisterUtil.extractFormApplicants(authorsStub, questionAnswersStub); + + // Assert + expect(result.gatewayApplicants.length).toBe(2); + expect(result.gatewayApplicants).toEqual(expect.arrayContaining(['607db9c6e1f9d3704d570d93', '5fb628de6f3f9767bd2d9281'])); + + expect(result.nonGatewayApplicants.length).toBe(2); + expect(result.nonGatewayApplicants).toEqual(expect.arrayContaining(['Colin Devlin', 'Graham Patterson'])); + }); + }); + + describe('extractFundersAndSponsors', function () { + it('identifies and combines funder and sponsor organisations named in the question answers ', function () { + // Arrange + const questionAnswersStub = cloneDeep(applications[0].questionAnswers); + + // Act + const result = dataUseRegisterUtil.extractFundersAndSponsors(questionAnswersStub); + + // Assert + expect(result.length).toBe(4); + expect(result).toEqual(expect.arrayContaining(['funder 1', 'funder 2', 'sponsor 1', 'sponsor 2'])); + }); + }); + + afterAll(function () { + delete process.env.homeURL; + }); +}); diff --git a/src/resources/dataUseRegister/dataUseRegister.controller.js b/src/resources/dataUseRegister/dataUseRegister.controller.js new file mode 100644 index 00000000..93701e17 --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.controller.js @@ -0,0 +1,441 @@ +/* eslint-disable class-methods-use-this */ +import Mongoose from 'mongoose'; +import Controller from '../base/controller'; +import { logger } from '../utilities/logger'; +import constants from './../utilities/constants.util'; +import { Data } from '../tool/data.model'; +import { TeamModel } from '../team/team.model'; +import teamController from '../team/team.controller'; +import emailGenerator from '../utilities/emailGenerator.util'; +import { getObjectFilters } from '../search/search.repository'; +import { filtersService } from '../filters/dependency'; + +import { DataUseRegister } from '../dataUseRegister/dataUseRegister.model'; +import { isEmpty, isUndefined } from 'lodash'; + +const logCategory = 'dataUseRegister'; + +export default class DataUseRegisterController extends Controller { + constructor(dataUseRegisterService, activityLogService) { + super(dataUseRegisterService); + this.dataUseRegisterService = dataUseRegisterService; + this.activityLogService = activityLogService; + } + + async getDataUseRegister(req, res) { + try { + // Extract id parameter from query string + const { id } = req.params; + const isEdit = req.query.isEdit || false; + if (req.query.isEdit) delete req.query.isEdit; + + // If no id provided, it is a bad request + if (!id) { + return res.status(400).json({ + success: false, + message: 'You must provide a dataUseRegister identifier', + }); + } + + // Find the dataUseRegister + const options = { + lean: true, + populate: [ + { path: 'gatewayApplicants', select: 'id firstname lastname' }, + { path: 'gatewayDatasetsInfo', select: 'name pid' }, + { path: 'gatewayOutputsToolsInfo', select: 'name id' }, + { path: 'gatewayOutputsPapersInfo', select: 'name id' }, + ], + }; + const dataUseRegister = await 
this.dataUseRegisterService.getDataUseRegister(id, req.query, options); + + // Return if no dataUseRegister found + if (!dataUseRegister) { + return res.status(404).json({ + success: false, + message: 'A dataUseRegister could not be found with the provided id', + }); + } + + // Reverse look up + var p = Data.aggregate([{ $match: { $and: [{ relatedObjects: { $elemMatch: { objectId: id } } }] } }]); + p.exec((err, relatedData) => { + if (!isEdit) { + relatedData.forEach(dat => { + dat.relatedObjects.forEach(x => { + if (x.objectId === id && dat.id !== id) { + if (typeof dataUseRegister.relatedObjects === 'undefined') dataUseRegister.relatedObjects = []; + dataUseRegister.relatedObjects.push({ + objectId: dat.id, + reason: x.reason, + objectType: dat.type, + user: x.user, + updated: x.updated, + }); + } + }); + }); + } + if (err) return res.json({ success: false, error: err }); + + // Return the dataUseRegister + return res.status(200).json({ + success: true, + ...dataUseRegister, + }); + }); + } catch (err) { + // Return error response if something goes wrong + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async getDataUseRegisters(req, res) { + try { + const { team } = req.query; + const requestingUser = req.user; + + let query = ''; + + if (!isUndefined(team)) { + if (team === 'user') { + delete req.query.team; + query = { ...req.query, gatewayApplicants: requestingUser._id }; + } else if (team === 'admin') { + delete req.query.team; + query = { ...req.query, activeflag: constants.dataUseRegisterStatus.INREVIEW }; + } else if (team !== 'user' && team !== 'admin') { + delete req.query.team; + query = { publisher: new Mongoose.Types.ObjectId(team) }; + } + + const dataUseRegisters = await this.dataUseRegisterService + .getDataUseRegisters({ $and: [query] }, { aggregate: true }) + .catch(err => { + logger.logError(err, logCategory); + }); + // Return the dataUseRegisters + return res.status(200).json({ + success: true, + data: dataUseRegisters, + }); + } else { + const dataUseRegisters = await this.dataUseRegisterService.getDataUseRegisters(req.query).catch(err => { + logger.logError(err, logCategory); + }); + // Return the dataUseRegisters + return res.status(200).json({ + success: true, + data: dataUseRegisters, + }); + } + } catch (err) { + // Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async updateDataUseRegister(req, res) { + try { + const id = req.params.id; + const requestingUser = req.user; + const { rejectionReason } = req.body; + + const options = { lean: true, populate: 'applicantDetails' }; + const dataUseRegister = await this.dataUseRegisterService.getDataUseRegister(id, {}, options); + const updateObj = await this.dataUseRegisterService.buildUpdateObject(dataUseRegister, req.body, requestingUser); + + if (isEmpty(updateObj)) { + return res.status(200).json({ + success: true, + }); + } + + await this.dataUseRegisterService.updateDataUseRegister(dataUseRegister._id, updateObj).catch(err => { + logger.logError(err, logCategory); + }); + + filtersService.optimiseFilters('dataUseRegister'); + + const isDataUseRegisterApproved = + updateObj.activeflag && + updateObj.activeflag === constants.dataUseRegisterStatus.ACTIVE && + dataUseRegister.activeflag === constants.dataUseRegisterStatus.INREVIEW; + + const 
isDataUseRegisterRejected = + updateObj.activeflag && + updateObj.activeflag === constants.dataUseRegisterStatus.REJECTED && + dataUseRegister.activeflag === constants.dataUseRegisterStatus.INREVIEW; + + // Send notifications + if (isDataUseRegisterApproved) { + await this.createNotifications(constants.dataUseRegisterNotifications.DATAUSEAPPROVED, {}, dataUseRegister); + } else if (isDataUseRegisterRejected) { + await this.createNotifications(constants.dataUseRegisterNotifications.DATAUSEREJECTED, { rejectionReason }, dataUseRegister); + } + + if (!isEmpty(updateObj)) { + await this.activityLogService.logActivity(constants.activityLogEvents.DATA_USE_REGISTER_UPDATED, { + dataUseRegister, + updateObj, + user: requestingUser, + }); + } + + // Return success + return res.status(200).json({ + success: true, + }); + } catch (err) { + // Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async uploadDataUseRegisters(req, res) { + try { + const { teamId, dataUses } = req.body; + const requestingUser = req.user; + const result = await this.dataUseRegisterService.uploadDataUseRegisters(requestingUser, teamId, dataUses); + // Return success + await this.createNotifications(constants.dataUseRegisterNotifications.DATAUSEPENDING, {}, result, teamId); + return res.status(result.uploadedCount > 0 ? 201 : 200).json({ + success: true, + result, + }); + } catch (err) { + // Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async checkDataUseRegister(req, res) { + try { + const { dataUses } = req.body; + + const result = await this.dataUseRegisterService.checkDataUseRegisters(dataUses); + + return res.status(200).json({ success: true, result }); + } catch (err) { + // Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async searchDataUseRegisters(req, res) { + try { + let searchString = req.query.search || ''; + + if (typeof searchString === 'string' && searchString.includes('-') && !searchString.includes('"')) { + const regex = /(?=\S*[-])([a-zA-Z'-]+)/g; + searchString = searchString.replace(regex, '"$1"'); + } + let searchQuery = { $and: [{ activeflag: 'active' }] }; + + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); + + searchQuery = getObjectFilters(searchQuery, req, 'dataUseRegister'); + + const aggregateQuery = [ + { + $lookup: { + from: 'publishers', + localField: 'publisher', + foreignField: '_id', + as: 'publisherDetails', + }, + }, + { + $lookup: { + from: 'tools', + localField: 'gatewayOutputsTools', + foreignField: 'id', + as: 'gatewayOutputsToolsInfo', + }, + }, + { + $lookup: { + from: 'tools', + localField: 'gatewayOutputsPapers', + foreignField: 'id', + as: 'gatewayOutputsPapersInfo', + }, + }, + { + $lookup: { + from: 'users', + let: { + listOfGatewayApplicants: '$gatewayApplicants', + }, + pipeline: [ + { + $match: { + $expr: { + $and: [{ $in: ['$_id', '$$listOfGatewayApplicants'] }], + }, + }, + }, + { $project: { firstname: 1, lastname: 1 } }, + ], + + as: 'gatewayApplicantsDetails', + }, + }, + { + $lookup: { + from: 'tools', + let: { + listOfGatewayDatasets: '$gatewayDatasets', + 
}, + pipeline: [ + { + $match: { + $expr: { + $and: [ + { $in: ['$pid', '$$listOfGatewayDatasets'] }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $project: { pid: 1, name: 1 } }, + ], + as: 'gatewayDatasetsInfo', + }, + }, + { + $addFields: { + publisherInfo: { name: '$publisherDetails.name' }, + }, + }, + { $match: searchQuery }, + ]; + + const result = await DataUseRegister.aggregate(aggregateQuery); + + return res.status(200).json({ success: true, result }); + } catch (err) { + //Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } + + async createNotifications(type, context, dataUseRegister, publisher) { + const { rejectionReason } = context; + const { id, projectTitle, user: uploader } = dataUseRegister; + + switch (type) { + case constants.dataUseRegisterNotifications.DATAUSEAPPROVED: { + const adminTeam = await TeamModel.findOne({ type: 'admin' }) + .populate({ + path: 'users', + }) + .lean(); + const dataUseTeamMembers = teamController.getTeamMembersByRole(adminTeam, constants.roleTypes.ADMIN_DATA_USE); + const emailRecipients = [...dataUseTeamMembers, uploader]; + + const options = { + id, + projectTitle, + }; + + const html = emailGenerator.generateDataUseRegisterApproved(options); + emailGenerator.sendEmail(emailRecipients, constants.hdrukEmail, `A data use has been approved by HDR UK`, html, false); + break; + } + + case constants.dataUseRegisterNotifications.DATAUSEREJECTED: { + const adminTeam = await TeamModel.findOne({ type: 'admin' }) + .populate({ + path: 'users', + }) + .lean(); + + const dataUseTeamMembers = teamController.getTeamMembersByRole(adminTeam, constants.roleTypes.ADMIN_DATA_USE); + const emailRecipients = [...dataUseTeamMembers, uploader]; + + const options = { + id, + projectTitle, + rejectionReason, + }; + + const html = emailGenerator.generateDataUseRegisterRejected(options); + emailGenerator.sendEmail(emailRecipients, constants.hdrukEmail, `A data use has been rejected by HDR UK`, html, false); + break; + } + case constants.dataUseRegisterNotifications.DATAUSEPENDING: { + const adminTeam = await TeamModel.findOne({ type: 'admin' }) + .populate({ + path: 'users', + }) + .lean(); + + const publisherTeam = await TeamModel.findOne({ _id: { $eq: publisher } }) + .populate({ + path: 'publisher', + }) + .lean(); + + const dataUseTeamMembers = teamController.getTeamMembersByRole(adminTeam, constants.roleTypes.ADMIN_DATA_USE); + const emailRecipients = [...dataUseTeamMembers]; + + const { uploaded } = dataUseRegister; + let listOfProjectTitles = []; + uploaded.forEach(dataset => { + listOfProjectTitles.push(dataset.projectTitle); + }); + + const options = { + listOfProjectTitles, + publisher: publisherTeam.publisher.name, + }; + + const html = emailGenerator.generateDataUseRegisterPending(options); + emailGenerator.sendEmail(emailRecipients, constants.hdrukEmail, `New data uses to review`, html, false); + break; + } + } + } + + updateDataUseRegisterCounter(req, res) { + try { + const { id, counter } = req.body; + this.dataUseRegisterService.updateDataUseRegister(id, { counter }); + return res.status(200).json({ success: true }); + } catch (err) { + // Return error response if something goes wrong + logger.logError(err, logCategory); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } +} diff --git 
a/src/resources/dataUseRegister/dataUseRegister.entity.js b/src/resources/dataUseRegister/dataUseRegister.entity.js new file mode 100644 index 00000000..3af49d17 --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.entity.js @@ -0,0 +1,10 @@ +import Entity from '../base/entity'; + +export default class DataUseRegisterClass extends Entity { + constructor(obj) { + super(); + if(!obj.id) obj.id = this.generateId(); + obj.type = 'dataUseRegister'; + Object.assign(this, obj); + } +} diff --git a/src/resources/dataUseRegister/dataUseRegister.model.js b/src/resources/dataUseRegister/dataUseRegister.model.js new file mode 100644 index 00000000..0db2f02e --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.model.js @@ -0,0 +1,124 @@ +import { model, Schema } from 'mongoose'; + +import DataUseRegisterClass from './dataUseRegister.entity'; +import constants from './../../resources/utilities/constants.util'; + +const dataUseRegisterSchema = new Schema( + { + id: { type: Number, required: true }, + type: { type: String, required: true }, + activeflag: { type: String, required: true, enum: Object.values(constants.dataUseRegisterStatus) }, + updatedon: Date, + counter: { type: Number, default: 0 }, + discourseTopicId: Number, + relatedObjects: [ + { + objectId: String, + reason: String, + objectType: String, + pid: String, + user: String, + updated: String, + }, + ], + keywords: [String], + manualUpload: Boolean, + + lastActivity: Date, + projectTitle: { type: String }, + projectId: { type: Schema.Types.ObjectId, ref: 'data_request' }, + projectIdText: String, //Project ID + datasetTitles: [{ type: String }], //Dataset Name(s) + gatewayDatasets: [{ type: String }], //Datasets on the Gateway + nonGatewayDatasets: [{ type: String }], //Dataset Name(s) + publisher: { type: Schema.Types.ObjectId, ref: 'Publisher', required: true }, + user: { type: Schema.Types.ObjectId, ref: 'User', required: true }, + organisationName: { type: String }, //Organisation Name + organisationId: { type: String }, //Organisation ID + organisationSector: String, //Organisation Sector + gatewayApplicants: [ + { + type: Schema.Types.ObjectId, + ref: 'User', + }, + ], + nonGatewayApplicants: [{ type: String }], //Applicant Name(s) + applicantId: String, //Applicant ID + fundersAndSponsors: [{ type: String }], // Funders/Sponsors + accreditedResearcherStatus: String, //Accredited Researcher Status + sublicenceArrangements: String, //Sub-Licence Arrangements (if any)? 
+ laySummary: String, //Lay Summary + publicBenefitStatement: String, //Public Benefit Statement + requestCategoryType: String, //Request Category Type + technicalSummary: String, //Technical Summary + otherApprovalCommittees: [{ type: String }], //Other Approval Committees + projectStartDate: Date, //Project Start Date + projectEndDate: Date, //Project End Date + latestApprovalDate: Date, //Latest Approval Date + dataSensitivityLevel: String, //Data Sensitivity Level + legalBasisForDataArticle6: String, //Legal Basis For Provision Of Data (changed to 'Legal basis for provision of data under Article 6') + legalBasisForDataArticle9: String, //Added 'Lawful conditions for provision of data under Article 9' + dutyOfConfidentiality: String, //Common Law Duty Of Confidentiality + nationalDataOptOut: String, //National Data Opt-Out Applied + requestFrequency: String, //Request Frequency + datasetLinkageDescription: String, //Description Of How The Data Will Be Processed (changed to 'For linked datasets, specify how the linkage will take place') + confidentialDataDescription: String, //Description Of The Confidential Data Being Used + accessDate: Date, //Release/Access Date + accessType: String, //TRE Or Any Other Specified Location + privacyEnhancements: String, //How Has Data Been Processed To Enhance Privacy + gatewayOutputsTools: [{ type: Number }], //Link To Gateway Tool Research Outputs + gatewayOutputsPapers: [{ type: Number }], //Link To Gateway Paper Research Outputs + nonGatewayOutputs: [{ type: String }], //Link To NonGateway Research Outputs + rejectionReason: String, //Reason For Rejecting A Data Use Register + }, + { + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + strict: false, + } +); + +// Load entity class +dataUseRegisterSchema.loadClass(DataUseRegisterClass); + +dataUseRegisterSchema.virtual('publisherInfo', { + ref: 'Publisher', + foreignField: '_id', + localField: 'publisher', + justOne: true, +}); + +dataUseRegisterSchema.virtual('publisherDetails', { + ref: 'Publisher', + foreignField: '_id', + localField: 'publisher', + justOne: true, +}); + +dataUseRegisterSchema.virtual('applicantDetails', { + ref: 'User', + foreignField: '_id', + localField: 'gatewayApplicants', +}); + +dataUseRegisterSchema.virtual('gatewayDatasetsInfo', { + ref: 'Data', + foreignField: 'pid', + localField: 'gatewayDatasets', + options: { sort: { createdAt: -1 } }, +}); + +dataUseRegisterSchema.virtual('gatewayOutputsToolsInfo', { + ref: 'Data', + foreignField: 'id', + localField: 'gatewayOutputsTools', +}); + +dataUseRegisterSchema.virtual('gatewayOutputsPapersInfo', { + ref: 'Data', + foreignField: 'id', + localField: 'gatewayOutputsPapers', +}); + +export const DataUseRegister = model('DataUseRegister', dataUseRegisterSchema); diff --git a/src/resources/dataUseRegister/dataUseRegister.repository.js b/src/resources/dataUseRegister/dataUseRegister.repository.js new file mode 100644 index 00000000..ca74ed61 --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.repository.js @@ -0,0 +1,135 @@ +import Repository from '../base/repository'; +import { DataUseRegister } from './dataUseRegister.model'; +import { isNil } from 'lodash'; + +export default class DataUseRegisterRepository extends Repository { + constructor() { + super(DataUseRegister); + this.dataUseRegister = DataUseRegister; + } + + getDataUseRegister(query, options) { + return this.findOne(query, options); + } + + async getDataUseRegisters(query, options = {}) { + if (options.aggregate) { + const 
searchTerm = (query && query['$and'] && query['$and'].find(exp => !isNil(exp['$text']))) || {}; + + if (searchTerm) { + query['$and'] = query['$and'].filter(exp => !exp['$text']); + } + + const aggregateQuery = [ + { $match: searchTerm }, + { + $lookup: { + from: 'publishers', + localField: 'publisher', + foreignField: '_id', + as: 'publisherDetails', + }, + }, + { + $lookup: { + from: 'tools', + let: { + listOfGatewayDatasets: '$gatewayDatasets', + }, + pipeline: [ + { + $match: { + $expr: { + $and: [ + { $in: ['$pid', '$$listOfGatewayDatasets'] }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $project: { pid: 1, name: 1 } }, + ], + as: 'gatewayDatasetsInfo', + }, + }, + { + $addFields: { + publisherInfo: { name: '$publisherDetails.name' }, + }, + }, + { $match: { $and: [...query['$and']] } }, + ]; + + if (query.fields) { + aggregateQuery.push({ + $project: query.fields.split(',').reduce((obj, key) => { + return { ...obj, [key]: 1 }; + }, {}), + }); + } + return DataUseRegister.aggregate(aggregateQuery); + } else { + const options = { lean: true }; + return this.find(query, options); + } + } + + getDataUseRegisterByApplicationId(applicationId) { + return this.dataUseRegister.findOne({ projectId: applicationId }, 'id').lean(); + } + + async updateDataUseRegister(id, body) { + body.updatedon = Date.now(); + body.lastActivity = Date.now(); + const updatedBody = await this.update(id, body); + return updatedBody; + } + + uploadDataUseRegisters(dataUseRegisters) { + return this.dataUseRegister.insertMany(dataUseRegisters); + } + + async createDataUseRegister(dataUseRegister) { + await this.linkRelatedDataUseRegisters(dataUseRegister); + return await this.create(dataUseRegister); + } + + async linkRelatedDataUseRegisters(dataUseRegister) { + const { relatedObjects = [], userName } = dataUseRegister; + const dataUseRegisterIds = relatedObjects.filter(el => el.objectType === 'dataUseRegister').map(el => el.objectId); + const relatedObject = { + objectId: dataUseRegister.id, + objectType: 'dataUseRegister', + user: userName, + updated: Date.now(), + isLocked: true, + reason: `This data use register was added automatically as it was derived from a newer approved version of the same data access request`, + }; + + await this.dataUseRegister.updateMany( + { id: { $in: dataUseRegisterIds } }, + { + $push: { + relatedObjects: relatedObject, + }, + } + ); + } + + async checkDataUseRegisterExists(projectIdText, projectTitle, organisationName, datasetTitles) { + const duplicatesFound = await this.dataUseRegister.countDocuments({ + $or: [ + { projectIdText }, + { + projectTitle, + organisationName, + datasetTitles, + }, + ], + }); + + return duplicatesFound > 0; + } +} diff --git a/src/resources/dataUseRegister/dataUseRegister.route.js b/src/resources/dataUseRegister/dataUseRegister.route.js new file mode 100644 index 00000000..5b63ad54 --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.route.js @@ -0,0 +1,189 @@ +import express from 'express'; +import DataUseRegisterController from './dataUseRegister.controller'; + +import { dataUseRegisterService } from './dependency'; +import { activityLogService } from '../activitylog/dependency'; +import { logger } from '../utilities/logger'; +import passport from 'passport'; +import constants from './../utilities/constants.util'; +import { isEmpty, isNull, isEqual } from 'lodash'; + +const router = express.Router(); +const dataUseRegisterController = new DataUseRegisterController(dataUseRegisterService, activityLogService); +const 
logCategory = 'dataUseRegister';
+
+function isUserMemberOfTeam(user, teamId) {
+  let { teams } = user;
+  return teams.filter(team => !isNull(team.publisher)).some(team => team.publisher._id.equals(teamId));
+}
+
+function isUserDataUseAdmin(user) {
+  let { teams } = user;
+
+  if (teams) {
+    teams = teams.map(team => {
+      let { publisher, type, members } = team;
+      let member = members.find(member => {
+        return member.memberid.toString() === user._id.toString();
+      });
+      let { roles } = member;
+      return { ...publisher, type, roles };
+    });
+  }
+
+  return teams
+    .filter(team => team.type === constants.teamTypes.ADMIN)
+    .some(team => team.roles.includes(constants.roleTypes.ADMIN_DATA_USE));
+}
+
+const validateUpdateRequest = (req, res, next) => {
+  const { id } = req.params;
+
+  if (!id) {
+    return res.status(400).json({
+      success: false,
+      message: 'You must provide a data use register identifier',
+    });
+  }
+
+  next();
+};
+
+const validateUploadRequest = (req, res, next) => {
+  const { teamId, dataUses } = req.body;
+  let errors = [];
+
+  if (!teamId) {
+    errors.push('You must provide the custodian team identifier to associate the data uses to');
+  }
+
+  if (!dataUses || isEmpty(dataUses)) {
+    errors.push('You must provide data uses to upload');
+  }
+
+  if (!isEmpty(errors)) {
+    return res.status(400).json({
+      success: false,
+      message: errors.join(', '),
+    });
+  }
+
+  next();
+};
+
+const authorizeUpdate = async (req, res, next) => {
+  const requestingUser = req.user;
+  const { id } = req.params;
+  const { projectIdText, datasetTitles } = req.body;
+
+  const dataUseRegister = await dataUseRegisterService.getDataUseRegister(id);
+
+  if (!dataUseRegister) {
+    return res.status(404).json({
+      success: false,
+      message: 'The requested data use register entry could not be found',
+    });
+  }
+
+  const { publisher } = dataUseRegister;
+  const authorised = isUserDataUseAdmin(requestingUser) || isUserMemberOfTeam(requestingUser, publisher._id);
+  if (!authorised) {
+    return res.status(401).json({
+      success: false,
+      message: 'You are not authorised to perform this action',
+    });
+  }
+
+  if (!dataUseRegister.manualUpload) {
+    if (!isEqual(projectIdText, dataUseRegister.projectIdText))
+      return res.status(401).json({
+        success: false,
+        message: 'You are not authorised to update the project ID of an automatic data use register',
+      });
+
+    if (!isEqual(datasetTitles, dataUseRegister.datasetTitles))
+      return res.status(401).json({
+        success: false,
+        message: 'You are not authorised to update the datasets of an automatic data use register',
+      });
+  }
+
+  next();
+};
+
+const authorizeUpload = async (req, res, next) => {
+  const requestingUser = req.user;
+  const { teamId } = req.body;
+
+  const authorised = isUserDataUseAdmin(requestingUser) || isUserMemberOfTeam(requestingUser, teamId);
+
+  if (!authorised) {
+    return res.status(401).json({
+      success: false,
+      message: 'You are not authorised to perform this action',
+    });
+  }
+
+  next();
+};
+
+router.get('/search', logger.logRequestMiddleware({ logCategory, action: 'Search uploaded data uses' }), (req, res) =>
+  dataUseRegisterController.searchDataUseRegisters(req, res)
+);
+
+// @route GET /api/v2/data-use-registers/id
+// @desc Returns a dataUseRegister based on dataUseRegister ID provided
+// @access Public
+router.get('/:id', logger.logRequestMiddleware({ logCategory, action: 'Viewed dataUseRegister data' }), (req, res) =>
+  dataUseRegisterController.getDataUseRegister(req, res)
+);
+
+// @route GET /api/v2/data-use-registers
+// @desc Returns a collection of dataUseRegisters based on supplied query parameters
+// @access Public
+router.get(
+  '/',
+  passport.authenticate('jwt'),
+  logger.logRequestMiddleware({ logCategory, action: 'Viewed dataUseRegisters data' }),
+  (req, res) => dataUseRegisterController.getDataUseRegisters(req, res)
+);
+
+// @route PATCH /api/v2/data-use-registers/counter
+// @desc Updates the data use register counter for page views
+// @access Public
+router.patch('/counter', logger.logRequestMiddleware({ logCategory, action: 'Data use counter update' }), (req, res) =>
+  dataUseRegisterController.updateDataUseRegisterCounter(req, res)
+);
+
+// @route PATCH /api/v2/data-use-registers/id
+// @desc Update the content of the data use register based on dataUseRegister ID provided
+// @access Public
+router.patch(
+  '/:id',
+  passport.authenticate('jwt'),
+  validateUpdateRequest,
+  authorizeUpdate,
+  logger.logRequestMiddleware({ logCategory, action: 'Updated dataUseRegister data' }),
+  (req, res) => dataUseRegisterController.updateDataUseRegister(req, res)
+);
+
+// @route POST /api/v2/data-use-registers/check
+// @desc Check the submitted data uses for duplicates and return links to Gateway entities (datasets, users)
+// @access Public
+router.post('/check', passport.authenticate('jwt'), logger.logRequestMiddleware({ logCategory, action: 'Check data uses' }), (req, res) =>
+  dataUseRegisterController.checkDataUseRegister(req, res)
+);
+
+// @route POST /api/v2/data-use-registers/upload
+// @desc Accepts a bulk upload of data uses with built-in duplicate checking and rejection
+// @access Public
+router.post(
+  '/upload',
+  passport.authenticate('jwt'),
+  validateUploadRequest,
+  authorizeUpload,
+  logger.logRequestMiddleware({ logCategory, action: 'Bulk uploaded data uses' }),
+  (req, res) => dataUseRegisterController.uploadDataUseRegisters(req, res)
+);
+
+module.exports = router;
diff --git a/src/resources/dataUseRegister/dataUseRegister.service.js b/src/resources/dataUseRegister/dataUseRegister.service.js
new file mode 100644
index 00000000..982be6c5
--- /dev/null
+++ b/src/resources/dataUseRegister/dataUseRegister.service.js
@@ -0,0 +1,549 @@
+/* eslint-disable class-methods-use-this */
+import dataUseRegisterUtil from './dataUseRegister.util';
+import DataUseRegister from './dataUseRegister.entity';
+import constants from '../utilities/constants.util';
+import { isEmpty, isNil, isEqual, isUndefined } from 'lodash';
+import moment from 'moment';
+
+export default class DataUseRegisterService {
+  constructor(dataUseRegisterRepository) {
+    this.dataUseRegisterRepository = dataUseRegisterRepository;
+  }
+
+  getDataUseRegister(id, query = {}, options = {}) {
+    // Protect for no id passed
+    if (!id) return;
+
+    query = { ...query, id };
+    return this.dataUseRegisterRepository.getDataUseRegister(query, options);
+  }
+
+  getDataUseRegisters(query = {}, options = {}) {
+    return this.dataUseRegisterRepository.getDataUseRegisters(query, options);
+  }
+
+  updateDataUseRegister(id, body = {}) {
+    // Protect for no id passed
+    if (!id) return;
+
+    return this.dataUseRegisterRepository.updateDataUseRegister({ _id: id }, body);
+  }
+
+  /**
+   * Upload Data Use Registers
+   *
+   * @desc Accepts multiple data uses to upload and a team identifier indicating which Custodian team to add the data uses to.
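+   * Duplicates within the payload are removed first, then any entries already present in the database are dropped before the remainder are inserted.
+   * The example below is an illustrative sketch only; the variable names and counts are placeholders, not real records.
+   * @example
+   * const result = await dataUseRegisterService.uploadDataUseRegisters(requestingUser, teamId, dataUses);
+   * // result => { uploadedCount: 1, duplicateCount: 2, uploaded: [ ...saved data use register documents ] }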
+   *
+   * @param {Object} creatorUser User object of the authenticated user creating the data use registers
+   * @param {String} teamId Custodian team identifier to associate the uploaded data uses with
+   * @param {Array} dataUseRegisterUploads Array of data use objects to upload
+   * @returns {Object} Object containing the details of the upload operation including number of duplicates found in payload, database and number successfully added
+   */
+  async uploadDataUseRegisters(creatorUser, teamId, dataUseRegisterUploads = []) {
+    const dedupedDataUseRegisters = this.filterDuplicateDataUseRegisters(dataUseRegisterUploads);
+
+    const dataUseRegisters = await dataUseRegisterUtil.buildDataUseRegisters(creatorUser, teamId, dedupedDataUseRegisters);
+
+    const newDataUseRegisters = await this.filterExistingDataUseRegisters(dataUseRegisters);
+
+    const uploadedDataUseRegisters = await this.dataUseRegisterRepository.uploadDataUseRegisters(newDataUseRegisters);
+
+    return {
+      uploadedCount: uploadedDataUseRegisters.length,
+      duplicateCount: dataUseRegisterUploads.length - newDataUseRegisters.length,
+      uploaded: uploadedDataUseRegisters,
+    };
+  }
+
+  /**
+   * Filter Duplicate Data Uses
+   *
+   * @desc Accepts multiple data uses and outputs a unique list of data uses based on each entity's properties.
+   * A duplicate project id automatically indicates a duplicate entry as the id must be unique.
+   * Alternatively, a combination of matching title, summary, organisation name, dataset titles and latest approval date indicates a duplicate entry.
+   * @param {Array} dataUses Array of data use objects to filter until uniqueness exists
+   * @returns {Array} Filtered array of data uses assumed unique based on filter criteria
+   */
+  filterDuplicateDataUseRegisters(dataUses) {
+    return dataUses.reduce((arr, dataUse) => {
+      const isDuplicate = arr.some(
+        el =>
+          el.projectIdText === dataUse.projectIdText ||
+          (el.projectTitle === dataUse.projectTitle &&
+            el.organisationName === dataUse.organisationName &&
+            el.datasetTitles === dataUse.datasetTitles)
+      );
+      if (!isDuplicate) arr = [...arr, dataUse];
+      return arr;
+    }, []);
+  }
+
+  /**
+   * Filter Existing Data Uses
+   *
+   * @desc Accepts multiple data uses, verifying each in turn is considered 'new' to the database, then outputs the list of data uses.
+   * A duplicate project id automatically indicates a duplicate entry as the id must be unique.
+   * Alternatively, a combination of matching title, summary, organisation name and dataset titles indicates a duplicate entry.
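+   * As a rough sketch of the criteria (based on the repository's checkDataUseRegisterExists), an incoming data use is treated as already existing when any stored document matches:
+   * @example
+   * { $or: [{ projectIdText }, { projectTitle, organisationName, datasetTitles }] }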
+   * @param {Array} dataUses Array of data use objects to iterate through and check for existence in database
+   * @returns {Array} Filtered array of data uses assumed to be 'new' to the database based on filter criteria
+   */
+  async filterExistingDataUseRegisters(dataUses) {
+    const newDataUses = [];
+
+    for (const dataUse of dataUses) {
+      const { linkedDatasets = [], namedDatasets = [] } = await dataUseRegisterUtil.getLinkedDatasets(
+        dataUse.datasetNames &&
+          dataUse.datasetNames
+            .toString()
+            .split(',')
+            .map(el => {
+              if (!isEmpty(el)) return el.trim();
+            })
+      );
+
+      const datasetTitles = [...linkedDatasets.map(dataset => dataset.name), ...namedDatasets];
+
+      const { projectIdText, projectTitle, organisationName } = dataUse;
+
+      const exists = await this.dataUseRegisterRepository.checkDataUseRegisterExists(
+        projectIdText,
+        projectTitle,
+        organisationName,
+        datasetTitles
+      );
+      if (exists === false) newDataUses.push(dataUse);
+    }
+
+    return newDataUses;
+  }
+
+  /**
+   * Check Data Use Registers
+   *
+   * @desc Accepts multiple data uses, resolving the linked Gateway entities (datasets, applicants and research outputs) for each one and verifying whether it already exists in the database.
+   * A duplicate project id automatically indicates a duplicate entry as the id must be unique.
+   * Alternatively, a combination of matching title, summary, organisation name and dataset titles indicates a duplicate entry.
+   * @param {Array} dataUses Array of data use objects to iterate through and check for existence in database
+   * @returns {Array} Array of data uses enriched with their linked entities and a flag indicating whether each one is a duplicate
+   */
+  async checkDataUseRegisters(dataUses = []) {
+    const dataUsesChecks = [];
+
+    for (const obj of dataUses) {
+      const { linkedDatasets = [], namedDatasets = [] } = await dataUseRegisterUtil.getLinkedDatasets(
+        obj.datasetNames &&
+          obj.datasetNames
+            .toString()
+            .split(',')
+            .map(el => {
+              if (!isEmpty(el)) return el.trim();
+            })
+      );
+
+      const { gatewayApplicants, nonGatewayApplicants } = await dataUseRegisterUtil.getLinkedApplicants(
+        obj.applicantNames &&
+          obj.applicantNames
+            .toString()
+            .split(',')
+            .map(el => {
+              if (!isEmpty(el)) return el.trim();
+            })
+      );
+
+      const { gatewayOutputsTools, gatewayOutputsPapers, nonGatewayOutputs } = await dataUseRegisterUtil.getLinkedOutputs(
+        obj.researchOutputs &&
+          obj.researchOutputs
+            .toString()
+            .split(',')
+            .map(el => {
+              if (!isEmpty(el)) return el.trim();
+            })
+      );
+
+      const { projectIdText, projectTitle, organisationName } = obj;
+      const datasetTitles = [...linkedDatasets.map(dataset => dataset.name), ...namedDatasets];
+
+      const exists = await this.dataUseRegisterRepository.checkDataUseRegisterExists(
+        projectIdText,
+        projectTitle,
+        organisationName,
+        datasetTitles
+      );
+
+      // Add new data use with linked entities
+      dataUsesChecks.push({
+        projectIdText: obj.projectIdText,
+        projectTitle: obj.projectTitle,
+        laySummary: obj.laySummary,
+        organisationName: obj.organisationName,
+        datasetTitles: obj.datasetTitles,
+        latestApprovalDate: obj.latestApprovalDate,
+        linkedDatasets,
+        namedDatasets,
+        gatewayApplicants,
+        nonGatewayApplicants,
+        gatewayOutputsTools,
+        gatewayOutputsPapers,
+        nonGatewayOutputs,
+        isDuplicated: exists,
+      });
+    }
+
+    return dataUsesChecks;
+  }
+
+  /**
+   * Create Data Use Register
+   *
+   * @desc Accepts a single data access request record and automatically generates a data use register record in the 'inReview' state.
+ * Related resources, project Id, origin, uploader, applicant names, and answers are determined from the application provided. + * @param {Object} accessRecord Data access request model used to create the data use register + * @returns {Object} Returns the saved data use register + */ + async createDataUseRegister(creatorUser, accessRecord) { + const { + _id: applicationId, + projectId, + publisherObj: { _id: publisher }, + datasets, + authors, + mainApplicant, + dateFinalStatus, + questionAnswers, + versionTree, + questionAnswers: { + safepeopleprimaryapplicantorganisationname: organisationName, + safepeopleprimaryapplicantorcid: applicantId, + safeprojectprojectdetailstitle: projectTitle, + safepeopleprimaryapplicantaccreditedresearcher: accreditedResearcherStatus, + safeprojectprojectdetailslaysummary: laySummary, + safeprojectprojectdetailspublicbenefitimpact: publicBenefitStatement, + safeprojectprojectdetailsresearchprojectsummarykeywords: keywords, + ['safeproject-projectdetails-startdate']: startDate, + ['safeproject-projectdetails-enddate']: endDate, + safedatastorageandprocessingaccessmethodtrustedresearchenvironment: accessType, + safedataconfidentialityavenuelegalbasisconfidentialinformation: dutyOfConfidentiality, + safedataotherdatasetslinkadditionaldatasetslinkagedetails: datasetLinkageDetails = '', + safedataotherdatasetsrisksmitigations: datasetLinkageRiskMitigation = '', + safedatalawfulbasisgdprarticle6basis: legalBasisForDataArticle6, + safedatalawfulbasisgdprarticle9conditions: legalBasisForDataArticle9, + safedatadatafieldsdatarefreshrequired: dataRefreshRequired = '', + safeoutputsoutputsdisseminationplansdisclosurecontrolpolicy: privacyEnhancements, + }, + } = accessRecord; + + const fundersAndSponsors = dataUseRegisterUtil.extractFundersAndSponsors(questionAnswers); + const { gatewayApplicants = [], nonGatewayApplicants = [] } = dataUseRegisterUtil.extractFormApplicants( + [...authors, mainApplicant], + questionAnswers + ); + const { linkedDatasets = [], namedDatasets = [] } = await dataUseRegisterUtil.getLinkedDatasets([ + ...datasets.map(dataset => dataset.name), + ]); + const datasetTitles = [...linkedDatasets.map(dataset => dataset.name), ...namedDatasets]; + const relatedDatasets = dataUseRegisterUtil.buildRelatedObjects(creatorUser, 'dataset', datasets, false); + const relatedApplications = await this.buildRelatedDataUseRegisters(creatorUser, versionTree, applicationId); + const datasetLinkageDescription = `${datasetLinkageDetails.toString().trim()} ${datasetLinkageRiskMitigation.toString().trim()}`; + const requestFrequency = dataRefreshRequired === 'Yes' ? 'Recurring' : dataRefreshRequired === 'No' ? 'One-off' : ''; + + const projectStartDate = moment(startDate, 'DD/MM/YYYY'); + const projectEndDate = moment(endDate, 'DD/MM/YYYY'); + const latestApprovalDate = moment(dateFinalStatus); + + const dataUseRegister = new DataUseRegister({ + publisher, + projectIdText: projectId, + projectId: applicationId, + applicantId: applicantId ? applicantId.trim() : '', + accreditedResearcherStatus: isNil(accreditedResearcherStatus) ? 
'Unknown' : accreditedResearcherStatus.toString().trim(), + ...(projectTitle && { projectTitle: projectTitle.toString().trim() }), + ...(organisationName && { organisationName: organisationName.toString().trim() }), + ...(laySummary && { laySummary: laySummary.toString().trim() }), + ...(publicBenefitStatement && { publicBenefitStatement: publicBenefitStatement.toString().trim() }), + ...(accessType && { accessType: accessType.toString().trim() }), + ...(dutyOfConfidentiality && { dutyOfConfidentiality: dutyOfConfidentiality.toString().trim() }), + ...(!isEmpty(datasetLinkageDescription) && { datasetLinkageDescription: datasetLinkageDescription.trim() }), + ...(!isEmpty(requestFrequency) && { requestFrequency }), + ...(legalBasisForDataArticle6 && { legalBasisForDataArticle6: legalBasisForDataArticle6.toString().trim() }), + ...(legalBasisForDataArticle9 && { legalBasisForDataArticle9: legalBasisForDataArticle9.toString().trim() }), + ...(privacyEnhancements && { privacyEnhancements: privacyEnhancements.toString().trim() }), + ...(projectStartDate.isValid() && { projectStartDate }), + ...(projectEndDate.isValid() && { projectEndDate }), + ...(latestApprovalDate.isValid() && { latestApprovalDate }), + ...(!isEmpty(datasetTitles) && { datasetTitles }), + ...(!isEmpty(linkedDatasets) && { gatewayDatasets: linkedDatasets.map(dataset => dataset.pid) }), + ...(!isEmpty(namedDatasets) && { nonGatewayDatasets: namedDatasets }), + keywords: isNil(keywords) || isEmpty(keywords) ? [] : keywords.split(' ').slice(0, 6), + fundersAndSponsors, + gatewayApplicants, + nonGatewayApplicants, + relatedObjects: [...relatedDatasets, ...relatedApplications], + activeflag: 'active', + user: creatorUser._id, + userName: `${creatorUser.firstname} ${creatorUser.lastname}`, + updatedon: Date.now(), + lastActivity: Date.now(), + manualUpload: false, + }); + + this.dataUseRegisterRepository.createDataUseRegister(dataUseRegister); + } + + /** + * Build Related Data Use Registers + * + * @desc Accepts the requesting user, an application identifier and the same application's version tree. + * The function uses this information to extract related applications versions which will have data use registers already in existence. + * Upon finding related data use registers, related objects are created and returned. 
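+   * Each related object pushed to the result is, as an illustrative sketch only (the values below are placeholders), shaped like:
+   * @example
+   * {
+   *   objectId: 9876543210,
+   *   objectType: 'dataUseRegister',
+   *   user: 'Jane Doe',
+   *   updated: 1636718629714,
+   *   isLocked: true,
+   *   reason: 'This data use register was added automatically as it was derived from a previously approved version of the same data access request',
+   * }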
+ * @param {Object} creatorUser The requesting user calling this function + * @param {Object} versionTree An object data structure containing the linkages from this application to other versions of the application + * @returns {Array} Returns an array of related objects which are of the data use register type + */ + async buildRelatedDataUseRegisters(creatorUser, versionTree, applicationId) { + const relatedDataUseRegisters = []; + const { firstname, lastname } = creatorUser; + const ignoredApplicationTypes = [constants.submissionTypes.INPROGRESS, constants.submissionTypes.RESUBMISSION]; + + for (const key of Object.keys(versionTree)) { + if ( + versionTree[key].applicationType && + !ignoredApplicationTypes.includes(versionTree[key].applicationType) && + versionTree[key].toString() !== applicationId.toString() + ) { + const { applicationId } = versionTree[key]; + const dataUseRegister = await this.dataUseRegisterRepository.getDataUseRegisterByApplicationId(applicationId); + + if (dataUseRegister) { + relatedDataUseRegisters.push({ + objectId: dataUseRegister.id, + objectType: 'dataUseRegister', + user: `${firstname} ${lastname}`, + updated: Date.now(), + isLocked: true, + reason: `This data use register was added automatically as it was derived from a previously approved version of the same data access request`, + }); + } + } + } + + return relatedDataUseRegisters; + } + + async buildUpdateObject(dataUseRegister, dataUseRegisterPayload, user) { + let updateObj = {}; + + const { + activeflag, + rejectionReason, + discourseTopicId, + relatedObjects, + keywords, + projectTitle, + projectId, + projectIdText, + datasetTitles, + gatewayDatasets, + nonGatewayDatasets, + organisationName, + organisationId, + organisationSector, + gatewayApplicants, + nonGatewayApplicants, + applicantId, + fundersAndSponsors, + accreditedResearcherStatus, + sublicenceArrangements, + laySummary, + publicBenefitStatement, + requestCategoryType, + technicalSummary, + otherApprovalCommittees, + projectStartDate, + projectEndDate, + latestApprovalDate, + dataSensitivityLevel, + legalBasisForDataArticle6, + legalBasisForDataArticle9, + dutyOfConfidentiality, + nationalDataOptOut, + requestFrequency, + datasetLinkageDescription, + confidentialDataDescription, + accessDate, + accessType, + privacyEnhancements, + gatewayOutputs, + nonGatewayOutputs, + } = dataUseRegisterPayload; + + const gatewayDatasetPids = await dataUseRegisterUtil.getDatasetsByPids(gatewayDatasets); + const gatewayApplicantIDs = await dataUseRegisterUtil.getAppplicantByIds(gatewayApplicants); + const { gatewayToolIDs, gatewayPaperIDs } = await dataUseRegisterUtil.getSafeOutputsByIds(gatewayOutputs || []); + + let gatewayApplicantIDsList = []; + gatewayApplicantIDs.forEach(applicant => { + gatewayApplicantIDsList.push(applicant._id); + }); + if (!isUndefined(gatewayApplicants) && !isEqual(gatewayApplicantIDsList, dataUseRegister.gatewayApplicants)) + updateObj.gatewayApplicants = gatewayApplicantIDsList; + + let gatewayOutputsToolIDsList = [], + gatewayOutputsToolIDsListRelatedResource = []; + gatewayToolIDs.forEach(tool => { + gatewayOutputsToolIDsList.push(tool.id); + gatewayOutputsToolIDsListRelatedResource.push({ id: tool.id.toString() }); + }); + if (!isUndefined(gatewayOutputs) && !isEqual(gatewayOutputsToolIDsList, dataUseRegister.gatewayOutputsTools)) + updateObj.gatewayOutputsTools = gatewayOutputsToolIDsList; + + let gatewayOutputsPaperIDsList = [], + gatewayOutputsPaperIDsListRelatedResource = []; + gatewayPaperIDs.forEach(paper => { + 
gatewayOutputsPaperIDsList.push(paper.id); + gatewayOutputsPaperIDsListRelatedResource.push({ id: paper.id.toString() }); + }); + if (!isUndefined(gatewayOutputs) && !isEqual(gatewayOutputsPaperIDsList, dataUseRegister.gatewayOutputsPapers)) + updateObj.gatewayOutputsPapers = gatewayOutputsPaperIDsList; + + let gatewayDatasetPidsListRelatedResource = []; + gatewayDatasetPids.forEach(dataset => { + gatewayDatasetPidsListRelatedResource.push({ id: dataset.datasetid, pid: dataset.pid }); + }); + + let automaticRelatedResources = [ + ...dataUseRegisterUtil.buildRelatedObjects(user, 'dataset', gatewayDatasetPidsListRelatedResource, false, true), + ...dataUseRegisterUtil.buildRelatedObjects(user, 'tool', gatewayOutputsToolIDsListRelatedResource, false, true), + ...dataUseRegisterUtil.buildRelatedObjects(user, 'paper', gatewayOutputsPaperIDsListRelatedResource, false, true), + ]; + + //dataUseRegister.relatedObjects + + //Loop through automaticRelatedResources to see if it exists, if not add to another array + + let newAutomaticRelatedResources = []; + automaticRelatedResources.forEach(automaticResource => { + if (!dataUseRegister.relatedObjects.find(resource => resource.objectId === automaticResource.objectId)) { + newAutomaticRelatedResources.push(automaticResource); + } + }); + + let newManualRelatedResources = []; + relatedObjects.forEach(manualResource => { + if (!dataUseRegister.relatedObjects.find(resource => resource.objectId === manualResource.objectId)) { + if (!manualResource.isLocked) newManualRelatedResources.push(manualResource); + } + }); + + let relatedResourcesWithRemovedOldAutomaticEntries = []; + dataUseRegister.relatedObjects.forEach(resource => { + if (resource.isLocked && automaticRelatedResources.find(automaticResource => automaticResource.objectId === resource.objectId)) { + relatedResourcesWithRemovedOldAutomaticEntries.push(resource); + } else if (!resource.isLocked) { + relatedResourcesWithRemovedOldAutomaticEntries.push(resource); + } + }); + + //relatedObjects + + updateObj.relatedObjects = [ + ...relatedResourcesWithRemovedOldAutomaticEntries, + ...newAutomaticRelatedResources, + ...newManualRelatedResources, + ]; + + const fundersAndSponsorsList = + fundersAndSponsors && + fundersAndSponsors + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }); + if (!isEmpty(fundersAndSponsorsList) && !isEqual(fundersAndSponsorsList, dataUseRegister.fundersAndSponsors)) + updateObj.fundersAndSponsors = fundersAndSponsorsList; + + const otherApprovalCommitteesList = + otherApprovalCommittees && + otherApprovalCommittees + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }); + if (!isEmpty(otherApprovalCommitteesList) && !isEqual(otherApprovalCommitteesList, dataUseRegister.otherApprovalCommittees)) + updateObj.otherApprovalCommittees = otherApprovalCommitteesList; + + if (!isUndefined(activeflag) && !isEqual(activeflag, dataUseRegister.activeflag)) updateObj.activeflag = activeflag; + if (!isUndefined(rejectionReason) && !isEqual(rejectionReason, dataUseRegister.rejectionReason)) + updateObj.rejectionReason = rejectionReason; + if (!isUndefined(discourseTopicId) && !isEqual(discourseTopicId, dataUseRegister.discourseTopicId)) + updateObj.discourseTopicId = discourseTopicId; + if (!isUndefined(keywords) && !isEqual(keywords, dataUseRegister.keywords)) updateObj.keywords = keywords; + if (!isUndefined(projectTitle) && !isEqual(projectTitle, dataUseRegister.projectTitle)) updateObj.projectTitle = projectTitle; + if 
(!isUndefined(projectId) && !isEqual(projectId, dataUseRegister.projectId)) updateObj.projectId = projectId;
+    if (!isUndefined(projectIdText) && !isEqual(projectIdText, dataUseRegister.projectIdText)) updateObj.projectIdText = projectIdText;
+    if (!isUndefined(datasetTitles) && !isEqual(datasetTitles, dataUseRegister.datasetTitles)) updateObj.datasetTitles = datasetTitles;
+    if (!isUndefined(gatewayDatasets) && !isEqual(gatewayDatasets, dataUseRegister.gatewayDatasets))
+      updateObj.gatewayDatasets = gatewayDatasets;
+    if (!isUndefined(nonGatewayDatasets) && !isEqual(nonGatewayDatasets, dataUseRegister.nonGatewayDatasets))
+      updateObj.nonGatewayDatasets = nonGatewayDatasets;
+    if (!isUndefined(organisationName) && !isEqual(organisationName, dataUseRegister.organisationName))
+      updateObj.organisationName = organisationName;
+    if (!isUndefined(organisationId) && !isEqual(organisationId, dataUseRegister.organisationId)) updateObj.organisationId = organisationId;
+    if (!isUndefined(organisationSector) && !isEqual(organisationSector, dataUseRegister.organisationSector))
+      updateObj.organisationSector = organisationSector;
+    if (!isUndefined(nonGatewayApplicants) && !isEqual(nonGatewayApplicants, dataUseRegister.nonGatewayApplicants))
+      updateObj.nonGatewayApplicants = nonGatewayApplicants;
+    if (!isUndefined(applicantId) && !isEqual(applicantId, dataUseRegister.applicantId)) updateObj.applicantId = applicantId;
+    if (!isUndefined(accreditedResearcherStatus) && !isEqual(accreditedResearcherStatus, dataUseRegister.accreditedResearcherStatus))
+      updateObj.accreditedResearcherStatus = accreditedResearcherStatus;
+    if (!isUndefined(sublicenceArrangements) && !isEqual(sublicenceArrangements, dataUseRegister.sublicenceArrangements))
+      updateObj.sublicenceArrangements = sublicenceArrangements;
+    if (!isUndefined(laySummary) && !isEqual(laySummary, dataUseRegister.laySummary)) updateObj.laySummary = laySummary;
+    if (!isUndefined(publicBenefitStatement) && !isEqual(publicBenefitStatement, dataUseRegister.publicBenefitStatement))
+      updateObj.publicBenefitStatement = publicBenefitStatement;
+    if (!isUndefined(requestCategoryType) && !isEqual(requestCategoryType, dataUseRegister.requestCategoryType))
+      updateObj.requestCategoryType = requestCategoryType;
+    if (!isUndefined(technicalSummary) && !isEqual(technicalSummary, dataUseRegister.technicalSummary))
+      updateObj.technicalSummary = technicalSummary;
+    if (
+      !isEmpty(projectStartDate) &&
+      !isEqual(moment(projectStartDate).format('YYYY-MM-DD'), moment(dataUseRegister.projectStartDate).format('YYYY-MM-DD'))
+    )
+      updateObj.projectStartDate = moment(projectStartDate, 'YYYY-MM-DD');
+    if (
+      !isEmpty(projectEndDate) &&
+      !isEqual(moment(projectEndDate).format('YYYY-MM-DD'), moment(dataUseRegister.projectEndDate).format('YYYY-MM-DD'))
+    )
+      updateObj.projectEndDate = moment(projectEndDate, 'YYYY-MM-DD');
+    if (
+      !isEmpty(latestApprovalDate) &&
+      !isEqual(moment(latestApprovalDate).format('YYYY-MM-DD'), moment(dataUseRegister.latestApprovalDate).format('YYYY-MM-DD'))
+    )
+      updateObj.latestApprovalDate = moment(latestApprovalDate, 'YYYY-MM-DD');
+    if (!isUndefined(dataSensitivityLevel) && !isEqual(dataSensitivityLevel, dataUseRegister.dataSensitivityLevel))
+      updateObj.dataSensitivityLevel = dataSensitivityLevel;
+    if (!isUndefined(legalBasisForDataArticle6) && !isEqual(legalBasisForDataArticle6,
dataUseRegister.legalBasisForDataArticle6)) + updateObj.legalBasisForDataArticle6 = legalBasisForDataArticle6; + if (!isUndefined(legalBasisForDataArticle9) && !isEqual(legalBasisForDataArticle9, dataUseRegister.legalBasisForDataArticle9)) + updateObj.legalBasisForDataArticle9 = legalBasisForDataArticle9; + if (!isUndefined(dutyOfConfidentiality) && !isEqual(dutyOfConfidentiality, dataUseRegister.dutyOfConfidentiality)) + updateObj.dutyOfConfidentiality = dutyOfConfidentiality; + if (!isUndefined(nationalDataOptOut) && !isEqual(nationalDataOptOut, dataUseRegister.nationalDataOptOut)) + updateObj.nationalDataOptOut = nationalDataOptOut; + if (!isUndefined(requestFrequency) && !isEqual(requestFrequency, dataUseRegister.requestFrequency)) + updateObj.requestFrequency = requestFrequency; + if (!isUndefined(datasetLinkageDescription) && !isEqual(datasetLinkageDescription, dataUseRegister.datasetLinkageDescription)) + updateObj.datasetLinkageDescription = datasetLinkageDescription; + if (!isUndefined(confidentialDataDescription) && !isEqual(confidentialDataDescription, dataUseRegister.confidentialDataDescription)) + updateObj.confidentialDataDescription = confidentialDataDescription; + if (!isEmpty(accessDate) && !isEqual(moment(accessDate).format('YYYY-MM-DD'), moment(dataUseRegister.accessDate).format('YYYY-MM-DD'))) + updateObj.accessDate = moment(accessDate, 'YYYY-MM-DD'); + if (!isUndefined(accessType) && !isEqual(accessType, dataUseRegister.accessType)) updateObj.accessType = accessType; + if (!isUndefined(privacyEnhancements) && !isEqual(privacyEnhancements, dataUseRegister.privacyEnhancements)) + updateObj.privacyEnhancements = privacyEnhancements; + if (!isUndefined(nonGatewayOutputs) && !isEqual(nonGatewayOutputs, dataUseRegister.nonGatewayOutputs)) + updateObj.nonGatewayOutputs = nonGatewayOutputs; + + return updateObj; + } +} diff --git a/src/resources/dataUseRegister/dataUseRegister.util.js b/src/resources/dataUseRegister/dataUseRegister.util.js new file mode 100644 index 00000000..19891239 --- /dev/null +++ b/src/resources/dataUseRegister/dataUseRegister.util.js @@ -0,0 +1,349 @@ +import moment from 'moment'; +import { isEmpty } from 'lodash'; +import DataUseRegister from './dataUseRegister.entity'; +import { getUsersByIds } from '../user/user.repository'; +import { toolService } from '../tool/v2/dependency'; +import { paperService } from '../paper/dependency'; +import { datasetService } from '../dataset/dependency'; + +/** + * Build Data Use Registers + * + * @desc Accepts a creator user object, the custodian/publisher team identifier to create the data use registers against and an array of data use POJOs to map to data use models. + * The function drops out invalid dates, empty fields and removes white space from all strings before constructing the model instances. 
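+ * A single upload row is expected to look roughly like the sketch below; the values are placeholders and only a subset of the supported columns is shown.
+ * @example
+ * {
+ *   projectIdText: 'PROJECT-0001',
+ *   projectTitle: 'Example project title',
+ *   organisationName: 'Example organisation',
+ *   datasetNames: 'dataset one,http://localhost:3000/dataset/f725187f-7352-482b-a43b-64ebc96e66f2',
+ *   applicantNames: 'applicant one,http://localhost:3000/person/8495781222000176',
+ *   latestApprovalDate: '2021-09-24',
+ * }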
+ * @param {String} creatorUser User object from the authenticated request who is creating the data use registers + * @param {String} teamId Custodian/publisher team identifier to identify who to create the data uses against + * @param {String} dataUses Array of data use register shaped POJOs to map to data use models + * @returns {Array} Array of data use register models + */ +const buildDataUseRegisters = async (creatorUser, teamId, dataUses = []) => { + const dataUseRegisters = []; + + for (const obj of dataUses) { + // Handle dataset linkages + const { linkedDatasets = [], namedDatasets = [] } = await getLinkedDatasets( + obj.datasetNames && + obj.datasetNames + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }) + ); + const datasetTitles = [...linkedDatasets.map(dataset => dataset.name), ...namedDatasets]; + + // Handle applicant linkages + const { gatewayApplicants, nonGatewayApplicants } = await getLinkedApplicants( + obj.applicantNames && + obj.applicantNames + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }) + ); + + const { gatewayOutputsTools, gatewayOutputsPapers, nonGatewayOutputs } = await getLinkedOutputs( + obj.researchOutputs && + obj.researchOutputs + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }) + ); + + // Create related objects + let relatedObjects = [ + ...buildRelatedObjects(creatorUser, 'dataset', linkedDatasets), + ...buildRelatedObjects(creatorUser, 'tool', gatewayOutputsTools), + ...buildRelatedObjects(creatorUser, 'paper', gatewayOutputsPapers), + ]; + + // Handle comma separated fields + const fundersAndSponsors = + obj.fundersAndSponsors && + obj.fundersAndSponsors + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }); + const otherApprovalCommittees = + obj.otherApprovalCommittees && + obj.otherApprovalCommittees + .toString() + .split(',') + .map(el => { + if (!isEmpty(el)) return el.trim(); + }); + + // Handle expected dates + const projectStartDate = moment(obj.projectStartDate, 'YYYY-MM-DD'); + const projectEndDate = moment(obj.projectEndDate, 'YYYY-MM-DD'); + const latestApprovalDate = moment(obj.latestApprovalDate, 'YYYY-MM-DD'); + const accessDate = moment(obj.accessDate, 'YYYY-MM-DD'); + + // Clean and assign to model + dataUseRegisters.push( + new DataUseRegister({ + ...(obj.projectTitle && { projectTitle: obj.projectTitle.toString().trim() }), + ...(obj.projectIdText && { projectIdText: obj.projectIdText.toString().trim() }), + ...(obj.organisationName && { organisationName: obj.organisationName.toString().trim() }), + ...(obj.organisationId && { organisationId: obj.organisationId.toString().trim() }), + ...(obj.organisationSector && { organisationSector: obj.organisationSector.toString().trim() }), + ...(obj.applicantId && { applicantId: obj.applicantId.toString().trim() }), + ...(obj.accreditedResearcherStatus && { accreditedResearcherStatus: obj.accreditedResearcherStatus.toString().trim() }), + ...(obj.sublicenceArrangements && { sublicenceArrangements: obj.sublicenceArrangements.toString().trim() }), + ...(obj.laySummary && { laySummary: obj.laySummary.toString().trim() }), + ...(obj.publicBenefitStatement && { publicBenefitStatement: obj.publicBenefitStatement.toString().trim() }), + ...(obj.requestCategoryType && { requestCategoryType: obj.requestCategoryType.toString().trim() }), + ...(obj.technicalSummary && { technicalSummary: obj.technicalSummary.toString().trim() }), + ...(obj.dataSensitivityLevel && { 
dataSensitivityLevel: obj.dataSensitivityLevel.toString().trim() }), + ...(obj.legalBasisForDataArticle6 && { legalBasisForDataArticle6: obj.legalBasisForDataArticle6.toString().trim() }), + ...(obj.legalBasisForDataArticle9 && { legalBasisForDataArticle9: obj.legalBasisForDataArticle9.toString().trim() }), + ...(obj.nationalDataOptOut && { nationalDataOptOut: obj.nationalDataOptOut.toString().trim() }), + ...(obj.requestFrequency && { requestFrequency: obj.requestFrequency.toString().trim() }), + ...(obj.datasetLinkageDescription && { datasetLinkageDescription: obj.datasetLinkageDescription.toString().trim() }), + ...(obj.confidentialDataDescription && { confidentialDataDescription: obj.confidentialDataDescription.toString().trim() }), + ...(obj.accessType && { accessType: obj.accessType.toString().trim() }), + ...(obj.privacyEnhancements && { privacyEnhancements: obj.privacyEnhancements.toString().trim() }), + ...(obj.dutyOfConfidentiality && { dutyOfConfidentiality: obj.dutyOfConfidentiality.toString().trim() }), + ...(projectStartDate.isValid() && { projectStartDate }), + ...(projectEndDate.isValid() && { projectEndDate }), + ...(latestApprovalDate.isValid() && { latestApprovalDate }), + ...(accessDate.isValid() && { accessDate }), + ...(!isEmpty(datasetTitles) && { datasetTitles }), + ...(!isEmpty(linkedDatasets) && { gatewayDatasets: linkedDatasets.map(dataset => dataset.pid) }), + ...(!isEmpty(namedDatasets) && { nonGatewayDatasets: namedDatasets }), + ...(!isEmpty(gatewayApplicants) && { gatewayApplicants: gatewayApplicants.map(gatewayApplicant => gatewayApplicant._id) }), + ...(!isEmpty(nonGatewayApplicants) && { nonGatewayApplicants }), + ...(!isEmpty(fundersAndSponsors) && { fundersAndSponsors }), + ...(!isEmpty(gatewayOutputsTools) && { gatewayOutputsTools: gatewayOutputsTools.map(tool => tool.id) }), + ...(!isEmpty(gatewayOutputsPapers) && { gatewayOutputsPapers: gatewayOutputsPapers.map(paper => paper.id) }), + ...(!isEmpty(nonGatewayOutputs) && { nonGatewayOutputs: nonGatewayOutputs }), + ...(!isEmpty(otherApprovalCommittees) && { otherApprovalCommittees }), + ...(!isEmpty(relatedObjects) && { relatedObjects }), + activeflag: 'inReview', + publisher: teamId, + user: creatorUser._id, + updatedon: Date.now(), + lastActivity: Date.now(), + manualUpload: true, + }) + ); + } + + return dataUseRegisters; +}; + +/** + * Get Linked Datasets + * + * @desc Accepts a comma separated string containing dataset names which can be in the form of text based names or URLs belonging to the Gateway which resolve to a dataset page, or a mix of both. + * The function separates URLs and uses regex to locate a suspected dataset PID to use in a search against the Gateway database. If a match is found, the entry is considered a linked dataset. + * Entries which cannot be matched are returned as named datasets. 
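+ *
+ * @example
+ * // Illustrative only — the PID and dataset name below are hypothetical:
+ * const { linkedDatasets, namedDatasets } = await getLinkedDatasets([
+ *   `${process.env.homeURL}/dataset/abc-123`,
+ *   'A dataset name that cannot be matched',
+ * ]);
+ * // linkedDatasets => [{ id, name, pid }] for resolved entries; namedDatasets => the unmatched names.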
+ * @param {String} datasetNames A comma separated string representation of the dataset names to attempt to find and link to existing Gateway datasets + * @returns {Object} An object containing linked and named datasets in separate arrays + */ +const getLinkedDatasets = async (datasetNames = []) => { + const unverifiedDatasetPids = []; + const namedDatasets = []; + const validLinkRegexp = new RegExp(`^${process.env.homeURL}\/dataset\/([a-f|\\d|-]+)\/?$`, 'i'); + + for (const datasetName of datasetNames) { + const [, datasetPid] = validLinkRegexp.exec(datasetName) || []; + if (datasetPid) { + unverifiedDatasetPids.push(datasetPid); + } else { + let foundDataset = await datasetService.getDatasetsByName(datasetName); + if (foundDataset) { + unverifiedDatasetPids.push(foundDataset.pid); + } else { + namedDatasets.push(datasetName); + } + } + } + + const linkedDatasets = isEmpty(unverifiedDatasetPids) + ? [] + : (await datasetService.getDatasetsByPids(unverifiedDatasetPids)).map(dataset => { + return { id: dataset.datasetid, name: dataset.name, pid: dataset.pid }; + }); + + return { linkedDatasets, namedDatasets }; +}; + +/** + * Get Linked Applicants + * + * @desc Accepts a comma separated string containing applicant names which can be in the form of text based names or URLs belonging to the Gateway which resolve to a users profile page, or a mix of both. + * The function separates URLs and uses regex to locate a suspected user ID to use in a search against the Gateway database. If a match is found, the entry is considered a Gateway applicant. + * Entries which cannot be matched are returned as non Gateway applicants. Failed attempts at adding URLs which do not resolve are excluded. + * @param {String} datasetNames A comma separated string representation of the applicant(s) names to attempt to find and link to existing Gateway users + * @returns {Object} An object containing Gateway applicants and non Gateway applicants in separate arrays + */ +const getLinkedApplicants = async (applicantNames = []) => { + const unverifiedUserIds = []; + const nonGatewayApplicants = []; + const validLinkRegexp = new RegExp(`^${process.env.homeURL}\/person\/(\\d+)\/?$`, 'i'); + + for (const applicantName of applicantNames) { + const [, userId] = validLinkRegexp.exec(applicantName) || []; + if (userId) { + unverifiedUserIds.push(userId); + } else { + nonGatewayApplicants.push(applicantName); + } + } + + const gatewayApplicants = isEmpty(unverifiedUserIds) + ? [] + : (await getUsersByIds(unverifiedUserIds)).map(el => { + return { _id: el._id, id: el.id, firstname: el.firstname, lastname: el.lastname }; + }); + + return { gatewayApplicants, nonGatewayApplicants }; +}; + +/** + * Get Linked Outputs + * + * @desc Accepts a comma separated string containing tools or papers which can be in the form of text based names or URLs belonging to the Gateway which resolve to a users profile page, or a mix of both. + * The function separates URLs and uses regex to locate a suspected user ID to use in a search against the Gateway database. If a match is found, the entry is considered a Gateway tool or paper. + * Entries which cannot be matched are returned as non Gateway tools or papers. Failed attempts at adding URLs which do not resolve are excluded. 
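+ *
+ * @example
+ * // Illustrative only — the numeric ids below are hypothetical:
+ * const { gatewayOutputsTools, gatewayOutputsPapers, nonGatewayOutputs } = await getLinkedOutputs([
+ *   `${process.env.homeURL}/tool/123`,
+ *   `${process.env.homeURL}/paper/456`,
+ *   'A research output described in free text',
+ * ]);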
+ * @param {String} outputs A comma separated string representation of the tools or papers names to attempt to find and link to existing Gateway tools or papers + * @returns {Object} An object containing Gateway tools or papers and non Gateway tools or papers in separate arrays + */ +const getLinkedOutputs = async (outputs = []) => { + const unverifiedOutputsToolIds = [], + unverifiedOutputsPaperIds = [], + nonGatewayOutputs = []; + const validLinkRegexpTool = new RegExp(`^${process.env.homeURL}\/tool\/(\\d+)\/?$`, 'i'); + const validLinkRegexpPaper = new RegExp(`^${process.env.homeURL}\/paper\/(\\d+)\/?$`, 'i'); + + for (const output of outputs) { + const [, toolId] = validLinkRegexpTool.exec(output) || []; + if (toolId) { + unverifiedOutputsToolIds.push(toolId); + } else { + const [, paperId] = validLinkRegexpPaper.exec(output) || []; + if (paperId) { + unverifiedOutputsPaperIds.push(paperId); + } else { + nonGatewayOutputs.push(output); + } + } + } + + const gatewayOutputsTools = isEmpty(unverifiedOutputsToolIds) + ? [] + : (await toolService.getToolsByIds(unverifiedOutputsToolIds)).map(tool => { + return { id: tool.id, name: tool.name }; + }); + + const gatewayOutputsPapers = isEmpty(unverifiedOutputsPaperIds) + ? [] + : (await paperService.getPapersByIds(unverifiedOutputsPaperIds)).map(paper => { + return { id: paper.id, name: paper.name }; + }); + + return { gatewayOutputsTools, gatewayOutputsPapers, nonGatewayOutputs }; +}; + +/** + * Build Related Objects for datause + * + * @desc Accepts an array of objects to relate and outputs an array of related objects which can be assigned to an entity to show the relationship to the object. + * Related objects contain the 'objectId' (object identifier), 'pid', 'objectType' (dataset), 'updated' date and 'user' that created the linkage. + * @param {Object} creatorUser A user object to allow the assignment of their name to the creator of the linkage + * @param {String} type The type of object that is being passed in + * @param {Array} objects An array of objects containing the necessary properties to assemble a related object record reference + + */ +const buildRelatedObjects = (creatorUser, type, objects = [], manualUpload = true, addedViaEdit = false) => { + const { firstname, lastname } = creatorUser; + return objects.map(object => { + const { id: objectId, pid } = object; + return { + objectId, + pid, + objectType: type, + user: `${firstname} ${lastname}`, + updated: moment().format('DD MMM YYYY'), + isLocked: true, + reason: manualUpload + ? `This ${type} was added automatically during the manual upload of this data use register` + : addedViaEdit + ? `This ${type} was added via an edit of this data use register` + : `This ${type} was added automatically from an approved data access request`, + }; + }); +}; + +/** + * Extract Form Applicants + * + * @desc Accepts an array of authors and object containing answers from a Data Access Request application and extracts the names of non Gateway applicants as provided in the form, + * and extracts registered Gateway applicants, combining them before de-duplicating where match is found. 
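+ *
+ * @example
+ * // Illustrative only — the author and answers below are hypothetical; question keys follow the form's naming:
+ * const { gatewayApplicants, nonGatewayApplicants } = extractFormApplicants(
+ *   [{ _id: 'authorObjectId', firstname: 'Jane', lastname: 'Doe' }],
+ *   { safepeopleprimaryapplicantfullname: 'Jane Doe', safepeopleotherindividualsfullnameabc123: 'John Smith' }
+ * );
+ * // gatewayApplicants => ['authorObjectId']; nonGatewayApplicants => ['John Smith']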
+ * @param {Array} authors An array of user documents representing contributors and the main applicant to a Data Access Request application + * @param {Object} applicationQuestionAnswers An object of key pairs containing the question identifiers and answers to the questions taken from a Data Access Request application + * @returns {Object} An object containing two arrays, the first being representative of registered Gateway users in the form of their identifying _id + * and the second array being the names of applicants who were extracted from the question answers object passed in but did not match any of the registered users provided in authors + */ +const extractFormApplicants = (authors = [], applicationQuestionAnswers = {}) => { + const gatewayApplicants = authors.map(el => el._id); + const gatewayApplicantsNames = authors.map(el => `${el.firstname.trim()} ${el.lastname.trim()}`); + + const nonGatewayApplicants = Object.keys(applicationQuestionAnswers) + .filter( + key => + (key.includes('safepeopleprimaryapplicantfullname') || key.includes('safepeopleotherindividualsfullname')) && + !gatewayApplicantsNames.includes(applicationQuestionAnswers[key].trim()) + ) + .map(key => applicationQuestionAnswers[key]); + + return { gatewayApplicants, nonGatewayApplicants }; +}; + +/** + * Extract Funders And Sponsors + * + * @desc Accepts an object containing answers from a Data Access Request application and extracts funders and sponsors names from the specific sections where these questions are asked. + * @param {Object} applicationQuestionAnswers An object of key pairs containing the question identifiers and answers to the questions taken from a Data Access Request application + * @returns {Array} An array containing the organisation names provided as funders and sponsors + */ +const extractFundersAndSponsors = (applicationQuestionAnswers = {}) => { + return Object.keys(applicationQuestionAnswers) + .filter( + key => + key.includes('safeprojectfunderinformationprojecthasfundername') || + key.includes('safeprojectsponsorinformationprojecthassponsororganisationname') + ) + .map(key => applicationQuestionAnswers[key]); +}; + +const getDatasetsByPids = async datasetPids => { + return await datasetService.getDatasetsByPids(datasetPids); +}; + +const getAppplicantByIds = async applicantIds => { + return await getUsersByIds(applicantIds); +}; + +const getSafeOutputsByIds = async outputIds => { + return { gatewayToolIDs: await toolService.getToolsByIds(outputIds), gatewayPaperIDs: await paperService.getPapersByIds(outputIds) }; +}; + +export default { + buildDataUseRegisters, + getLinkedDatasets, + getLinkedApplicants, + getLinkedOutputs, + buildRelatedObjects, + extractFormApplicants, + extractFundersAndSponsors, + getDatasetsByPids, + getAppplicantByIds, + getSafeOutputsByIds, +}; diff --git a/src/resources/dataUseRegister/dependency.js b/src/resources/dataUseRegister/dependency.js new file mode 100644 index 00000000..2c22d9f8 --- /dev/null +++ b/src/resources/dataUseRegister/dependency.js @@ -0,0 +1,5 @@ +import DataUseRegisterRepository from './dataUseRegister.repository'; +import DataUseRegisterService from './dataUseRegister.service'; + +export const dataUseRegisterRepository = new DataUseRegisterRepository(); +export const dataUseRegisterService = new DataUseRegisterService(dataUseRegisterRepository); diff --git a/src/resources/datarequest/amendment/amendment.controller.js b/src/resources/datarequest/amendment/amendment.controller.js index fc4b09f7..2347b26a 100644 --- 
a/src/resources/datarequest/amendment/amendment.controller.js +++ b/src/resources/datarequest/amendment/amendment.controller.js @@ -252,7 +252,7 @@ export default class AmendmentController extends Controller { } else { // 10. Send update request notifications let fullAccessRecord = await this.dataRequestService.getApplicationById(id); - await this.activityLogService.logActivity(constants.activityLogEvents.UPDATE_REQUESTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.UPDATE_REQUESTED, { accessRequest: fullAccessRecord, user: req.user, }); diff --git a/src/resources/datarequest/datarequest.controller.js b/src/resources/datarequest/datarequest.controller.js index fa558f58..21ec5c40 100644 --- a/src/resources/datarequest/datarequest.controller.js +++ b/src/resources/datarequest/datarequest.controller.js @@ -19,7 +19,7 @@ const logCategory = 'Data Access Request'; const bpmController = require('../bpmnworkflow/bpmnworkflow.controller'); export default class DataRequestController extends Controller { - constructor(dataRequestService, workflowService, amendmentService, topicService, messageService, activityLogService) { + constructor(dataRequestService, workflowService, amendmentService, topicService, messageService, activityLogService, dataUseRegisterService) { super(dataRequestService); this.dataRequestService = dataRequestService; this.workflowService = workflowService; @@ -27,6 +27,7 @@ export default class DataRequestController extends Controller { this.activityLogService = activityLogService; this.topicService = topicService; this.messageService = messageService; + this.dataUseRegisterService = dataUseRegisterService } // ###### APPLICATION CRUD OPERATIONS ####### @@ -130,13 +131,8 @@ export default class DataRequestController extends Controller { const countAmendments = this.amendmentService.countAmendments(accessRecord, userType, isLatestMinorVersion); // 8. Get the workflow status for the requested application version for the requesting user - const { - inReviewMode, - reviewSections, - hasRecommended, - isManager, - workflow, - } = this.workflowService.getApplicationWorkflowStatusForUser(accessRecord, requestingUserObjectId); + const { inReviewMode, reviewSections, hasRecommended, isManager, workflow } = + this.workflowService.getApplicationWorkflowStatusForUser(accessRecord, requestingUserObjectId); // 9. Get role type for requesting user, applicable for only Custodian users i.e. 
Manager/Reviewer role const userRole = @@ -362,7 +358,7 @@ export default class DataRequestController extends Controller { switch (accessRecord.applicationType) { case constants.submissionTypes.AMENDED: accessRecord = await this.dataRequestService.doAmendSubmission(accessRecord, description); - await this.activityLogService.logActivity(constants.activityLogEvents.AMENDMENT_SUBMITTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.AMENDMENT_SUBMITTED, { accessRequest: accessRecord, user: requestingUser, }); @@ -371,7 +367,7 @@ export default class DataRequestController extends Controller { case constants.submissionTypes.INITIAL: default: accessRecord = await this.dataRequestService.doInitialSubmission(accessRecord); - await this.activityLogService.logActivity(constants.activityLogEvents.APPLICATION_SUBMITTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.APPLICATION_SUBMITTED, { accessRequest: accessRecord, user: requestingUser, }); @@ -384,7 +380,7 @@ export default class DataRequestController extends Controller { ) { accessRecord = await this.amendmentService.doResubmission(accessRecord, requestingUserObjectId.toString()); await this.dataRequestService.syncRelatedVersions(accessRecord.versionTree); - await this.activityLogService.logActivity(constants.activityLogEvents.UPDATES_SUBMITTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.UPDATES_SUBMITTED, { accessRequest: accessRecord, user: requestingUser, }); @@ -642,7 +638,7 @@ export default class DataRequestController extends Controller { // If save has succeeded - send notifications // Send notifications to added/removed contributors if (contributorChange) { - await this.createNotifications( + this.createNotifications( constants.notificationTypes.CONTRIBUTORCHANGE, { newAuthors, currentAuthors }, accessRecord, @@ -651,7 +647,7 @@ export default class DataRequestController extends Controller { let addedAuthors = [...newAuthors].filter(author => !currentAuthors.includes(author)); await addedAuthors.forEach(addedAuthor => - this.activityLogService.logActivity(constants.activityLogEvents.COLLABORATOR_ADDEDD, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.COLLABORATOR_ADDEDD, { accessRequest: accessRecord, user: req.user, collaboratorId: addedAuthor, @@ -660,7 +656,7 @@ export default class DataRequestController extends Controller { let removedAuthors = [...currentAuthors].filter(author => !newAuthors.includes(author)); await removedAuthors.forEach(removedAuthor => - this.activityLogService.logActivity(constants.activityLogEvents.COLLABORATOR_REMOVED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.COLLABORATOR_REMOVED, { accessRequest: accessRecord, user: req.user, collaboratorId: removedAuthor, @@ -671,25 +667,31 @@ export default class DataRequestController extends Controller { //Update any connected version trees this.dataRequestService.updateVersionStatus(accessRecord, accessRecord.applicationStatus); - if (accessRecord.applicationStatus === constants.applicationStatuses.APPROVED) - await this.activityLogService.logActivity(constants.activityLogEvents.APPLICATION_APPROVED, { + if (accessRecord.applicationStatus === constants.applicationStatuses.APPROVED) { + await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); + await 
this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.APPLICATION_APPROVED, { accessRequest: accessRecord, user: req.user, }); + } else if (accessRecord.applicationStatus === constants.applicationStatuses.APPROVEDWITHCONDITIONS) { - await this.activityLogService.logActivity(constants.activityLogEvents.APPLICATION_APPROVED_WITH_CONDITIONS, { - accessRequest: accessRecord, - user: req.user, - }); + await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); + await this.activityLogService.logActivity( + constants.activityLogEvents.data_access_request.APPLICATION_APPROVED_WITH_CONDITIONS, + { + accessRequest: accessRecord, + user: req.user, + } + ); } else if (accessRecord.applicationStatus === constants.applicationStatuses.REJECTED) { - await this.activityLogService.logActivity(constants.activityLogEvents.APPLICATION_REJECTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.APPLICATION_REJECTED, { accessRequest: accessRecord, user: req.user, }); } // Send notifications to custodian team, main applicant and contributors regarding status change - await this.createNotifications( + this.createNotifications( constants.notificationTypes.STATUSCHANGE, { applicationStatus, applicationStatusDesc }, accessRecord, @@ -1385,13 +1387,13 @@ export default class DataRequestController extends Controller { this.createNotifications(constants.notificationTypes.WORKFLOWASSIGNED, emailContext, accessRecord, requestingUser); //Create activity log - this.activityLogService.logActivity(constants.activityLogEvents.WORKFLOW_ASSIGNED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.WORKFLOW_ASSIGNED, { accessRequest: accessRecord, user: req.user, }); //Create activity log - this.activityLogService.logActivity(constants.activityLogEvents.REVIEW_PHASE_STARTED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.REVIEW_PHASE_STARTED, { accessRequest: accessRecord, user: req.user, }); @@ -1498,7 +1500,7 @@ export default class DataRequestController extends Controller { // Create notifications to managers that the application is awaiting final approval relevantStepIndex = activeStepIndex; relevantNotificationType = constants.notificationTypes.FINALDECISIONREQUIRED; - this.activityLogService.logActivity(constants.activityLogEvents.FINAL_DECISION_REQUIRED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.FINAL_DECISION_REQUIRED, { accessRequest: accessRecord, user: requestingUser, }); @@ -1506,7 +1508,7 @@ export default class DataRequestController extends Controller { // Create notifications to reviewers of the next step that has been activated relevantStepIndex = activeStepIndex + 1; relevantNotificationType = constants.notificationTypes.REVIEWSTEPSTART; - this.activityLogService.logActivity(constants.activityLogEvents.REVIEW_PHASE_STARTED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.REVIEW_PHASE_STARTED, { accessRequest: accessRecord, user: requestingUser, }); @@ -1647,13 +1649,13 @@ export default class DataRequestController extends Controller { }); if (approved) { - this.activityLogService.logActivity(constants.activityLogEvents.RECOMMENDATION_WITH_NO_ISSUE, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_NO_ISSUE, { comments, accessRequest: accessRecord, user: requestingUser, }); } else { - 
this.activityLogService.logActivity(constants.activityLogEvents.RECOMMENDATION_WITH_ISSUE, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.RECOMMENDATION_WITH_ISSUE, { comments, accessRequest: accessRecord, user: requestingUser, @@ -1667,7 +1669,7 @@ export default class DataRequestController extends Controller { // Create notifications to reviewers of the next step that has been activated relevantStepIndex = activeStepIndex + 1; relevantNotificationType = constants.notificationTypes.REVIEWSTEPSTART; - this.activityLogService.logActivity(constants.activityLogEvents.REVIEW_PHASE_STARTED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.REVIEW_PHASE_STARTED, { accessRequest: accessRecord, user: requestingUser, }); @@ -1675,7 +1677,7 @@ export default class DataRequestController extends Controller { // Create notifications to managers that the application is awaiting final approval relevantStepIndex = activeStepIndex; relevantNotificationType = constants.notificationTypes.FINALDECISIONREQUIRED; - this.activityLogService.logActivity(constants.activityLogEvents.FINAL_DECISION_REQUIRED, { + this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.FINAL_DECISION_REQUIRED, { accessRequest: accessRecord, user: requestingUser, }); @@ -1771,7 +1773,7 @@ export default class DataRequestController extends Controller { } // 11. Log event in the activity log - await this.activityLogService.logActivity(constants.activityLogEvents.REVIEW_PROCESS_STARTED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.REVIEW_PROCESS_STARTED, { accessRequest: accessRecord, user: req.user, }); @@ -1816,7 +1818,7 @@ export default class DataRequestController extends Controller { // 4. 
Send emails based on deadline elapsed or approaching if (emailContext.deadlineElapsed) { this.createNotifications(constants.notificationTypes.DEADLINEPASSED, emailContext, accessRecord, requestingUser); - await this.activityLogService.logActivity(constants.activityLogEvents.DEADLINE_PASSED, { + await this.activityLogService.logActivity(constants.activityLogEvents.data_access_request.DEADLINE_PASSED, { accessRequest: accessRecord, }); } else { @@ -1889,7 +1891,7 @@ export default class DataRequestController extends Controller { async createNotifications(type, context, accessRecord, user) { // Project details from about application if 5 Safes let { aboutApplication = {} } = accessRecord; - let { projectName = 'No project name set' } = aboutApplication; + let { projectName } = aboutApplication; let { projectId, _id, workflow = {}, dateSubmitted = '', jsonSchema, questionAnswers, createdAt } = accessRecord; if (_.isEmpty(projectId)) { projectId = _id; @@ -1971,7 +1973,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( [user], constants.hdrukEmail, - `Data Access Request in progress for ${datasetTitles}`, + `Data Access Request in progress for ${projectName || datasetTitles}`, html, false, attachments @@ -1988,7 +1990,7 @@ export default class DataRequestController extends Controller { let statusChangeUserIds = [...custodianManagers, ...stepReviewers].map(user => user.id); await notificationBuilder.triggerNotificationMessage( statusChangeUserIds, - `${appFirstName} ${appLastName}'s Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${firstname} ${lastname}`, + `${appFirstName} ${appLastName}'s Data Access Request for ${projectName || datasetTitles} was ${context.applicationStatus} by ${firstname} ${lastname}`, 'data access request', accessRecord._id ); @@ -1996,7 +1998,7 @@ export default class DataRequestController extends Controller { // Create applicant notification await notificationBuilder.triggerNotificationMessage( [accessRecord.userId], - `Your Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + `Your Data Access Request for ${projectName || datasetTitles} was ${context.applicationStatus} by ${publisher}`, 'data access request', accessRecord._id ); @@ -2005,7 +2007,7 @@ export default class DataRequestController extends Controller { if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( authors.map(author => author.id), - `A Data Access Request you are contributing to for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + `A Data Access Request you are contributing to for ${projectName || datasetTitles} was ${context.applicationStatus} by ${publisher}`, 'data access request', accessRecord._id ); @@ -2033,7 +2035,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( emailRecipients, constants.hdrukEmail, - `Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + `Data Access Request for ${projectName || datasetTitles} was ${context.applicationStatus} by ${publisher}`, html, false ); @@ -2048,7 +2050,7 @@ export default class DataRequestController extends Controller { custodianUserIds = custodianManagers.map(user => user.id); await notificationBuilder.triggerNotificationMessage( custodianUserIds, - `A Data Access Request has been submitted to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, + `A Data Access Request has been 
submitted to ${publisher} for ${projectName || datasetTitles} by ${appFirstName} ${appLastName}`, 'data access request received', accessRecord._id, accessRecord.datasets[0].publisher._id.toString() @@ -2060,7 +2062,7 @@ export default class DataRequestController extends Controller { // Applicant notification await notificationBuilder.triggerNotificationMessage( [accessRecord.userId], - `Your Data Access Request for ${datasetTitles} was successfully submitted to ${publisher}`, + `Your Data Access Request for ${projectName || datasetTitles} was successfully submitted to ${publisher}`, 'data access request', accessRecord._id ); @@ -2068,7 +2070,7 @@ export default class DataRequestController extends Controller { if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( accessRecord.authors.map(author => author.id), - `A Data Access Request you are contributing to for ${datasetTitles} was successfully submitted to ${publisher} by ${firstname} ${lastname}`, + `A Data Access Request you are contributing to for ${projectName || datasetTitles} was successfully submitted to ${publisher} by ${firstname} ${lastname}`, 'data access request', accessRecord._id ); @@ -2116,7 +2118,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( emailRecipients, constants.hdrukEmail, - `Data Access Request has been submitted to ${publisher} for ${datasetTitles}`, + `Data Access Request has been submitted to ${publisher} for ${projectName || datasetTitles}`, html, false, attachments @@ -2133,7 +2135,7 @@ export default class DataRequestController extends Controller { custodianUserIds = custodianManagers.map(user => user.id); await notificationBuilder.triggerNotificationMessage( custodianUserIds, - `A Data Access Request has been resubmitted with updates to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, + `A Data Access Request has been resubmitted with updates to ${publisher} for ${projectName || datasetTitles} by ${appFirstName} ${appLastName}`, 'data access request', accessRecord._id ); @@ -2144,7 +2146,7 @@ export default class DataRequestController extends Controller { // Applicant notification await notificationBuilder.triggerNotificationMessage( [accessRecord.userId], - `Your Data Access Request for ${datasetTitles} was successfully resubmitted with updates to ${publisher}`, + `Your Data Access Request for ${projectName || datasetTitles} was successfully resubmitted with updates to ${publisher}`, 'data access request', accessRecord._id ); @@ -2152,7 +2154,7 @@ export default class DataRequestController extends Controller { if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( accessRecord.authors.map(author => author.id), - `A Data Access Request you are contributing to for ${datasetTitles} was successfully resubmitted with updates to ${publisher} by ${firstname} ${lastname}`, + `A Data Access Request you are contributing to for ${projectName || datasetTitles} was successfully resubmitted with updates to ${publisher} by ${firstname} ${lastname}`, 'data access request', accessRecord._id ); @@ -2200,7 +2202,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( emailRecipients, constants.hdrukEmail, - `Data Access Request to ${publisher} for ${datasetTitles} has been updated`, + `Data Access Request to ${publisher} for ${projectName || datasetTitles} has been updated`, html, false, attachments @@ -2482,7 +2484,7 @@ export default class 
DataRequestController extends Controller { // 1. Create notifications await notificationBuilder.triggerNotificationMessage( [accessRecord.userId], - `Your Data Access Request for ${datasetTitles} was successfully duplicated + `Your Data Access Request for ${projectName || datasetTitles} was successfully duplicated ${ _.isEmpty(newDatasetTitles) ? `from an existing form, which can now be edited` @@ -2495,7 +2497,7 @@ export default class DataRequestController extends Controller { if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( authors.map(author => author.id), - `A Data Access Request you contributed to for ${datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, + `A Data Access Request you contributed to for ${projectName || datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, 'data access request unlinked', newApplicationId ); @@ -2520,7 +2522,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( emailRecipients, constants.hdrukEmail, - `Data Access Request for ${datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, + `Data Access Request for ${projectName || datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, html, false ); @@ -2529,7 +2531,7 @@ export default class DataRequestController extends Controller { // 1. Create notifications await notificationBuilder.triggerNotificationMessage( [accessRecord.userId], - `Your Data Access Request for ${datasetTitles} was successfully deleted`, + `Your Data Access Request for ${projectName || datasetTitles} was successfully deleted`, 'data access request unlinked', accessRecord._id ); @@ -2537,7 +2539,7 @@ export default class DataRequestController extends Controller { if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( authors.map(author => author.id), - `A draft Data Access Request you contributed to for ${datasetTitles} has been deleted by ${firstname} ${lastname}`, + `A draft Data Access Request you contributed to for ${projectName || datasetTitles} has been deleted by ${firstname} ${lastname}`, 'data access request unlinked', accessRecord._id ); @@ -2646,7 +2648,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( emailRecipients, constants.hdrukEmail, - `Data Access Request to ${publisher} for ${datasetTitles} has been amended with updates`, + `Data Access Request to ${publisher} for ${projectName || datasetTitles} has been amended with updates`, html, false, attachments @@ -2655,7 +2657,6 @@ export default class DataRequestController extends Controller { } break; case constants.notificationTypes.MESSAGESENT: - let title = projectName !== 'No project name set' ? 
projectName : datasetTitles; if (userType === constants.userTypes.APPLICANT) { const custodianManagers = teamController.getTeamMembersByRole(accessRecord.publisherObj.team, constants.roleTypes.MANAGER); const custodianManagersIds = custodianManagers.map(user => user.id); @@ -2664,7 +2665,7 @@ export default class DataRequestController extends Controller { await notificationBuilder.triggerNotificationMessage( [...custodianManagersIds, ...custodianReviewersIds, ...accessRecord.authors.map(author => author.id)], - `There is a new message for the application ${title} from ${user.firstname} ${user.lastname}`, + `There is a new message for the application ${projectName || datasetTitles} from ${user.firstname} ${user.lastname}`, 'data access message sent', accessRecord._id ); @@ -2682,14 +2683,14 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( [...custodianManagers, ...custodianReviewers, ...accessRecord.authors], constants.hdrukEmail, - `There is a new message for the application ${title} from ${user.firstname} ${user.lastname}`, + `There is a new message for the application ${projectName || datasetTitles} from ${user.firstname} ${user.lastname}`, html, false ); } else if (userType === constants.userTypes.CUSTODIAN) { await notificationBuilder.triggerNotificationMessage( [accessRecord.userId, ...accessRecord.authors.map(author => author.id)], - `There is a new message for the application ${title} from ${user.firstname} ${user.lastname} from ${accessRecord.publisherObj.name}`, + `There is a new message for the application ${projectName || datasetTitles} from ${user.firstname} ${user.lastname} from ${accessRecord.publisherObj.name}`, 'data access message sent', accessRecord._id ); @@ -2707,7 +2708,7 @@ export default class DataRequestController extends Controller { await emailGenerator.sendEmail( [accessRecord.mainApplicant, ...accessRecord.authors], constants.hdrukEmail, - `There is a new message for the application ${title} from ${user.firstname} ${user.lastname}`, + `There is a new message for the application ${projectName || datasetTitles} from ${user.firstname} ${user.lastname}`, html, false ); @@ -2907,8 +2908,8 @@ export default class DataRequestController extends Controller { this.activityLogService.logActivity( messageType === constants.DARMessageTypes.DARMESSAGE - ? constants.activityLogEvents.CONTEXTUAL_MESSAGE - : constants.activityLogEvents.NOTE, + ? 
constants.activityLogEvents.data_access_request.CONTEXTUAL_MESSAGE + : constants.activityLogEvents.data_access_request.NOTE, { accessRequest: accessRecord, user: req.user, diff --git a/src/resources/datarequest/datarequest.route.js b/src/resources/datarequest/datarequest.route.js index 6c753df3..5d2f0151 100644 --- a/src/resources/datarequest/datarequest.route.js +++ b/src/resources/datarequest/datarequest.route.js @@ -8,6 +8,7 @@ import { logger } from '../utilities/logger'; import DataRequestController from './datarequest.controller'; import AmendmentController from './amendment/amendment.controller'; import { dataRequestService, workflowService, amendmentService, topicService, messageService, activityLogService } from './dependency'; +import { dataUseRegisterService } from '../dataUseRegister/dependency'; const fs = require('fs'); const path = './tmp'; @@ -27,7 +28,8 @@ const dataRequestController = new DataRequestController( amendmentService, topicService, messageService, - activityLogService + activityLogService, + dataUseRegisterService ); const amendmentController = new AmendmentController(amendmentService, dataRequestService, activityLogService); const router = express.Router(); diff --git a/src/resources/datarequest/datarequest.service.js b/src/resources/datarequest/datarequest.service.js index d9ea1246..bc3ae68b 100644 --- a/src/resources/datarequest/datarequest.service.js +++ b/src/resources/datarequest/datarequest.service.js @@ -141,7 +141,7 @@ export default class DataRequestService { topic.save(err => { if (!err) { // Create activity log entries based on existing messages in topic - activityLogService.logActivity(constants.activityLogEvents.PRESUBMISSION_MESSAGE, { + activityLogService.logActivity(constants.activityLogEvents.data_access_request.PRESUBMISSION_MESSAGE, { messages: topic.topicMessages, applicationId, publisher, diff --git a/src/resources/dataset/__tests__/dataset.controller.test.js b/src/resources/dataset/__tests__/dataset.controller.test.js index 4f9e6eb1..779fb458 100644 --- a/src/resources/dataset/__tests__/dataset.controller.test.js +++ b/src/resources/dataset/__tests__/dataset.controller.test.js @@ -35,18 +35,6 @@ describe('DatasetController', function () { expect(json.calledWith({ success: true, ...stubValue })).toBe(true); }); - it('should return a bad request response if no dataset id is provided', async function () { - req = { params: {} }; - - const serviceStub = sinon.stub(datasetService, 'getDataset').returns({}); - datasetController = new DatasetController(datasetService); - await datasetController.getDataset(req, res); - - expect(serviceStub.notCalled).toBe(true); - expect(status.calledWith(400)).toBe(true); - expect(json.calledWith({ success: false, message: 'You must provide a dataset identifier' })).toBe(true); - }); - it('should return a not found response if no dataset could be found for the id provided', async function () { req = { params: { id: faker.random.number({ min: 1, max: 999999999 }) } }; @@ -75,7 +63,7 @@ describe('DatasetController', function () { describe('getDatasets', function () { let req, res, status, json, datasetService, datasetController; - req = { params: {} }; + req = { params: {} }; beforeEach(() => { status = sinon.stub(); diff --git a/src/resources/dataset/dataset.controller.js b/src/resources/dataset/dataset.controller.js index f6829c03..e8fbf990 100644 --- a/src/resources/dataset/dataset.controller.js +++ b/src/resources/dataset/dataset.controller.js @@ -2,63 +2,57 @@ import Controller from '../base/controller'; 
export default class DatasetController extends Controller { constructor(datasetService) { - super(datasetService); + super(datasetService); this.datasetService = datasetService; } async getDataset(req, res) { try { - // Extract id parameter from query string + // Extract id parameter from query string const { id } = req.params; - // If no id provided, it is a bad request - if (!id) { - return res.status(400).json({ - success: false, - message: 'You must provide a dataset identifier', - }); - } - // Find the dataset + + // Find the dataset const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; let dataset = await this.datasetService.getDataset(id, req.query, options); - // Return if no dataset found + // Return if no dataset found if (!dataset) { return res.status(404).json({ success: false, message: 'A dataset could not be found with the provided id', }); - } - // Return the dataset + } + // Return the dataset return res.status(200).json({ success: true, - ...dataset + ...dataset, }); } catch (err) { - // Return error response if something goes wrong - console.error(err.message); - return res.status(500).json({ + // Return error response if something goes wrong + console.error(err.message); + return res.status(500).json({ success: false, message: 'A server error occurred, please try again', }); - } - } - - async getDatasets(req, res) { + } + } + + async getDatasets(req, res) { try { - // Find the datasets + // Find the datasets const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; - let datasets = await this.datasetService.getDatasets(req.query, options); - // Return the datasets + let datasets = await this.datasetService.getDatasets(req.query, options); + // Return the datasets return res.status(200).json({ success: true, - datasets + datasets, }); } catch (err) { - // Return error response if something goes wrong - console.error(err.message); - return res.status(500).json({ + // Return error response if something goes wrong + console.error(err.message); + return res.status(500).json({ success: false, message: 'A server error occurred, please try again', }); - } + } } } diff --git a/src/resources/dataset/dataset.repository.js b/src/resources/dataset/dataset.repository.js index 44f01a3d..ff7a13c7 100644 --- a/src/resources/dataset/dataset.repository.js +++ b/src/resources/dataset/dataset.repository.js @@ -34,4 +34,13 @@ export default class DatasetRepository extends Repository { return obj; }, {}); } + + getDatasetsByPids(pids) { + return this.dataset.aggregate([ + { $match: { pid: { $in: pids } } }, + { $project: { pid: 1, datasetid: 1, name: 1, createdAt: 1 } }, + { $sort: { createdAt: -1 } }, + { $group: { _id: '$pid', pid: { $first: '$pid' }, datasetid: { $first: '$datasetid' }, name: { $first: '$name' } } }, + ]); + } } diff --git a/src/resources/dataset/dataset.service.js b/src/resources/dataset/dataset.service.js index 5bd76087..d5063e4b 100644 --- a/src/resources/dataset/dataset.service.js +++ b/src/resources/dataset/dataset.service.js @@ -37,7 +37,7 @@ export default class DatasetService { return dataset; } - async getDatasets(query = {}, options = {} ) { + async getDatasets(query = {}, options = {}) { return this.datasetRepository.getDatasets(query, options); } @@ -110,4 +110,12 @@ export default class DatasetService { }); return dataset; } + + getDatasetsByPids(pids) { + return this.datasetRepository.getDatasetsByPids(pids); + } + + getDatasetsByName(name) { + return this.datasetRepository.getDataset({ name, fields: 
'pid' }, { lean: true }); + } } diff --git a/src/resources/dataset/datasetonboarding.controller.js b/src/resources/dataset/datasetonboarding.controller.js index 5b89d53f..bad8df69 100644 --- a/src/resources/dataset/datasetonboarding.controller.js +++ b/src/resources/dataset/datasetonboarding.controller.js @@ -1,3 +1,10 @@ +import axios from 'axios'; +import FormData from 'form-data'; +import moment from 'moment'; +import * as Sentry from '@sentry/node'; +var fs = require('fs'); +import _ from 'lodash'; + import { Data } from '../tool/data.model'; import { PublisherModel } from '../publisher/publisher.model'; import { filtersService } from '../filters/dependency'; @@ -5,11 +12,9 @@ import constants from '../utilities/constants.util'; import datasetonboardingUtil from './utils/datasetonboarding.util'; import { v4 as uuidv4 } from 'uuid'; import { isEmpty, isNil, escapeRegExp } from 'lodash'; -import axios from 'axios'; -import FormData from 'form-data'; -import moment from 'moment'; -import * as Sentry from '@sentry/node'; -var fs = require('fs'); +import { activityLogService } from '../activitylog/dependency'; + +const readEnv = process.env.ENV || 'prod'; module.exports = { //GET api/v1/dataset-onboarding @@ -24,10 +29,10 @@ module.exports = { //Build query, if the publisherId is admin then only return the inReview datasets let query = {}; - if (publisherID === 'admin') { + if (publisherID === constants.userTypes.ADMIN) { // get all datasets in review for admin query = { - activeflag: 'inReview', + activeflag: { $in: ['active', 'inReview', 'draft', 'rejected', 'archive'] }, type: 'dataset', }; } else { @@ -57,6 +62,9 @@ module.exports = { const versionDetails = { _id, datasetVersion, activeflag }; arr[datasetIdx].listOfVersions = [...arr[datasetIdx].listOfVersions, versionDetails]; } + if (publisherID === constants.userTypes.ADMIN) { + arr = arr.filter(dataset => dataset.activeflag === constants.applicationStatuses.INREVIEW); + } return arr; }, []); @@ -203,7 +211,9 @@ module.exports = { data.id = uniqueID; data.datasetid = 'New dataset version'; data.name = datasetToCopy.name; - data.datasetv2 = publisherObject; + data.datasetv2 = datasetToCopy.datasetv2; + data.datasetv2.identifier = ''; + data.datasetv2.version = ''; data.type = 'dataset'; data.activeflag = 'draft'; data.source = 'HDRUK MDC'; @@ -312,15 +322,44 @@ module.exports = { return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } + let dataset = await Data.findOne({ _id: id }); + dataset.questionAnswers = JSON.parse(dataset.questionAnswers); + + let datasetv2Object = await datasetonboardingUtil.buildv2Object(dataset); + //update dataset to inreview - constants.datatsetStatuses.INREVIEW let updatedDataset = await Data.findOneAndUpdate( { _id: id }, - { activeflag: constants.datatsetStatuses.INREVIEW, 'timestamps.updated': Date.now(), 'timestamps.submitted': Date.now() } + { + datasetv2: datasetv2Object, + activeflag: constants.datatsetStatuses.INREVIEW, + 'timestamps.updated': Date.now(), + 'timestamps.submitted': Date.now(), + } ); - //emails / notifications + // emails / notifications await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETSUBMITTED, updatedDataset); + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_SUBMITTED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); + + if (updatedDataset.datasetVersion !== '1.0.0') { + let datasetv2DifferenceObject = 
datasetonboardingUtil.datasetv2ObjectComparison(datasetv2Object, dataset.datasetv2); + + if (!_.isEmpty(datasetv2DifferenceObject)) { + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_UPDATES_SUBMITTED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + differences: datasetv2DifferenceObject, + }); + } + } + return res.status(200).json({ status: 'success' }); } catch (err) { console.error(err.message); @@ -351,6 +390,7 @@ module.exports = { } let dataset = await Data.findOne({ _id: id }); + if (!dataset) return res.status(404).json({ status: 'error', message: 'Dataset could not be found.' }); dataset.questionAnswers = JSON.parse(dataset.questionAnswers); @@ -419,94 +459,7 @@ module.exports = { }); // Adding to DB - let observations = await datasetonboardingUtil.buildObservations(dataset.questionAnswers); - - let datasetv2Object = { - identifier: newDatasetVersionId, - version: dataset.datasetVersion, - issued: moment(Date.now()).format('DD/MM/YYYY'), - modified: moment(Date.now()).format('DD/MM/YYYY'), - revisions: [], - summary: { - title: dataset.questionAnswers['properties/summary/title'] || '', - abstract: dataset.questionAnswers['properties/summary/abstract'] || '', - publisher: { - identifier: publisherData[0]._id.toString(), - name: publisherData[0].publisherDetails.name, - logo: publisherData[0].publisherDetails.logo || '', - description: publisherData[0].publisherDetails.description || '', - contactPoint: publisherData[0].publisherDetails.contactPoint || [], - memberOf: publisherData[0].publisherDetails.memberOf, - accessRights: publisherData[0].publisherDetails.accessRights || [], - deliveryLeadTime: publisherData[0].publisherDetails.deliveryLeadTime || '', - accessService: publisherData[0].publisherDetails.accessService || '', - accessRequestCost: publisherData[0].publisherDetails.accessRequestCost || '', - dataUseLimitation: publisherData[0].publisherDetails.dataUseLimitation || [], - dataUseRequirements: publisherData[0].publisherDetails.dataUseRequirements || [], - }, - contactPoint: dataset.questionAnswers['properties/summary/contactPoint'] || '', - keywords: dataset.questionAnswers['properties/summary/keywords'] || [], - alternateIdentifiers: dataset.questionAnswers['properties/summary/alternateIdentifiers'] || [], - doiName: dataset.questionAnswers['properties/summary/doiName'] || '', - }, - documentation: { - description: dataset.questionAnswers['properties/documentation/description'] || '', - associatedMedia: dataset.questionAnswers['properties/documentation/associatedMedia'] || [], - isPartOf: dataset.questionAnswers['properties/documentation/isPartOf'] || [], - }, - coverage: { - spatial: dataset.questionAnswers['properties/coverage/spatial'] || [], - typicalAgeRange: dataset.questionAnswers['properties/coverage/typicalAgeRange'] || '', - physicalSampleAvailability: dataset.questionAnswers['properties/coverage/physicalSampleAvailability'] || [], - followup: dataset.questionAnswers['properties/coverage/followup'] || '', - pathway: dataset.questionAnswers['properties/coverage/pathway'] || '', - }, - provenance: { - origin: { - purpose: dataset.questionAnswers['properties/provenance/origin/purpose'] || [], - source: dataset.questionAnswers['properties/provenance/origin/source'] || [], - collectionSituation: dataset.questionAnswers['properties/provenance/origin/collectionSituation'] || [], - }, - temporal: { - accrualPeriodicity: dataset.questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || 
'', - distributionReleaseDate: dataset.questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', - startDate: dataset.questionAnswers['properties/provenance/temporal/startDate'] || '', - endDate: dataset.questionAnswers['properties/provenance/temporal/endDate'] || '', - timeLag: dataset.questionAnswers['properties/provenance/temporal/timeLag'] || '', - }, - }, - accessibility: { - usage: { - dataUseLimitation: dataset.questionAnswers['properties/accessibility/usage/dataUseLimitation'] || [], - dataUseRequirements: dataset.questionAnswers['properties/accessibility/usage/dataUseRequirements'] || [], - resourceCreator: dataset.questionAnswers['properties/accessibility/usage/resourceCreator'] || '', - investigations: dataset.questionAnswers['properties/accessibility/usage/investigations'] || [], - isReferencedBy: dataset.questionAnswers['properties/accessibility/usage/isReferencedBy'] || [], - }, - access: { - accessRights: dataset.questionAnswers['properties/accessibility/access/accessRights'] || [], - accessService: dataset.questionAnswers['properties/accessibility/access/accessService'] || '', - accessRequestCost: dataset.questionAnswers['properties/accessibility/access/accessRequestCost'] || '', - deliveryLeadTime: dataset.questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', - jurisdiction: dataset.questionAnswers['properties/accessibility/access/jurisdiction'] || [], - dataProcessor: dataset.questionAnswers['properties/accessibility/access/dataProcessor'] || '', - dataController: dataset.questionAnswers['properties/accessibility/access/dataController'] || '', - }, - formatAndStandards: { - vocabularyEncodingScheme: - dataset.questionAnswers['properties/accessibility/formatAndStandards/vocabularyEncodingScheme'] || [], - conformsTo: dataset.questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] || [], - language: dataset.questionAnswers['properties/accessibility/formatAndStandards/language'] || [], - format: dataset.questionAnswers['properties/accessibility/formatAndStandards/format'] || [], - }, - }, - enrichmentAndLinkage: { - qualifiedRelation: dataset.questionAnswers['properties/enrichmentAndLinkage/qualifiedRelation'] || [], - derivation: dataset.questionAnswers['properties/enrichmentAndLinkage/derivation'] || [], - tools: dataset.questionAnswers['properties/enrichmentAndLinkage/tools'] || [], - }, - observations: observations, - }; + let datasetv2Object = await datasetonboardingUtil.buildv2Object(dataset, newDatasetVersionId); let previousDataset = await Data.findOneAndUpdate({ pid: dataset.pid, activeflag: 'active' }, { activeflag: 'archive' }); let previousCounter = 0; @@ -570,8 +523,25 @@ module.exports = { filtersService.optimiseFilters('dataset'); + let datasetv2DifferenceObject = datasetonboardingUtil.datasetv2ObjectComparison(datasetv2Object, dataset.datasetv2); + + if (!_.isEmpty(datasetv2DifferenceObject)) { + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_UPDATES_SUBMITTED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + differences: datasetv2DifferenceObject, + }); + } + //emails / notifications await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETAPPROVED, updatedDataset); + + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_APPROVED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); }) .catch(err => { console.error('Error when trying 
to create new dataset on the MDC - ' + err.message); @@ -606,6 +576,12 @@ module.exports = { //emails / notifications await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETREJECTED, updatedDataset); + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_REJECTED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); + return res.status(200).json({ status: 'success' }); } else if (applicationStatus === 'archive') { let dataset = await Data.findOne({ _id: id }).lean(); @@ -644,10 +620,17 @@ module.exports = { console.error('Error when trying to logout of the MDC - ' + err.message); }); } - await Data.findOneAndUpdate( + let updatedDataset = await Data.findOneAndUpdate( { _id: id }, { activeflag: constants.datatsetStatuses.ARCHIVE, 'timestamps.updated': Date.now(), 'timestamps.archived': Date.now() } ); + + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_ARCHIVED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); + return res.status(200).json({ status: 'success' }); } else if (applicationStatus === 'unarchive') { let dataset = await Data.findOne({ _id: id }).lean(); @@ -693,7 +676,14 @@ module.exports = { flagIs = 'active'; } - await Data.findOneAndUpdate({ _id: id }, { activeflag: flagIs }); //active or draft + const updatedDataset = await Data.findOneAndUpdate({ _id: id }, { activeflag: flagIs }); //active or draft + + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_UNARCHIVED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); + return res.status(200).json({ status: 'success' }); } } catch (err) { @@ -852,7 +842,9 @@ module.exports = { return res.status(400).json({ success: false, message: 'No metadata found' }); } } catch (err) { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(err.message); return res.status(500).json({ success: false, message: 'Bulk upload of metadata failed', error: err.message }); } diff --git a/src/resources/dataset/utils/__mocks__/datasetobjects.js b/src/resources/dataset/utils/__mocks__/datasetobjects.js new file mode 100644 index 00000000..60fe422e --- /dev/null +++ b/src/resources/dataset/utils/__mocks__/datasetobjects.js @@ -0,0 +1,138 @@ +import { ObjectID } from 'mongodb'; + +export const datasetQuestionAnswersMocks = { + 'properties/summary/abstract': 'test', + 'properties/summary/contactPoint': 'test@test.com', + 'properties/summary/keywords': ['testKeywordBowel', 'testKeywordCancer'], + 'properties/provenance/temporal/accrualPeriodicity': 'DAILY', + 'properties/provenance/temporal/startDate': '25/12/2021', + 'properties/provenance/temporal/timeLag': 'NOT APPLICABLE', + 'properties/accessibility/access/accessRights': ['http://www.google.com'], + 'properties/accessibility/access/jurisdiction': ['GB-GB'], + 'properties/accessibility/access/dataController': 'testtesttesttesttesttest', + 'properties/accessibility/formatAndStandards/vocabularyEncodingScheme': ['LOCAL'], + 'properties/accessibility/formatAndStandards/conformsTo': ['NHS SCOTLAND DATA DICTIONARY'], + 'properties/accessibility/formatAndStandards/language': ['ab'], + 'properties/accessibility/formatAndStandards/format': ['testtesttest'], + 'properties/observation/observedNode': 'PERSONS', + 'properties/observation/measuredValue': '25', + 
'properties/observation/disambiguatingDescription': 'testtesttest', + 'properties/observation/observationDate': '03/09/2021', + 'properties/observation/measuredProperty': 'Count', + 'properties/summary/title': 'Test title', + 'properties/provenance/origin/purpose': ['STUDY'], + 'properties/coverage/physicalSampleAvailability': ['NOT AVAILABLE'], + 'properties/enrichmentAndLinkage/qualifiedRelation': ['https://google.com', 'https://google.com'], + 'properties/observation/observedNode_1xguo': 'EVENTS', + 'properties/observation/measuredValue_1xguo': '100', + 'properties/observation/disambiguatingDescription_1xguo': 'testtesttest', + 'properties/observation/observationDate_1xguo': '03/11/2021', + 'properties/observation/measuredProperty_1xguo': 'Count', +}; + +export const datasetv2ObjectMock = { + identifier: '', + version: '2.0.0', + revisions: [], + summary: { + title: 'Test title', + abstract: 'test', + publisher: { + identifier: '5f3f98068af2ef61552e1d75', + name: 'SAIL', + logo: '', + description: '', + contactPoint: [], + memberOf: 'ALLIANCE', + accessRights: [], + deliveryLeadTime: '', + accessService: '', + accessRequestCost: '', + dataUseLimitation: [], + dataUseRequirements: [], + }, + contactPoint: 'test@test.com', + keywords: ['testKeywordBowel', 'testKeywordCancer'], + alternateIdentifiers: [], + doiName: '', + }, + documentation: { description: '', associatedMedia: [], isPartOf: [] }, + coverage: { + spatial: [], + typicalAgeRange: '', + physicalSampleAvailability: ['NOT AVAILABLE'], + followup: '', + pathway: '', + }, + provenance: { + origin: { purpose: ['STUDY'], source: [], collectionSituation: [] }, + temporal: { + accrualPeriodicity: 'DAILY', + distributionReleaseDate: '', + startDate: '25/12/2021', + endDate: '', + timeLag: 'NOT APPLICABLE', + }, + }, + accessibility: { + usage: { + dataUseLimitation: [], + dataUseRequirements: [], + resourceCreator: [], + investigations: [], + isReferencedBy: [], + }, + access: { + accessRights: ['http://www.google.com'], + accessService: '', + accessRequestCost: [], + deliveryLeadTime: '', + jurisdiction: ['GB-GB'], + dataProcessor: '', + dataController: 'testtesttesttesttesttest', + }, + formatAndStandards: { + vocabularyEncodingScheme: ['LOCAL'], + conformsTo: ['NHS SCOTLAND DATA DICTIONARY'], + language: ['ab'], + format: ['testtesttest'], + }, + }, + enrichmentAndLinkage: { + qualifiedRelation: ['https://google.com', 'https://google.com'], + derivation: [], + tools: [], + }, + observations: [ + { + observedNode: 'PERSONS', + measuredValue: '25', + disambiguatingDescription: 'testtesttest', + observationDate: '03/09/2021', + measuredProperty: 'Count', + }, + { + observedNode: 'EVENTS', + measuredValue: '100', + disambiguatingDescription: 'testtesttest', + observationDate: '03/11/2021', + measuredProperty: 'Count', + }, + ], +}; + +export const publisherDetailsMock = [ + { + _id: ObjectID('5f3f98068af2ef61552e1d75'), + name: 'ALLIANCE > SAIL', + active: true, + imageURL: '', + dataRequestModalContent: {}, + allowsMessaging: true, + workflowEnabled: true, + allowAccessRequestManagement: true, + publisherDetails: { name: 'SAIL', memberOf: 'ALLIANCE' }, + uses5Safes: true, + mdcFolderId: 'c4f50de0-2188-426b-a6cd-6b11a8d6c3cb', + }, +]; diff --git a/src/resources/dataset/utils/__tests__/datasetonboarding.util.test.js b/src/resources/dataset/utils/__tests__/datasetonboarding.util.test.js new file mode 100644 index 00000000..84b04da9 --- /dev/null +++ b/src/resources/dataset/utils/__tests__/datasetonboarding.util.test.js @@ -0,0 +1,66 @@ 
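The mocks above pair the flat questionAnswers keys with the nested datasetv2 shape that buildv2Object is expected to produce, and the test file that follows asserts the two line up once the volatile issued/modified dates are removed. A minimal sketch of that correspondence, using only keys and values present in the mocks; pickAnswer is an illustrative helper, not part of the codebase, and the import path assumes the test file's location.

import { datasetQuestionAnswersMocks, datasetv2ObjectMock } from '../__mocks__/datasetobjects';

// Illustrative helper: read a flat 'properties/...' answer with a fallback,
// mirroring the `questionAnswers[...] || ''` pattern buildv2Object uses further down this diff.
const pickAnswer = (answers, path, fallback = '') => answers[`properties/${path}`] || fallback;

const title = pickAnswer(datasetQuestionAnswersMocks, 'summary/title'); // 'Test title'
const startDate = pickAnswer(datasetQuestionAnswersMocks, 'provenance/temporal/startDate'); // '25/12/2021'

// These land at the corresponding nested paths in the expected object, which is what
// the buildv2Object test below checks with toStrictEqual after deleting issued/modified:
// datasetv2ObjectMock.summary.title === title
// datasetv2ObjectMock.provenance.temporal.startDate === startDate
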
+import dbHandler from '../../../../config/in-memory-db'; +import datasetonboardingUtil from '../datasetonboarding.util'; +import { datasetQuestionAnswersMocks, datasetv2ObjectMock, publisherDetailsMock } from '../__mocks__/datasetobjects'; + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ publishers: publisherDetailsMock }); +}); + +afterAll(async () => await dbHandler.closeDatabase()); + +describe('Dataset onboarding utility', () => { + describe('buildv2Object', () => { + it('Should return a correctly formatted V2 object when supplied with questionAnswers', async () => { + let datasetv2Object = await datasetonboardingUtil.buildv2Object({ + questionAnswers: datasetQuestionAnswersMocks, + datasetVersion: '2.0.0', + datasetv2: { + summary: { + publisher: { + identifier: '5f3f98068af2ef61552e1d75', + }, + }, + }, + }); + + delete datasetv2Object.issued; + delete datasetv2Object.modified; + + expect(datasetv2Object).toStrictEqual(datasetv2ObjectMock); + }); + }); + + describe('datasetv2ObjectComparison', () => { + it('Should return a correctly formatted diff array', async () => { + let datasetv2DiffObject = await datasetonboardingUtil.datasetv2ObjectComparison( + { + summary: { title: 'Title 2' }, + provenance: { temporal: { updated: 'ONCE WEEKLY', updatedDates: ['1/1/1'] } }, + observations: [ + { observedNode: 'Obs2', observationDate: '3/3/3', measuredValue: '', disambiguatingDescription: '', measuredProperty: '' }, + { observedNode: 'Obs3', observationDate: '4/4/4', measuredValue: '', disambiguatingDescription: '', measuredProperty: '' }, + ], + }, + { + summary: { title: 'Title 1' }, + provenance: { temporal: { updated: 'TWICE WEEKLY', updatedDates: ['1/1/1', '2/2/2'] } }, + observations: [ + { observedNode: 'Obs1', observationDate: '3/3/3', measuredValue: '', disambiguatingDescription: '', measuredProperty: '' }, + ], + } + ); + + const diffArray = [ + { 'summary/title': { updatedAnswer: 'Title 2', previousAnswer: 'Title 1' } }, + { 'provenance/temporal/updated': { updatedAnswer: 'ONCE WEEKLY', previousAnswer: 'TWICE WEEKLY' } }, + { 'provenance/temporal/updatedDates': { updatedAnswer: '1/1/1', previousAnswer: '1/1/1, 2/2/2' } }, + { 'observations/1/observedNode': { updatedAnswer: 'Obs2', previousAnswer: 'Obs1' } }, + { 'observations/2/observedNode': { updatedAnswer: 'Obs3', previousAnswer: '' } }, + { 'observations/2/observationDate': { updatedAnswer: '4/4/4', previousAnswer: '' } }, + ]; + + expect(datasetv2DiffObject).toStrictEqual(diffArray); + }); + }); +}); diff --git a/src/resources/dataset/utils/datasetonboarding.util.js b/src/resources/dataset/utils/datasetonboarding.util.js index 296a727c..b67c11a7 100644 --- a/src/resources/dataset/utils/datasetonboarding.util.js +++ b/src/resources/dataset/utils/datasetonboarding.util.js @@ -4,13 +4,14 @@ import { PublisherModel } from '../../publisher/publisher.model'; import { UserModel } from '../../user/user.model'; import notificationBuilder from '../../utilities/notificationBuilder'; import emailGenerator from '../../utilities/emailGenerator.util'; -import { isEmpty, isNil, cloneDeep, isString, map, groupBy, orderBy } from 'lodash'; +import _, { isEmpty, isNil, cloneDeep, isString, map, groupBy, orderBy } from 'lodash'; import constants from '../../utilities/constants.util'; import moment from 'moment'; import randomstring from 'randomstring'; import Ajv from 'ajv'; import addFormats from 'ajv-formats'; var fs = require('fs'); +import { flatten } from 'flat'; /** * Checks to see if the user has the 
correct permissions to access the dataset @@ -1099,6 +1100,189 @@ const buildBulkUploadObject = async arrayOfDraftDatasets => { } }; +/** + * Build the datasetV2 object from dataset.questionAnswers + * + * @param {Object} dataset [dataset.questionAnswers object] + * + * @return {Object} [return datasetv2 object] + */ +const buildv2Object = async (dataset, newDatasetVersionId = '') => { + const publisherData = await PublisherModel.find({ _id: dataset.datasetv2.summary.publisher.identifier }).lean(); + const questionAnswers = dataset.questionAnswers; + const observations = await buildObservations(dataset.questionAnswers); + + let datasetv2Object = { + identifier: newDatasetVersionId || '', + version: dataset.datasetVersion, + issued: moment(Date.now()).format('DD/MM/YYYY'), + modified: moment(Date.now()).format('DD/MM/YYYY'), + revisions: [], + summary: { + title: questionAnswers['properties/summary/title'] || '', + abstract: questionAnswers['properties/summary/abstract'] || '', + publisher: { + identifier: publisherData[0]._id.toString(), + name: publisherData[0].publisherDetails.name, + logo: publisherData[0].publisherDetails.logo || '', + description: publisherData[0].publisherDetails.description || '', + contactPoint: publisherData[0].publisherDetails.contactPoint || [], + memberOf: publisherData[0].publisherDetails.memberOf, + accessRights: publisherData[0].publisherDetails.accessRights || [], + deliveryLeadTime: publisherData[0].publisherDetails.deliveryLeadTime || '', + accessService: publisherData[0].publisherDetails.accessService || '', + accessRequestCost: publisherData[0].publisherDetails.accessRequestCost || '', + dataUseLimitation: publisherData[0].publisherDetails.dataUseLimitation || [], + dataUseRequirements: publisherData[0].publisherDetails.dataUseRequirements || [], + }, + contactPoint: questionAnswers['properties/summary/contactPoint'] || '', + keywords: questionAnswers['properties/summary/keywords'] || [], + alternateIdentifiers: questionAnswers['properties/summary/alternateIdentifiers'] || [], + doiName: questionAnswers['properties/summary/doiName'] || '', + }, + documentation: { + description: questionAnswers['properties/documentation/description'] || '', + associatedMedia: questionAnswers['properties/documentation/associatedMedia'] || [], + isPartOf: questionAnswers['properties/documentation/isPartOf'] || [], + }, + coverage: { + spatial: questionAnswers['properties/coverage/spatial'] || [], + typicalAgeRange: questionAnswers['properties/coverage/typicalAgeRange'] || '', + physicalSampleAvailability: questionAnswers['properties/coverage/physicalSampleAvailability'] || [], + followup: questionAnswers['properties/coverage/followup'] || '', + pathway: questionAnswers['properties/coverage/pathway'] || '', + }, + provenance: { + origin: { + purpose: questionAnswers['properties/provenance/origin/purpose'] || [], + source: questionAnswers['properties/provenance/origin/source'] || [], + collectionSituation: questionAnswers['properties/provenance/origin/collectionSituation'] || [], + }, + temporal: { + accrualPeriodicity: questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || '', + distributionReleaseDate: questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', + startDate: questionAnswers['properties/provenance/temporal/startDate'] || '', + endDate: questionAnswers['properties/provenance/temporal/endDate'] || '', + timeLag: questionAnswers['properties/provenance/temporal/timeLag'] || '', + }, + }, + accessibility: { + usage: { + 
dataUseLimitation: questionAnswers['properties/accessibility/usage/dataUseLimitation'] || [], + dataUseRequirements: questionAnswers['properties/accessibility/usage/dataUseRequirements'] || [], + resourceCreator: questionAnswers['properties/accessibility/usage/resourceCreator'] || [], + investigations: questionAnswers['properties/accessibility/usage/investigations'] || [], + isReferencedBy: questionAnswers['properties/accessibility/usage/isReferencedBy'] || [], + }, + access: { + accessRights: questionAnswers['properties/accessibility/access/accessRights'] || [], + accessService: questionAnswers['properties/accessibility/access/accessService'] || '', + accessRequestCost: questionAnswers['properties/accessibility/access/accessRequestCost'] || [], + deliveryLeadTime: questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', + jurisdiction: questionAnswers['properties/accessibility/access/jurisdiction'] || [], + dataProcessor: questionAnswers['properties/accessibility/access/dataProcessor'] || '', + dataController: questionAnswers['properties/accessibility/access/dataController'] || '', + }, + formatAndStandards: { + vocabularyEncodingScheme: questionAnswers['properties/accessibility/formatAndStandards/vocabularyEncodingScheme'] || [], + conformsTo: questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] || [], + language: questionAnswers['properties/accessibility/formatAndStandards/language'] || [], + format: questionAnswers['properties/accessibility/formatAndStandards/format'] || [], + }, + }, + enrichmentAndLinkage: { + qualifiedRelation: questionAnswers['properties/enrichmentAndLinkage/qualifiedRelation'] || [], + derivation: questionAnswers['properties/enrichmentAndLinkage/derivation'] || [], + tools: questionAnswers['properties/enrichmentAndLinkage/tools'] || [], + }, + observations: observations, + }; + return datasetv2Object; +}; + +const datasetv2ObjectComparison = (updatedJSON, previousJSON) => { + updatedJSON = flatten(updatedJSON, { safe: true, delimiter: '/' }); + previousJSON = flatten(previousJSON, { safe: true, delimiter: '/' }); + + // Remove fields which change automatically between datasets + const unusedKeys = ['identifier', 'version', 'issued', 'modified']; + unusedKeys.forEach(key => { + delete updatedJSON[key]; + delete previousJSON[key]; + }); + + let result = []; + const datasetv2Keys = [...new Set(Object.keys(updatedJSON).concat(Object.keys(previousJSON)))]; + datasetv2Keys.forEach(key => { + if ( + previousJSON[key] !== updatedJSON[key] && + !_.isArray(updatedJSON[key], previousJSON[key]) && + !_.isObject(updatedJSON[key], previousJSON[key]) && + key !== 'observations' + ) { + let arrayObject = {}; + arrayObject[key] = { previousAnswer: previousJSON[key], updatedAnswer: updatedJSON[key] }; + result.push(arrayObject); + } + if ((_.isArray(previousJSON[key]) || _.isArray(updatedJSON[key])) && key !== 'observations') { + let previousAnswer = _.isArray(previousJSON[key]) ? previousJSON[key].join(', ') : previousJSON[key]; + let updatedAnswer = _.isArray(updatedJSON[key]) ? 
updatedJSON[key].join(', ') : updatedJSON[key]; + if (!_.isEqual(updatedAnswer, previousAnswer)) { + let arrayObject = {}; + arrayObject[key] = { + previousAnswer: previousAnswer, + updatedAnswer: updatedAnswer, + }; + result.push(arrayObject); + } + } + }); + + // Compute diff of 'observations' separately, which can be an array of objects + const observationKeys = ['observedNode', 'measuredValue', 'disambiguatingDescription', 'observationDate', 'measuredProperty']; + + const maxObservationLength = Math.max(previousJSON['observations'].length, updatedJSON['observations'].length); + let resultObservations = {}; + for (let i = 0; i < maxObservationLength; i++) { + let observationNumberKey = 'observations/' + (i + 1).toString() + '/'; + resultObservations[observationNumberKey] = {}; + if (updatedJSON['observations'][i] === undefined) { + updatedJSON['observations'][i] = {}; + observationKeys.forEach(key => { + updatedJSON['observations'][i][key] = ''; + }); + } + + if (previousJSON['observations'][i] === undefined) { + previousJSON['observations'][i] = {}; + observationKeys.forEach(key => { + previousJSON['observations'][i][key] = ''; + }); + } + + observationKeys.forEach(key => { + if (updatedJSON['observations'][i][key] === undefined) updatedJSON['observations'][i][key] = ''; + if (previousJSON['observations'][i][key] === undefined) previousJSON['observations'][i][key] = ''; + if (!_.isEqual(updatedJSON['observations'][i][key], previousJSON['observations'][i][key])) { + resultObservations[observationNumberKey + key] = { + previousAnswer: previousJSON['observations'][i][key], + updatedAnswer: updatedJSON['observations'][i][key], + }; + } + }); + if (_.isEmpty(resultObservations[observationNumberKey])) delete resultObservations[observationNumberKey]; + } + + // Append observation diff to previous result array + Object.keys(resultObservations).forEach(key => { + let arrayObject = {}; + arrayObject[key] = resultObservations[key]; + result.push(arrayObject); + }); + + return result; +}; + export default { getUserPermissionsForDataset, populateQuestionAnswers, @@ -1112,4 +1296,6 @@ export default { buildMetadataQuality, createNotifications, buildBulkUploadObject, + buildv2Object, + datasetv2ObjectComparison, }; diff --git a/src/resources/dataset/v1/dataset.route.js b/src/resources/dataset/v1/dataset.route.js index 16fd6086..63a09df5 100644 --- a/src/resources/dataset/v1/dataset.route.js +++ b/src/resources/dataset/v1/dataset.route.js @@ -5,6 +5,7 @@ import { getAllTools } from '../../tool/data.repository'; import { isEmpty, isNil } from 'lodash'; import escape from 'escape-html'; import { Course } from '../../course/course.model'; +import { DataUseRegister } from '../../dataUseRegister/dataUseRegister.model'; import { filtersService } from '../../filters/dependency'; import * as Sentry from '@sentry/node'; const router = express.Router(); @@ -16,6 +17,8 @@ const datasetLimiter = rateLimit({ message: 'Too many calls have been made to this api from this IP, please try again after an hour', }); +const readEnv = process.env.ENV || 'prod'; + router.post('/', async (req, res) => { try { // Check to see if header is in json format @@ -43,7 +46,9 @@ router.post('/', async (req, res) => { // Return response indicating job has started (do not await async import) return res.status(200).json({ success: true, message: 'Caching started' }); } catch (err) { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(err.message); return 
res.status(500).json({ success: false, message: 'Caching failed' }); } @@ -73,7 +78,9 @@ router.post('/updateServices', async (req, res) => { return res.status(200).json({ success: true, message: 'Services Update started' }); } catch (err) { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(err.message); return res.status(500).json({ success: false, message: 'Services update failed' }); } @@ -175,7 +182,23 @@ router.get('/:datasetID', async (req, res) => { activeflag: 'active', }); - relatedData = [...relatedData, ...relatedDataFromCourses]; + let relatedDataFromDatauses = await DataUseRegister.find({ + relatedObjects: { + $elemMatch: { + $or: [ + { + objectId: { $in: dataVersionsArray }, + }, + { + pid: pid, + }, + ], + }, + }, + activeflag: 'active', + }); + + relatedData = [...relatedData, ...relatedDataFromCourses, ...relatedDataFromDatauses]; relatedData.forEach(dat => { dat.relatedObjects.forEach(relatedObject => { diff --git a/src/resources/dataset/v1/dataset.service.js b/src/resources/dataset/v1/dataset.service.js index 30de3722..3d28c2be 100644 --- a/src/resources/dataset/v1/dataset.service.js +++ b/src/resources/dataset/v1/dataset.service.js @@ -16,6 +16,8 @@ let metadataQualityList = [], datasetsMDCIDs = [], counter = 0; +const readEnv = process.env.ENV || 'prod'; + export async function updateExternalDatasetServices(services) { for (let service of services) { if (service === 'phenotype') { @@ -24,12 +26,14 @@ export async function updateExternalDatasetServices(services) { timeout: 10000, }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata quality value ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata quality value ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get metadata quality value ' + err.message); }); @@ -41,12 +45,14 @@ export async function updateExternalDatasetServices(services) { const dataUtilityList = await axios .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json', { timeout: 10000 }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get data utility ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get data utility ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get data utility ' + err.message); }); @@ -193,12 +199,14 @@ async function importMetadataFromCatalogue(baseUri, dataModelExportRoute, source await logoutCatalogue(baseUri); await loginCatalogue(baseUri, credentials); await loadDatasets(baseUri, dataModelExportRoute, datasetsMDCList.items, datasetsMDCList.count, source, limit).catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: `Unable to complete the metadata import for ${source} ${err.message}`, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: `Unable to complete the metadata import for ${source} ${err.message}`, + level: Sentry.Severity.Error, + 
}); + Sentry.captureException(err); + } console.error(`Unable to complete the metadata import for ${source} ${err.message}`); }); await logoutCatalogue(baseUri); @@ -232,12 +240,14 @@ async function loadDatasets(baseUri, dataModelExportRoute, datasetsToImport, dat timeout: 60000, }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get dataset JSON ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get dataset JSON ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get metadata JSON ' + err.message); }); @@ -249,22 +259,26 @@ async function loadDatasets(baseUri, dataModelExportRoute, datasetsToImport, dat timeout: 10000, }) .catch(err => { + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata schema ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } + console.error('Unable to get metadata schema ' + err.message); + }); + + const versionLinksCall = axios.get(`${baseUri}/api/catalogueItems/${datasetMDC.id}/semanticLinks`, { timeout: 10000 }).catch(err => { + if (readEnv === 'test' || readEnv === 'prod') { Sentry.addBreadcrumb({ category: 'Caching', - message: 'Unable to get metadata schema ' + err.message, + message: 'Unable to get version links ' + err.message, level: Sentry.Severity.Error, }); Sentry.captureException(err); - console.error('Unable to get metadata schema ' + err.message); - }); - - const versionLinksCall = axios.get(`${baseUri}/api/catalogueItems/${datasetMDC.id}/semanticLinks`, { timeout: 10000 }).catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get version links ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + } console.error('Unable to get version links ' + err.message); }); @@ -491,12 +505,14 @@ async function getDataUtilityExport() { return await axios .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json', { timeout: 10000 }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get data utility ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get data utility ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get data utility ' + err.message); }); } @@ -511,12 +527,14 @@ async function getPhenotypesExport() { return await axios .get('https://raw.githubusercontent.com/spiros/hdr-caliber-phenome-portal/master/_data/dataset2phenotypes.json', { timeout: 10000 }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata quality value ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata quality value ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get metadata quality value ' + err.message); }); } @@ -531,12 +549,14 @@ async function getMetadataQualityExport() { return await 
axios .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/metadata_quality.json', { timeout: 10000 }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata quality value ' + err.message, - level: Sentry.Severity.Error, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata quality value ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + } console.error('Unable to get metadata quality value ' + err.message); }); } @@ -549,12 +569,14 @@ async function getDataModels(baseUri) { resolve(response.data); }) .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'The caching run has failed because it was unable to get a count from the MDC', - level: Sentry.Severity.Fatal, - }); - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'The caching run has failed because it was unable to get a count from the MDC', + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(err); + } reject(err); }); }).catch(() => { @@ -567,14 +589,16 @@ async function checkDifferentialValid(incomingMetadataCount, source, override) { const datasetsHDRCount = await Data.countDocuments({ type: 'dataset', activeflag: 'active', source }); if ((incomingMetadataCount / datasetsHDRCount) * 100 < 90 && !override) { - Sentry.addBreadcrumb({ - category: 'Caching', - message: `The caching run has failed because the counts from the MDC (${incomingMetadataCount}) where ${ - 100 - (incomingMetadataCount / datasetsHDRCount) * 100 - }% lower than the number stored in the DB (${datasetsHDRCount})`, - level: Sentry.Severity.Fatal, - }); - Sentry.captureException(); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Caching', + message: `The caching run has failed because the counts from the MDC (${incomingMetadataCount}) where ${ + 100 - (incomingMetadataCount / datasetsHDRCount) * 100 + }% lower than the number stored in the DB (${datasetsHDRCount})`, + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(); + } return false; } return true; diff --git a/src/resources/dataset/v2/dataset.route.js b/src/resources/dataset/v2/dataset.route.js index b0df0f01..a6f90244 100644 --- a/src/resources/dataset/v2/dataset.route.js +++ b/src/resources/dataset/v2/dataset.route.js @@ -1,6 +1,8 @@ import express from 'express'; import DatasetController from '../dataset.controller'; import { datasetService } from '../dependency'; +import { resultLimit } from '../../../config/middleware'; +import { checkIDMiddleware } from './../../../middlewares'; const router = express.Router(); const datasetController = new DatasetController(datasetService); @@ -8,11 +10,11 @@ const datasetController = new DatasetController(datasetService); // @route GET /api/v2/datasets/id // @desc Returns a dataset based on dataset ID provided // @access Public -router.get('/:id', (req, res) => datasetController.getDataset(req, res)); +router.get('/:id', checkIDMiddleware, (req, res) => datasetController.getDataset(req, res)); // @route GET /api/v2/datasets // @desc Returns a collection of datasets based on supplied query parameters // @access Public -router.get('/', (req, res) => datasetController.getDatasets(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => 
datasetController.getDatasets(req, res)); module.exports = router; diff --git a/src/resources/filters/dependency.js b/src/resources/filters/dependency.js index 6244ad43..050bb6ab 100644 --- a/src/resources/filters/dependency.js +++ b/src/resources/filters/dependency.js @@ -6,6 +6,7 @@ import ProjectRepository from '../project/project.repository'; import PaperRepository from '../paper/paper.repository'; import CollectionsRepository from '../collections/v2/collection.repository'; import CourseRepository from '../course/v2/course.repository'; +import DataUseRegisterRepository from '../dataUseRegister/dataUseRegister.repository'; const datasetRepository = new DatasetRepository(); const toolRepository = new ToolRepository(); @@ -13,6 +14,7 @@ const projectRepository = new ProjectRepository(); const paperRepository = new PaperRepository(); const collectionsRepository = new CollectionsRepository(); const courseRepository = new CourseRepository(); +const dataUseRegisterRepository = new DataUseRegisterRepository(); export const filtersRepository = new FiltersRepository(); export const filtersService = new FiltersService( @@ -22,5 +24,6 @@ export const filtersService = new FiltersService( projectRepository, paperRepository, collectionsRepository, - courseRepository + courseRepository, + dataUseRegisterRepository ); diff --git a/src/resources/filters/filters.mapper.js b/src/resources/filters/filters.mapper.js index 3f245d93..478672ff 100644 --- a/src/resources/filters/filters.mapper.js +++ b/src/resources/filters/filters.mapper.js @@ -854,3 +854,81 @@ export const courseFilters = [ beta: false, }, ]; + +export const dataUseRegisterFilters = [ + { + id: 1, + label: 'Data custodian', + key: 'publisher', + alias: 'datausedatacustodian', + dataPath: 'publisherDetails.name', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + beta: false, + }, + { + id: 2, + label: 'Lead applicant organisation', + key: 'organisationName', + alias: 'datauseorganisationname', + dataPath: 'organisationName', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + beta: false, + }, + { + id: 3, + label: 'Organisation sector', + key: 'organisationSector', + alias: 'datauserganisationsector', + dataPath: 'organisationSector', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + beta: false, + }, + { + id: 4, + label: 'Funders/Sponsor', + key: 'fundersAndSponsors', + alias: 'datausefundersandsponsors', + dataPath: 'fundersAndSponsors', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + beta: false, + }, + { + id: 5, + label: 'Keywords', + key: 'keywords', + alias: 'datausekeywords', + dataPath: 'keywords', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + beta: false, + }, +]; diff --git a/src/resources/filters/filters.service.js b/src/resources/filters/filters.service.js index 51152334..101061e0 100644 --- a/src/resources/filters/filters.service.js +++ b/src/resources/filters/filters.service.js @@ -10,7 +10,8 @@ export default class FiltersService { projectRepository, paperRepository, collectionRepository, - courseRepository + courseRepository, + DataUseRegisterRepository ) { this.filtersRepository = filtersRepository; 
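Alongside the DataUseRegisterRepository injected above and the dataUseRegisterFilters mapper entries, the service gains a 'dataUseRegister' branch a few hunks below. A minimal consumption sketch, assuming the buildFilters(type, filterQuery, useCachedFilters) signature that the reworked filter route relies on later in this diff; getDataUseFilters is an illustrative wrapper, the query object is illustrative, and the import path depends on the caller's location.

import { filtersService } from '../resources/filters/dependency'; // path relative to the caller

const getDataUseFilters = async () => {
	// Illustrative query: active data uses only, no free-text search term.
	const filterQuery = { $and: [{ activeflag: 'active' }] };

	// The new branch is expected to aggregate over organisationName, organisationSector,
	// keywords, fundersAndSponsors and publisherDetails.name via
	// DataUseRegisterRepository.getDataUseRegisters({ ...query, fields }, { aggregate: true }),
	// then de-duplicate and title-case each filter's values (publisher values are upper-cased).
	return filtersService.buildFilters('dataUseRegister', filterQuery, false);
};
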
this.datasetRepository = datasetRepository; @@ -19,6 +20,7 @@ export default class FiltersService { this.paperRepository = paperRepository; this.collectionRepository = collectionRepository; this.courseRepository = courseRepository; + this.DataUseRegisterRepository = DataUseRegisterRepository; } async getFilters(id, query = {}) { @@ -143,9 +145,13 @@ export default class FiltersService { entities = await this.collectionRepository.getCollections({ ...query, fields }, { aggregate: true }); break; case 'course': - fields = `courseOptions.startDate, provider,location,courseOptions.studyMode,award,entries.level,domains,keywords,competencyFramework,nationalPriority`; + fields = `courseOptions.startDate,provider,location,courseOptions.studyMode,award,entries.level,domains,keywords,competencyFramework,nationalPriority`; entities = await this.courseRepository.getCourses({ ...query, fields }, { lean: true, dateFormat: 'DD MMM YYYY' }); break; + case 'dataUseRegister': + fields = `organisationName,organisationSector,keywords,publisherDetails.name,fundersAndSponsors`; + entities = await this.DataUseRegisterRepository.getDataUseRegisters({ ...query, fields }, { aggregate: true }); + break; } // 3. Loop over each entity entities.forEach(entity => { @@ -176,12 +182,12 @@ export default class FiltersService { Object.keys(filters).forEach(filterKey => { // 9. Set filter values to title case (all except publisher) / upper case (publisher) and remove white space if (filterKey === 'publisher') { - filters[filterKey] = filters[filterKey].map(value => value.includes(">") - ? value.split(" > ")[1].toString().toUpperCase().trim() - : value.toString().toUpperCase().trim()); + filters[filterKey] = filters[filterKey].map(value => + value.includes('>') ? value.split(' > ')[1].toString().toUpperCase().trim() : value.toString().toUpperCase().trim() + ); } else { filters[filterKey] = filters[filterKey].map(value => helper.toTitleCase(value.toString().trim())); - }; + } // 10. Distinct filter values const distinctFilter = uniq(filters[filterKey]); // 11. Sort filter values and update final object @@ -304,6 +310,20 @@ export default class FiltersService { }; break; } + case 'dataUseRegister': { + // 2. Extract all properties used for filtering + let { keywords = [], organisationName = '', organisationSector = '', publisherDetails = '', fundersAndSponsors = [] } = entity; + + // 3. Create flattened filter props object + filterValues = { + keywords, + organisationName, + organisationSector, + publisher: publisherDetails[0].name, + fundersAndSponsors, + }; + break; + } } // 4. 
Return filter values return filterValues; diff --git a/src/resources/linkchecker/linkchecker.router.js b/src/resources/linkchecker/linkchecker.router.js index 7e0ba433..2cb2f4e9 100644 --- a/src/resources/linkchecker/linkchecker.router.js +++ b/src/resources/linkchecker/linkchecker.router.js @@ -8,6 +8,7 @@ import _ from 'lodash'; const sgMail = require('@sendgrid/mail'); const hdrukEmail = `enquiry@healthdatagateway.org`; +const readEnv = process.env.ENV || 'prod'; const axios = require('axios'); const router = express.Router(); @@ -111,7 +112,7 @@ router.post('/', async (req, res) => { }; await sgMail.send(msg, false, err => { - if (err) { + if (err && (readEnv === 'test' || readEnv === 'prod')) { Sentry.addBreadcrumb({ category: 'SendGrid', message: 'Sending email failed', diff --git a/src/resources/message/message.controller.js b/src/resources/message/message.controller.js index bf9db3d9..f45a32c3 100644 --- a/src/resources/message/message.controller.js +++ b/src/resources/message/message.controller.js @@ -163,7 +163,7 @@ module.exports = { // 20. Update activity log if there is a linked DAR if (topicObj.linkedDataAccessApplication) { - activityLogService.logActivity(constants.activityLogEvents.PRESUBMISSION_MESSAGE, { + activityLogService.logActivity(constants.activityLogEvents.data_access_request.PRESUBMISSION_MESSAGE, { messages: [messageObj], applicationId: topicObj.linkedDataAccessApplication, publisher: publisher.name, diff --git a/src/resources/paper/paper.repository.js b/src/resources/paper/paper.repository.js index 346529b1..8a108366 100644 --- a/src/resources/paper/paper.repository.js +++ b/src/resources/paper/paper.repository.js @@ -17,4 +17,9 @@ export default class PaperRepository extends Repository { const options = { lean: true }; return this.find(query, options); } + + async getPapersByIds(paperIds) { + const options = { lean: true }; + return this.find({ id: { $in: paperIds } }, options); + } } diff --git a/src/resources/paper/paper.service.js b/src/resources/paper/paper.service.js index 33d2cce5..6193adaf 100644 --- a/src/resources/paper/paper.service.js +++ b/src/resources/paper/paper.service.js @@ -10,4 +10,8 @@ export default class PaperService { getPapers(query = {}) { return this.paperRepository.getPapers(query); } + + getPapersByIds(paperIds) { + return this.paperRepository.getPapersByIds(paperIds); + } } diff --git a/src/resources/paper/v2/paper.route.js b/src/resources/paper/v2/paper.route.js index 40c14ab0..ef60cfbf 100644 --- a/src/resources/paper/v2/paper.route.js +++ b/src/resources/paper/v2/paper.route.js @@ -1,6 +1,7 @@ import express from 'express'; import PaperController from '../paper.controller'; import { paperService } from '../dependency'; +import { resultLimit } from '../../../config/middleware'; const router = express.Router(); const paperController = new PaperController(paperService); @@ -13,6 +14,6 @@ router.get('/:id', (req, res) => paperController.getPaper(req, res)); // @route GET /api/v2/papers // @desc Returns a collection of papers based on supplied query parameters // @access Public -router.get('/', (req, res) => paperController.getPapers(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => paperController.getPapers(req, res)); module.exports = router; diff --git a/src/resources/project/v2/project.route.js b/src/resources/project/v2/project.route.js index fed79dcb..a96a8ede 100644 --- a/src/resources/project/v2/project.route.js +++ b/src/resources/project/v2/project.route.js @@ -1,6 +1,7 @@ 
import express from 'express'; import ProjectController from '../project.controller'; import { projectService } from '../dependency'; +import { resultLimit } from '../../../config/middleware'; const router = express.Router(); const projectController = new ProjectController(projectService); @@ -13,6 +14,6 @@ router.get('/:id', (req, res) => projectController.getProject(req, res)); // @route GET /api/v2/projects // @desc Returns a collection of projects based on supplied query parameters // @access Public -router.get('/', (req, res) => projectController.getProjects(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => projectController.getProjects(req, res)); module.exports = router; diff --git a/src/resources/relatedobjects/relatedobjects.route.js b/src/resources/relatedobjects/relatedobjects.route.js index f9c51f96..1bb69004 100644 --- a/src/resources/relatedobjects/relatedobjects.route.js +++ b/src/resources/relatedobjects/relatedobjects.route.js @@ -2,6 +2,7 @@ import express from 'express'; import _ from 'lodash'; import { Data } from '../tool/data.model'; import { Course } from '../course/course.model'; +import { DataUseRegister } from '../dataUseRegister/dataUseRegister.model'; const router = express.Router(); @@ -73,7 +74,7 @@ router.get('/:id', async (req, res) => { } }); -router.get('/course/:id', async (req, res) => { +/* router.get('/course/:id', async (req, res) => { var id = req.params.id; var q = Course.aggregate([ @@ -84,6 +85,96 @@ router.get('/course/:id', async (req, res) => { if (err) return res.json({ success: false, error: err }); return res.json({ success: true, data: data }); }); +}); */ + +router.get('/:type/:id', async (req, res) => { + let { id, type } = req.params; + + if (type === 'course') { + let q = Course.aggregate([{ $match: { $and: [{ id: parseInt(id) }] } }]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + } else if (type === 'dataUseRegister') { + let q = DataUseRegister.aggregate([ + { $match: { $and: [{ id: parseInt(id) }] } }, + { + $lookup: { + from: 'publishers', + localField: 'publisher', + foreignField: '_id', + as: 'publisherDetails', + }, + }, + { + $lookup: { + from: 'tools', + let: { + listOfGatewayDatasets: '$gatewayDatasets', + }, + pipeline: [ + { + $match: { + $expr: { + $and: [ + { $in: ['$pid', '$$listOfGatewayDatasets'] }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $project: { pid: 1, name: 1 } }, + ], + as: 'gatewayDatasetsInfo', + }, + }, + { + $addFields: { + publisherInfo: { name: '$publisherDetails.name' }, + }, + }, + ]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + } else { + if (!isNaN(id)) { + let q = Data.aggregate([ + { $match: { $and: [{ id: parseInt(id) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + } else { + try { + // Get related dataset + let dataVersion = await Data.findOne({ datasetid: id }); + + if (!_.isNil(dataVersion)) { + id = dataVersion.pid; + } + + let data = await Data.findOne({ pid: id, activeflag: 'active' }); + + if (_.isNil(data)) { + data = await Data.findOne({ pid: id, activeflag: 'archive' }).sort({ createdAt: -1 }); + if (_.isNil(data)) { + data 
= dataVersion; + } + } + + return res.json({ success: true, data: [data] }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } + } }); module.exports = router; diff --git a/src/resources/search/filter.route.js b/src/resources/search/filter.route.js index b8c97f92..9e4cb71c 100644 --- a/src/resources/search/filter.route.js +++ b/src/resources/search/filter.route.js @@ -1,84 +1,48 @@ import express from 'express'; import { getObjectFilters, getFilter } from './search.repository'; import { filtersService } from '../filters/dependency'; -import { isEqual, lowerCase, isEmpty } from 'lodash'; +import { isEqual, isEmpty } from 'lodash'; const router = express.Router(); +const typeMapper = { + Datasets: 'dataset', + Tools: 'tool', + Projects: 'project', + Papers: 'paper', + People: 'person', + Courses: 'course', + Collections: 'collection', + Datauses: 'dataUseRegister', +}; + // @route GET api/v1/search/filter // @desc GET Get filters // @access Public router.get('/', async (req, res) => { let searchString = req.query.search || ''; //If blank then return all let tab = req.query.tab || ''; //If blank then return all - if (tab === '') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); - - await Promise.all([ - getFilter(searchString, 'tool', 'tags.topic', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'tags.features', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'categories.category', false, getObjectFilters(searchQuery, req, 'tool')), - - getFilter(searchString, 'project', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'project', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'project', 'categories.category', false, getObjectFilters(searchQuery, req, 'project')), - - getFilter(searchString, 'paper', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'paper', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')), - ]).then(values => { - return res.json({ - success: true, - allFilters: { - toolTopicFilter: values[0][0], - toolFeatureFilter: values[1][0], - toolLanguageFilter: values[2][0], - toolCategoryFilter: values[3][0], - - projectTopicFilter: values[4][0], - projectFeatureFilter: values[5][0], - projectCategoryFilter: values[6][0], - - paperTopicFilter: values[7][0], - paperFeatureFilter: values[8][0], - }, - filterOptions: { - toolTopicsFilterOptions: values[0][1], - featuresFilterOptions: values[1][1], - programmingLanguageFilterOptions: values[2][1], - toolCategoriesFilterOptions: values[3][1], - - projectTopicsFilterOptions: values[4][1], - projectFeaturesFilterOptions: values[5][1], - projectCategoriesFilterOptions: values[6][1], - - paperTopicsFilterOptions: values[7][1], - paperFeaturesFilterOptions: values[8][1], - }, - }); - }); - } else { - const type = !isEmpty(tab) && typeof tab === 'string' ? 
lowerCase(tab.substring(0, tab.length - 1)) : ''; - let defaultQuery = { $and: [{ activeflag: 'active' }] }; - if (type === 'collection') { - defaultQuery['$and'].push({ publicflag: true }); - } else if (type === 'course') { - defaultQuery['$and'].push({ - $or: [{ 'courseOptions.startDate': { $gte: new Date(Date.now()) } }, { 'courseOptions.flexibleDates': true }], - }); - } - - if (searchString.length > 0) defaultQuery['$and'].push({ $text: { $search: searchString } }); - const filterQuery = getObjectFilters(defaultQuery, req, type); - const useCachedFilters = isEqual(defaultQuery, filterQuery) && searchString.length === 0; - - const filters = await filtersService.buildFilters(type, filterQuery, useCachedFilters); - return res.json({ - success: true, - filters, + + const type = !isEmpty(tab) && typeof tab === 'string' ? typeMapper[`${tab}`] : ''; + + let defaultQuery = { $and: [{ activeflag: 'active' }] }; + if (type === 'collection') { + defaultQuery['$and'].push({ publicflag: true }); + } else if (type === 'course') { + defaultQuery['$and'].push({ + $or: [{ 'courseOptions.startDate': { $gte: new Date(Date.now()) } }, { 'courseOptions.flexibleDates': true }], }); } + + if (searchString.length > 0) defaultQuery['$and'].push({ $text: { $search: searchString } }); + const filterQuery = getObjectFilters(defaultQuery, req, type); + const useCachedFilters = isEqual(defaultQuery, filterQuery) && searchString.length === 0; + + const filters = await filtersService.buildFilters(type, filterQuery, useCachedFilters); + return res.json({ + success: true, + filters, + }); }); // @route GET api/v1/search/filter/topic/:type diff --git a/src/resources/search/record.search.model.js b/src/resources/search/record.search.model.js index c2d62137..a166a32d 100644 --- a/src/resources/search/record.search.model.js +++ b/src/resources/search/record.search.model.js @@ -9,6 +9,8 @@ const RecordSearchSchema = new Schema( project: Number, paper: Number, person: Number, + datause: Number, + course: Number, }, datesearched: Date, }, diff --git a/src/resources/search/search.repository.js b/src/resources/search/search.repository.js index 54448236..cff33322 100644 --- a/src/resources/search/search.repository.js +++ b/src/resources/search/search.repository.js @@ -1,8 +1,17 @@ import { Data } from '../tool/data.model'; import { Course } from '../course/course.model'; import { Collections } from '../collections/collections.model'; +import { DataUseRegister } from '../dataUseRegister/dataUseRegister.model'; import { findNodeInTree } from '../filters/utils/filters.util'; -import { datasetFilters, toolFilters, projectFilters, paperFilters, collectionFilters, courseFilters } from '../filters/filters.mapper'; +import { + datasetFilters, + toolFilters, + projectFilters, + paperFilters, + collectionFilters, + courseFilters, + dataUseRegisterFilters, +} from '../filters/filters.mapper'; import _ from 'lodash'; import moment from 'moment'; import helperUtil from '../utilities/helper.util'; @@ -13,6 +22,8 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, collection = Course; } else if (type === 'collection') { collection = Collections; + } else if (type === 'dataUseRegister') { + collection = DataUseRegister; } // ie copy deep object let newSearchQuery = _.cloneDeep(searchQuery); @@ -65,13 +76,12 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, }, ]; } else if (type === 'collection') { - - const searchTerm = newSearchQuery && newSearchQuery['$and'] && 
newSearchQuery['$and'].find(exp => !_.isNil(exp['$text'])) || {}; + const searchTerm = (newSearchQuery && newSearchQuery['$and'] && newSearchQuery['$and'].find(exp => !_.isNil(exp['$text']))) || {}; - if(searchTerm) { + if (searchTerm) { newSearchQuery['$and'] = newSearchQuery['$and'].filter(exp => !exp['$text']); } - + queryObject = [ { $match: searchTerm }, { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, @@ -119,6 +129,73 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, }, }, ]; + } else if (type === 'dataUseRegister') { + const searchTerm = (newSearchQuery && newSearchQuery['$and'] && newSearchQuery['$and'].find(exp => !_.isNil(exp['$text']))) || {}; + + if (searchTerm) { + newSearchQuery['$and'] = newSearchQuery['$and'].filter(exp => !exp['$text']); + } + + queryObject = [ + { $match: searchTerm }, + { + $lookup: { + from: 'publishers', + localField: 'publisher', + foreignField: '_id', + as: 'publisherDetails', + }, + }, + { + $lookup: { + from: 'tools', + let: { + listOfGatewayDatasets: '$gatewayDatasets', + }, + pipeline: [ + { + $match: { + $expr: { + $and: [ + { $in: ['$pid', '$$listOfGatewayDatasets'] }, + { + $eq: ['$activeflag', 'active'], + }, + ], + }, + }, + }, + { $project: { pid: 1, name: 1 } }, + ], + as: 'gatewayDatasetsInfo', + }, + }, + { + $addFields: { + publisherInfo: { name: '$publisherDetails.name' }, + }, + }, + { $match: newSearchQuery }, + { + $project: { + _id: 0, + id: 1, + projectTitle: 1, + organisationName: 1, + keywords: 1, + datasetTitles: 1, + publisherInfo: 1, + publisherDetails: 1, + gatewayDatasetsInfo: 1, + nonGatewayDatasets: 1, + activeflag: 1, + counter: 1, + type: 1, + latestUpdate: '$lastActivity', + relatedresources: { $cond: { if: { $isArray: '$relatedObjects' }, then: { $size: '$relatedObjects' }, else: 0 } }, + }, + }, + ]; } else if (type === 'dataset') { queryObject = [ { $match: newSearchQuery }, @@ -219,7 +296,7 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, activeflag: 1, counter: 1, - 'datasetfields.metadataquality.quality_score': 1, + 'datasetfields.metadataquality.weighted_quality_score': 1, latestUpdate: '$timestamps.updated', relatedresources: { @@ -288,7 +365,7 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, activeflag: 1, counter: 1, - 'datasetfields.metadataquality.quality_score': 1, + 'datasetfields.metadataquality.weighted_quality_score': 1, latestUpdate: { $cond: { if: { $gte: ['$createdAt', '$updatedon'] }, @@ -315,7 +392,7 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, if (sort === '') { if (type === 'dataset') { - if (searchAll) queryObject.push({ $sort: { 'datasetfields.metadataquality.quality_score': -1, name: 1 } }); + if (searchAll) queryObject.push({ $sort: { 'datasetfields.metadataquality.weighted_quality_score': -1, name: 1 } }); else queryObject.push({ $sort: { score: { $meta: 'textScore' } } }); } else if (type === 'paper') { if (searchAll) queryObject.push({ $sort: { journalYear: -1 } }); @@ -348,8 +425,8 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, else queryObject.push({ $sort: { counter: -1, score: { $meta: 'textScore' } } }); } } else if (sort === 'metadata') { - if (searchAll) queryObject.push({ $sort: { 'datasetfields.metadataquality.quality_score': -1, name: 1 } }); - else queryObject.push({ $sort: { 'datasetfields.metadataquality.quality_score': -1, score: { $meta: 'textScore' } } }); + 
if (searchAll) queryObject.push({ $sort: { 'datasetfields.metadataquality.weighted_quality_score': -1, name: 1 } }); + else queryObject.push({ $sort: { 'datasetfields.metadataquality.weighted_quality_score': -1, score: { $meta: 'textScore' } } }); } else if (sort === 'startdate') { if (form === 'true' && searchAll) { queryObject.push({ $sort: { myEntity: -1, 'courseOptions.startDate': 1 } }); @@ -374,9 +451,13 @@ export async function getObjectResult(type, searchAll, searchQuery, startIndex, } // Get paged results based on query params - const searchResults = await collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)).catch(err => { - console.log(err); - }); + const searchResults = await collection + .aggregate(queryObject) + .skip(parseInt(startIndex)) + .limit(parseInt(maxResults)) + .catch(err => { + console.log(err); + }); // Return data return { data: searchResults }; } @@ -387,6 +468,8 @@ export function getObjectCount(type, searchAll, searchQuery) { collection = Course; } else if (type === 'collection') { collection = Collections; + } else if (type === 'dataUseRegister') { + collection = DataUseRegister; } let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); if (type !== 'collection') { @@ -451,12 +534,12 @@ export function getObjectCount(type, searchAll, searchQuery) { .sort({ score: { $meta: 'textScore' } }); } } else if (type === 'collection') { - const searchTerm = newSearchQuery && newSearchQuery['$and'] && newSearchQuery['$and'].find(exp => !_.isNil(exp['$text'])) || {}; + const searchTerm = (newSearchQuery && newSearchQuery['$and'] && newSearchQuery['$and'].find(exp => !_.isNil(exp['$text']))) || {}; - if(searchTerm) { + if (searchTerm) { newSearchQuery['$and'] = newSearchQuery['$and'].filter(exp => !exp['$text']); } - + if (searchAll) { q = collection.aggregate([ { $match: searchTerm }, @@ -532,6 +615,52 @@ export function getObjectCount(type, searchAll, searchQuery) { ]) .sort({ score: { $meta: 'textScore' } }); } + } else if (type === 'dataUseRegister') { + const searchTerm = (newSearchQuery && newSearchQuery['$and'] && newSearchQuery['$and'].find(exp => !_.isNil(exp['$text']))) || {}; + + if (searchTerm) { + newSearchQuery['$and'] = newSearchQuery['$and'].filter(exp => !exp['$text']); + } + + q = collection.aggregate([ + { $match: searchTerm }, + { + $lookup: { + from: 'publishers', + localField: 'publisher', + foreignField: '_id', + as: 'publisherDetails', + }, + }, + { + $addFields: { + publisherDetails: { + $map: { + input: '$publisherDetails', + as: 'row', + in: { + name: '$$row.name', + }, + }, + }, + }, + }, + { $match: newSearchQuery }, + { + $group: { + _id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]); } else { if (searchAll) { q = collection.aggregate([ @@ -723,6 +852,8 @@ export function getObjectFilters(searchQueryStart, req, type) { filterNode = findNodeInTree(collectionFilters, key); } else if (type === 'course') { filterNode = findNodeInTree(courseFilters, key); + } else if (type === 'dataUseRegister') { + filterNode = findNodeInTree(dataUseRegisterFilters, key); } if (filterNode) { @@ -777,6 +908,8 @@ export const getFilter = async (searchString, type, field, isArray, activeFilter collection = Course; } else if (type === 'collection') { collection = Collections; + } else if (type === 'datause') { + collection = DataUseRegister; } let q = '', p = ''; diff --git a/src/resources/search/search.router.js b/src/resources/search/search.router.js index 
60547789..cd8a461d 100644 --- a/src/resources/search/search.router.js +++ b/src/resources/search/search.router.js @@ -56,6 +56,7 @@ router.get('/', async (req, res) => { People: 'person', Courses: 'course', Collections: 'collection', + Datauses: 'dataUseRegister', }; const entityType = typeMapper[`${tab}`]; @@ -125,6 +126,14 @@ router.get('/', async (req, res) => { req.query.maxResults || 40, req.query.collectionSort || '' ), + getObjectResult( + 'dataUseRegister', + searchAll, + getObjectFilters(searchQuery, req, 'dataUseRegister'), + req.query.dataUseRegisterIndex || 0, + req.query.maxResults || 40, + req.query.dataUseRegisterSort || '' + ), ]); } else { const sort = entityType === 'course' ? 'startdate' : req.query[`${entityType}Sort`] || ''; @@ -146,6 +155,7 @@ router.get('/', async (req, res) => { getObjectCount('person', searchAll, searchQuery), getObjectCount('course', searchAll, getObjectFilters(searchQuery, req, 'course')), getObjectCount('collection', searchAll, getObjectFilters(searchQuery, req, 'collection')), + getObjectCount('dataUseRegister', searchAll, getObjectFilters(searchQuery, req, 'dataUseRegister')), ]); const summary = { @@ -156,6 +166,7 @@ router.get('/', async (req, res) => { personCount: summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0, courseCount: summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0, collectionCount: summaryCounts[6][0] !== undefined ? summaryCounts[6][0].count : 0, + dataUseRegisterCount: summaryCounts[7][0] !== undefined ? summaryCounts[7][0].count : 0, }; let myEntitiesSummary = {}; @@ -184,6 +195,7 @@ router.get('/', async (req, res) => { recordSearchData.returned.person = summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0; recordSearchData.returned.course = summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0; recordSearchData.returned.collection = summaryCounts[6][0] !== undefined ? summaryCounts[6][0].count : 0; + recordSearchData.returned.datause = summaryCounts[7][0] !== undefined ? 
summaryCounts[7][0].count : 0; recordSearchData.datesearched = Date.now(); recordSearchData.save(err => {}); @@ -197,6 +209,7 @@ router.get('/', async (req, res) => { personResults: allResults[4].data, courseResults: allResults[5].data, collectionResults: allResults[6].data, + dataUseRegisterResults: allResults[7].data, summary: summary, myEntitiesSummary: myEntitiesSummary, }); diff --git a/src/resources/stats/stats.repository.js b/src/resources/stats/stats.repository.js index 034c7b2b..be62bebd 100644 --- a/src/resources/stats/stats.repository.js +++ b/src/resources/stats/stats.repository.js @@ -5,6 +5,7 @@ import { RecordSearchData } from '../search/record.search.model'; import { DataRequestModel } from '../datarequest/datarequest.model'; import { Course } from '../course/course.model'; import { MessagesModel } from '../message/message.model'; +import { DataUseRegister } from '../dataUseRegister/dataUseRegister.model'; import constants from '../utilities/constants.util'; export default class StatsRepository extends Repository { @@ -322,15 +323,15 @@ export default class StatsRepository extends Repository { await Promise.all([ this.getObjectResult('dataset', searchQuery), this.getObjectResult('tool', searchQuery), - this.getObjectResult('project', searchQuery), this.getObjectResult('paper', searchQuery), this.getObjectResult('course', searchQuery), + this.getObjectResult('dataUseRegister', searchQuery), ]).then(resources => { topSearch.datasets = resources[0][0] !== undefined && resources[0][0].count !== undefined ? resources[0][0].count : 0; topSearch.tools = resources[1][0] !== undefined && resources[1][0].count !== undefined ? resources[1][0].count : 0; - topSearch.projects = resources[2][0] !== undefined && resources[2][0].count !== undefined ? resources[2][0].count : 0; - topSearch.papers = resources[3][0] !== undefined && resources[3][0].count !== undefined ? resources[3][0].count : 0; - topSearch.course = resources[4][0] !== undefined && resources[4][0].count !== undefined ? resources[4][0].count : 0; + topSearch.papers = resources[2][0] !== undefined && resources[2][0].count !== undefined ? resources[2][0].count : 0; + topSearch.courses = resources[3][0] !== undefined && resources[3][0].count !== undefined ? resources[3][0].count : 0; + topSearch.dataUseRegisters = resources[4][0] !== undefined && resources[4][0].count !== undefined ? 
resources[4][0].count : 0; }); return topSearch; }) @@ -345,7 +346,17 @@ export default class StatsRepository extends Repository { newSearchQuery['$and'].push({ type }); var q = ''; - q = Data.aggregate([ + const typeMapper = { + dataset: Data, + tool: Data, + paper: Data, + course: Course, + dataUseRegister: DataUseRegister, + }; + + const model = typeMapper[type]; + + q = model.aggregate([ { $match: newSearchQuery }, { $group: { @@ -390,7 +401,8 @@ export default class StatsRepository extends Repository { maxTools: { $max: '$returned.tool' }, maxPapers: { $max: '$returned.paper' }, maxCourses: { $max: '$returned.course' }, - maxPeople: { $max: '$returned.people' }, + maxPeople: { $max: '$returned.person' }, + maxDataUses: { $max: '$returned.datause' }, entity: { $max: entityType }, }, }, @@ -469,10 +481,44 @@ export default class StatsRepository extends Repository { ]); } + async getPopularDataUses() { + return DataUseRegister.find( + { + activeflag: 'active', + counter: { + $gt: 0, + }, + }, + { + _id: 0, + type: 1, + projectTitle: 1, + organisationName: 1, + keywords: 1, + datasetTitles: 1, + publisher: 1, + id: 1, + counter: 1, + updatedon: 1, + } + ) + .populate({ + path: 'publisherInfo', + select: { name: 1, _id: 0 }, + }) + .sort({ counter: -1, title: 1 }) + .limit(10) + .lean(); + } + async getActiveCourseCount() { return Course.countDocuments({ activeflag: 'active' }); } + async getActiveDataUsesCount() { + return DataUseRegister.countDocuments({ activeflag: 'active' }); + } + async getPopularEntitiesByType(entityType) { let entityTypeFilter = {}; if (entityType) entityTypeFilter = { type: entityType }; @@ -604,6 +650,31 @@ export default class StatsRepository extends Repository { .lean(); } + async getRecentlyUpdatedDataUses() { + return DataUseRegister.find( + { activeflag: 'active' }, + { + _id: 0, + type: 1, + projectTitle: 1, + organisationName: 1, + keywords: 1, + datasetTitles: 1, + publisher: 1, + id: 1, + counter: 1, + updatedon: 1, + } + ) + .populate({ + path: 'publisherInfo', + select: { name: 1, _id: 0 }, + }) + .sort({ updatedon: -1, title: 1 }) + .limit(10) + .lean(); + } + async getRecentlyUpdatedEntitiesByType(entityType) { if (entityType) { return Data.find( diff --git a/src/resources/stats/stats.service.js b/src/resources/stats/stats.service.js index 726abc7e..a3346cf9 100644 --- a/src/resources/stats/stats.service.js +++ b/src/resources/stats/stats.service.js @@ -118,6 +118,8 @@ export default class StatsService { switch (entityType) { case 'course': return this.statsRepository.getPopularCourses(); + case 'dataUseRegister': + return this.statsRepository.getPopularDataUses(); default: return this.statsRepository.getPopularEntitiesByType(entityType); } @@ -127,12 +129,18 @@ export default class StatsService { return this.statsRepository.getActiveCourseCount(); } + async getActiveDataUsesCount() { + return this.statsRepository.getActiveDataUsesCount(); + } + async getRecentlyUpdatedEntitiesByType(entityType) { switch (entityType) { case 'course': return this.statsRepository.getRecentlyUpdatedCourses(); case 'dataset': return this.statsRepository.getRecentlyUpdatedDatasets(); + case 'dataUseRegister': + return this.statsRepository.getRecentlyUpdatedDataUses(); default: return this.statsRepository.getRecentlyUpdatedEntitiesByType(entityType); } @@ -162,6 +170,7 @@ const entityTypeMap = { Tools: 'tool', Projects: 'project', Courses: 'course', - Papers: 'papers', + Papers: 'paper', People: 'person', + DataUses: 'datause', }; diff --git 
a/src/resources/stats/v1/stats.route.js b/src/resources/stats/v1/stats.route.js index ebd0f9a6..4e339078 100644 --- a/src/resources/stats/v1/stats.route.js +++ b/src/resources/stats/v1/stats.route.js @@ -61,11 +61,12 @@ router.get('', logger.logRequestMiddleware({ logCategory, action: 'Viewed stats' break; default: - const [searchCounts, accessRequestCount, entityTotalCounts, coursesActiveCount] = await Promise.all([ + const [searchCounts, accessRequestCount, entityTotalCounts, coursesActiveCount, dataUsesActiveCount] = await Promise.all([ statsService.getTotalSearchesByUsers(), statsService.getDataAccessRequestStats(), statsService.getTotalEntityCounts(), statsService.getActiveCourseCount(), + statsService.getActiveDataUsesCount(), ]).catch(err => { logger.logError(err, logCategory); }); @@ -75,6 +76,7 @@ router.get('', logger.logRequestMiddleware({ logCategory, action: 'Viewed stats' ...entityTotalCounts, accessRequests: accessRequestCount, course: coursesActiveCount, + dataUses: dataUsesActiveCount, }, daycounts: searchCounts, }; diff --git a/src/resources/tool/v2/tool.repository.js b/src/resources/tool/v2/tool.repository.js index c224e376..7304a131 100644 --- a/src/resources/tool/v2/tool.repository.js +++ b/src/resources/tool/v2/tool.repository.js @@ -17,4 +17,9 @@ export default class ToolRepository extends Repository { const options = { lean: true }; return this.find(query, options); } + + async getToolsByIds(toolIds) { + const options = { lean: true }; + return this.find({ id: { $in: toolIds } }, options); + } } diff --git a/src/resources/tool/v2/tool.route.js b/src/resources/tool/v2/tool.route.js index 4eb0a7de..ec4d1ca4 100644 --- a/src/resources/tool/v2/tool.route.js +++ b/src/resources/tool/v2/tool.route.js @@ -1,6 +1,7 @@ import express from 'express'; import ToolController from './tool.controller'; import { toolService } from './dependency'; +import { resultLimit } from '../../../config/middleware'; const router = express.Router(); const toolController = new ToolController(toolService); @@ -13,6 +14,6 @@ router.get('/:id', (req, res) => toolController.getTool(req, res)); // @route GET /api/v2/tools // @desc Returns a collection of tools based on supplied query parameters // @access Public -router.get('/', (req, res) => toolController.getTools(req, res)); +router.get('/', (req, res, next) => resultLimit(req, res, next, 100), (req, res) => toolController.getTools(req, res)); module.exports = router; diff --git a/src/resources/tool/v2/tool.service.js b/src/resources/tool/v2/tool.service.js index 86e1477c..76e19225 100644 --- a/src/resources/tool/v2/tool.service.js +++ b/src/resources/tool/v2/tool.service.js @@ -10,4 +10,8 @@ export default class ToolService { getTools(query = {}) { return this.toolRepository.getTools(query); } + + getToolsByIds(toolIds) { + return this.toolRepository.getToolsByIds(toolIds); + } } diff --git a/src/resources/user/user.repository.js b/src/resources/user/user.repository.js index a72ab0ed..3f7d4aae 100644 --- a/src/resources/user/user.repository.js +++ b/src/resources/user/user.repository.js @@ -25,6 +25,10 @@ export async function getUserByUserId(id) { return await UserModel.findOne({ id }).exec(); } +export async function getUsersByIds(userIds) { + return await UserModel.find({ id: { $in: userIds } }).lean(); +} + export async function getServiceAccountByClientCredentials(clientId, clientSecret) { // 1. 
Locate service account by clientId, return undefined if no document located const id = clientId.toString(); diff --git a/src/resources/utilities/constants.util.js b/src/resources/utilities/constants.util.js index 7ea7562a..6f389cad 100644 --- a/src/resources/utilities/constants.util.js +++ b/src/resources/utilities/constants.util.js @@ -15,6 +15,12 @@ const _activityLogNotifications = Object.freeze({ MANUALEVENTREMOVED: 'manualEventRemoved', }); +const _dataUseRegisterNotifications = Object.freeze({ + DATAUSEAPPROVED: 'dataUseApproved', + DATAUSEREJECTED: 'dataUseRejected', + DATAUSEPENDING: 'dataUsePending', +}); + const _teamNotificationTypes = Object.freeze({ DATAACCESSREQUEST: 'dataAccessRequest', METADATAONBOARDING: 'metaDataOnboarding', @@ -216,6 +222,7 @@ const _roleTypes = { REVIEWER: 'reviewer', METADATA_EDITOR: 'metadata_editor', ADMIN_DATASET: 'admin_dataset', + ADMIN_DATA_USE: 'admin_data_use', }; // @@ -248,34 +255,55 @@ const _logTypes = { USER: 'User', }; -// Activity log related enums +const _dataUseRegisterStatus = { + ACTIVE: 'active', + INREVIEW: 'inReview', + REJECTED: 'rejected', + ARCHIVED: 'archived', +}; +// Activity log related enums const _activityLogEvents = { - APPLICATION_SUBMITTED: 'applicationSubmitted', - REVIEW_PROCESS_STARTED: 'reviewProcessStarted', - UPDATES_SUBMITTED: 'updatesSubmitted', - AMENDMENT_SUBMITTED: 'amendmentSubmitted', - APPLICATION_APPROVED: 'applicationApproved', - APPLICATION_APPROVED_WITH_CONDITIONS: 'applicationApprovedWithConditions', - APPLICATION_REJECTED: 'applicationRejected', - COLLABORATOR_ADDEDD: 'collaboratorAdded', - COLLABORATOR_REMOVED: 'collaboratorRemoved', - PRESUBMISSION_MESSAGE: 'presubmissionMessage', - UPDATE_REQUESTED: 'updateRequested', - UPDATE_SUBMITTED: 'updateSubmitted', - WORKFLOW_ASSIGNED: 'workflowAssigned', - REVIEW_PHASE_STARTED: 'reviewPhaseStarted', - RECOMMENDATION_WITH_ISSUE: 'reccomendationWithIssue', - RECOMMENDATION_WITH_NO_ISSUE: 'reccomendationWithNoIssue', - FINAL_DECISION_REQUIRED: 'finalDecisionRequired', - DEADLINE_PASSED: 'deadlinePassed', - MANUAL_EVENT: 'manualEvent', - CONTEXTUAL_MESSAGE: 'contextualMessage', - NOTE: 'note', + data_access_request: { + APPLICATION_SUBMITTED: 'applicationSubmitted', + REVIEW_PROCESS_STARTED: 'reviewProcessStarted', + UPDATES_SUBMITTED: 'updatesSubmitted', + AMENDMENT_SUBMITTED: 'amendmentSubmitted', + APPLICATION_APPROVED: 'applicationApproved', + APPLICATION_APPROVED_WITH_CONDITIONS: 'applicationApprovedWithConditions', + APPLICATION_REJECTED: 'applicationRejected', + COLLABORATOR_ADDEDD: 'collaboratorAdded', + COLLABORATOR_REMOVED: 'collaboratorRemoved', + PRESUBMISSION_MESSAGE: 'presubmissionMessage', + UPDATE_REQUESTED: 'updateRequested', + UPDATE_SUBMITTED: 'updateSubmitted', + WORKFLOW_ASSIGNED: 'workflowAssigned', + REVIEW_PHASE_STARTED: 'reviewPhaseStarted', + RECOMMENDATION_WITH_ISSUE: 'reccomendationWithIssue', + RECOMMENDATION_WITH_NO_ISSUE: 'reccomendationWithNoIssue', + FINAL_DECISION_REQUIRED: 'finalDecisionRequired', + DEADLINE_PASSED: 'deadlinePassed', + MANUAL_EVENT: 'manualEvent', + CONTEXTUAL_MESSAGE: 'contextualMessage', + NOTE: 'note', + }, + dataset: { + DATASET_VERSION_SUBMITTED: 'newDatasetVersionSubmitted', + DATASET_VERSION_APPROVED: 'datasetVersionApproved', + DATASET_VERSION_REJECTED: 'datasetVersionRejected', + DATASET_VERSION_ARCHIVED: 'datasetVersionArchived', + DATASET_VERSION_UNARCHIVED: 'datasetVersionUnarchived', + DATASET_UPDATES_SUBMITTED: 'datasetUpdatesSubmitted', + }, + data_use_register: { + DATA_USE_REGISTER_UPDATED: 
'dataUseRegisterUpdated', + }, }; const _activityLogTypes = { DATA_ACCESS_REQUEST: 'data_request', + DATA_USE_REGISTER: 'data_use_register', + DATASET: 'dataset', }; const _systemGeneratedUser = { @@ -312,4 +340,6 @@ export default { systemGeneratedUser: _systemGeneratedUser, activityLogNotifications: _activityLogNotifications, DARMessageTypes: _DARMessageTypes, + dataUseRegisterStatus: _dataUseRegisterStatus, + dataUseRegisterNotifications: _dataUseRegisterNotifications, }; diff --git a/src/resources/utilities/emailGenerator.util.js b/src/resources/utilities/emailGenerator.util.js index a5b0168e..40d06730 100644 --- a/src/resources/utilities/emailGenerator.util.js +++ b/src/resources/utilities/emailGenerator.util.js @@ -6,6 +6,7 @@ import constants from '../utilities/constants.util'; import * as Sentry from '@sentry/node'; const sgMail = require('@sendgrid/mail'); +const readEnv = process.env.ENV || 'prod'; let parent, qsId; let questionList = []; let excludedQuestionSetIds = ['addRepeatableSection', 'removeRepeatableSection']; @@ -200,7 +201,9 @@ const _getSubmissionDetails = ( let body = ` - + @@ -228,7 +231,9 @@ const _getSubmissionDetails = ( const amendBody = `
[submission-details table markup lost in extraction; the recoverable change is the Project cell gaining a fallback]
 				Project: ${projectName}
-				Project: ${projectName}
+				Project: ${projectName || 'No project name set'}
 				Related NCS project
- + @@ -319,19 +324,11 @@ const _getSubmissionDetails = ( * @return {String} Questions Answered */ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) => { - const { - userType, - userName, - userEmail, - datasetTitles, - initialDatasetTitles, - submissionType, - submissionDescription, - applicationId, - } = options; + const { userType, userName, userEmail, datasetTitles, initialDatasetTitles, submissionType, submissionDescription, applicationId } = + options; const dateSubmitted = moment().format('D MMM YYYY'); const year = moment().year(); - const { projectName = 'No project name set', isNationalCoreStudies = false, nationalCoreStudiesProjectId = '' } = aboutApplication; + const { projectName, isNationalCoreStudies = false, nationalCoreStudiesProjectId = '' } = aboutApplication; const linkNationalCoreStudies = nationalCoreStudiesProjectId === '' ? '' : `${process.env.homeURL}/project/${nationalCoreStudiesProjectId}`; @@ -348,7 +345,7 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) datasetTitles, initialDatasetTitles, submissionType, - projectName, + projectName || 'No project name set', isNationalCoreStudies, dateSubmitted, linkNationalCoreStudies @@ -356,7 +353,13 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) // Create json content payload for attaching to email const jsonContent = { - applicationDetails: { projectName, linkNationalCoreStudies, datasetTitles, dateSubmitted, applicantName: userName }, + applicationDetails: { + projectName: projectName || 'No project name set', + linkNationalCoreStudies, + datasetTitles, + dateSubmitted, + applicantName: userName, + }, questions: { ...fullQuestions }, answers: { ...questionAnswers }, }; @@ -632,18 +635,21 @@ const _displayActivityLogLink = (accessId, publisher) => { return `View activity log`; }; +const _displayDataUseRegisterLink = dataUseId => { + if (!dataUseId) return ''; + + const dataUseLink = `${process.env.homeURL}/datause/${dataUseId}`; + return `View data use`; +}; + +const _displayDataUseRegisterDashboardLink = () => { + const dataUseLink = `${process.env.homeURL}/account?tab=datause&team=admin`; + return `View all data uses for review `; +}; + const _generateDARStatusChangedEmail = options => { - let { - id, - applicationStatus, - applicationStatusDesc, - projectId, - projectName, - publisher, - datasetTitles, - dateSubmitted, - applicants, - } = options; + let { id, applicationStatus, applicationStatusDesc, projectId, projectName, publisher, datasetTitles, dateSubmitted, applicants } = + options; let body = `
[amendment table markup lost in extraction; the recoverable change mirrors the one above]
-				Project: ${projectName}
+				Project: ${projectName || 'No project name set'}
 				Date of amendment submission
{ @@ -2397,6 +2405,85 @@ const _generateActivityLogManualEventDeleted = options => { return body; }; +const _generateDataUseRegisterApproved = options => { + const { id, projectTitle } = options; + const body = `
[email HTML markup lost in extraction; recoverable text of the approved-notification template:]
+			New active data use
+			A data use for ${projectTitle} has been approved by HDR UK and is now public and searchable on the Gateway. You can now edit and archive this data use directly in the Gateway.
+			${_displayDataUseRegisterLink(id)}
`; + + return body; +}; + +const _generateDataUseRegisterRejected = options => { + const { id, projectTitle, rejectionReason } = options; + const body = `
[email HTML markup lost in extraction; recoverable text of the rejected-notification template:]
+			A data use has been rejected
+			A data use for ${projectTitle} has been rejected by the HDR UK team.
+			Reason for rejection:
+			${rejectionReason}
+			${_displayDataUseRegisterLink(id)}
`; + return body; +}; + +const _generateDataUseRegisterPending = options => { + const { listOfProjectTitles, publisher } = options; + + const body = `
+
- ${publisher} has ${applicationStatus} your data access request application. + Your data access request for ${projectName || datasetTitles} has been approved with conditions by ${publisher}. + Summary information about your approved project will be included in the Gateway data use register. + You will be notified as soon as this becomes visible and searchable on the Gateway.
[email HTML markup lost in extraction; recoverable text of the pending-review notification table:]
+			New data uses to review
+			${publisher} has submitted [${listOfProjectTitles.length}] data uses for review including:
+			Project title: ${listOfProjectTitles.join(', ')}
+			Date and time submitted: ${moment().format('DD/MM/YYYY, HH:mmA')}
+`; + return body; +}; + /** * [_sendEmail] * @@ -2423,7 +2510,7 @@ const _sendEmail = async (to, from, subject, html, allowUnsubscribe = true, atta // 4. Send email using SendGrid await sgMail.send(msg, false, err => { - if (err) { + if (err && (readEnv === 'test' || readEnv === 'prod')) { Sentry.addBreadcrumb({ category: 'SendGrid', message: 'Sending email failed', @@ -2446,7 +2533,7 @@ const _sendIntroEmail = msg => { sgMail.setApiKey(process.env.SENDGRID_API_KEY); // 2. Send email using SendGrid sgMail.send(msg, false, err => { - if (err) { + if (err && (readEnv === 'test' || readEnv === 'prod')) { Sentry.addBreadcrumb({ category: 'SendGrid', message: 'Sending email failed - Intro', @@ -2547,7 +2634,7 @@ export default { generateMetadataOnboardingApproved: _generateMetadataOnboardingApproved, generateMetadataOnboardingRejected: _generateMetadataOnboardingRejected, generateMetadataOnboardingDraftDeleted: _generateMetadataOnboardingDraftDeleted, - generateMetadataOnboardingDuplicated: _generateMetadataOnboardingDuplicated, + generateMetadataOnboardingDuplicated: _generateMetadataOnboardingDuplicated, //generateMetadataOnboardingArchived: _generateMetadataOnboardingArchived, //generateMetadataOnboardingUnArchived: _generateMetadataOnboardingUnArchived, //Messages @@ -2556,4 +2643,8 @@ export default { //ActivityLog generateActivityLogManualEventCreated: _generateActivityLogManualEventCreated, generateActivityLogManualEventDeleted: _generateActivityLogManualEventDeleted, + //DataUseRegister + generateDataUseRegisterApproved: _generateDataUseRegisterApproved, + generateDataUseRegisterRejected: _generateDataUseRegisterRejected, + generateDataUseRegisterPending: _generateDataUseRegisterPending, }; diff --git a/src/resources/utilities/logger.js b/src/resources/utilities/logger.js index ad6a579f..3b937197 100644 --- a/src/resources/utilities/logger.js +++ b/src/resources/utilities/logger.js @@ -1,6 +1,8 @@ import * as Sentry from '@sentry/node'; import constants from './constants.util'; +const readEnv = process.env.ENV || 'prod'; + const logRequestMiddleware = options => { return (req, res, next) => { const { logCategory, action } = options; @@ -11,21 +13,25 @@ const logRequestMiddleware = options => { const logSystemActivity = options => { const { category = 'Action not categorised', action = 'Action not described' } = options; - Sentry.addBreadcrumb({ - category, - message: action, - level: Sentry.Severity.Info, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category, + message: action, + level: Sentry.Severity.Info, + }); + } // Save to database }; const logUserActivity = (user, category, type, context) => { const { action } = context; - Sentry.addBreadcrumb({ - category, - message: action, - level: Sentry.Severity.Info, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category, + message: action, + level: Sentry.Severity.Info, + }); + } console.log(`${action}`); // Log date/time // Log action @@ -35,11 +41,13 @@ const logUserActivity = (user, category, type, context) => { }; const logError = (err, category) => { - Sentry.captureException(err, { - tags: { - area: category, - }, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err, { + tags: { + area: category, + }, + }); + } console.error(`The following error occurred: ${err.message}`); }; diff --git a/src/resources/workflow/workflow.repository.js b/src/resources/workflow/workflow.repository.js index 508a34a1..0c81be16 100644 --- 
a/src/resources/workflow/workflow.repository.js +++ b/src/resources/workflow/workflow.repository.js @@ -54,7 +54,7 @@ export default class WorkflowRepository extends Repository { async assignWorkflowToApplication(accessRecord, workflowId) { // Retrieve workflow using ID from database - const workflow = await WorkflowRepository.getWorkflowById(workflowId, { lean: false }); + const workflow = await this.getWorkflowById(workflowId, { lean: false }); if (!workflow) { throw new Error('Workflow could not be found'); } diff --git a/src/services/hubspot/hubspot.js b/src/services/hubspot/hubspot.js index e91c462d..d1709e95 100644 --- a/src/services/hubspot/hubspot.js +++ b/src/services/hubspot/hubspot.js @@ -10,6 +10,7 @@ import { logger } from '../../resources/utilities/logger'; // Default service params const apiKey = process.env.HUBSPOT_API_KEY; const logCategory = 'Hubspot Integration'; +const readEnv = process.env.ENV || 'prod'; let hubspotClient; if (apiKey) hubspotClient = new Client({ apiKey, numberOfApiCallRetries: NumberOfRetries.Three }); @@ -140,11 +141,13 @@ const syncAllContacts = async () => { if (apiKey) { try { // Track attempted sync in Sentry using log - Sentry.addBreadcrumb({ - category: 'Hubspot', - message: `Syncing Gateway users with Hubspot contacts`, - level: Sentry.Severity.Log, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'Hubspot', + message: `Syncing Gateway users with Hubspot contacts`, + level: Sentry.Severity.Log, + }); + } // Batch import subscription changes from Hubspot await batchImportFromHubspot(); diff --git a/src/services/hubspot/hubspot.route.js b/src/services/hubspot/hubspot.route.js index 78998511..ef9f92c2 100644 --- a/src/services/hubspot/hubspot.route.js +++ b/src/services/hubspot/hubspot.route.js @@ -2,6 +2,7 @@ import express from 'express'; import * as Sentry from '@sentry/node'; import hubspotConnector from './hubspot'; const router = express.Router(); +const readEnv = process.env.ENV || 'prod'; // @router POST /api/v1/hubspot/sync // @desc Performs a two-way sync of contact details including communication opt in preferences between HubSpot and the Gateway database @@ -28,7 +29,9 @@ router.post('/sync', async (req, res) => { // Return response indicating job has started (do not await async import) return res.status(200).json({ success: true, message: 'Sync started' }); } catch (err) { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(err.message); return res.status(500).json({ success: false, message: 'Sync failed' }); } diff --git a/src/services/mailchimp/mailchimp.js b/src/services/mailchimp/mailchimp.js index c745642f..cc5b1d5b 100644 --- a/src/services/mailchimp/mailchimp.js +++ b/src/services/mailchimp/mailchimp.js @@ -13,6 +13,7 @@ let mailchimp; if (apiKey) mailchimp = new Mailchimp(apiKey); const tags = ['Gateway User']; const defaultSubscriptionStatus = constants.mailchimpSubscriptionStatuses.SUBSCRIBED; +const readEnv = process.env.ENV || 'prod'; /** * Create MailChimp Subscription Subscriber @@ -37,15 +38,19 @@ const addSubscriptionMember = async (subscriptionId, user, status) => { }, }; // 2. 
Track attempted update in Sentry using log - Sentry.addBreadcrumb({ - category: 'MailChimp', - message: `Adding subscription for user: ${id} to subscription: ${subscriptionId}`, - level: Sentry.Severity.Log, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'MailChimp', + message: `Adding subscription for user: ${id} to subscription: ${subscriptionId}`, + level: Sentry.Severity.Log, + }); + } // 3. POST to MailChimp Marketing API to add the Gateway user to the MailChimp subscription members const md5email = Crypto.createHash('md5').update(email).digest('hex'); await mailchimp.put(`lists/${subscriptionId}/members/${md5email}`, body).catch(err => { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(`Message: ${err.message} Errors: ${JSON.stringify(err.errors)}`); }); } @@ -100,16 +105,20 @@ const updateSubscriptionMembers = async (subscriptionId, members) => { update_existing: true, }; // 4. Track attempted updates in Sentry using log - Sentry.addBreadcrumb({ - category: 'MailChimp', - message: `Updating subscribed for members: ${members.map( - member => `${member.userId} to ${member.status}` - )} against subscription: ${subscriptionId}`, - level: Sentry.Severity.Log, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'MailChimp', + message: `Updating subscribed for members: ${members.map( + member => `${member.userId} to ${member.status}` + )} against subscription: ${subscriptionId}`, + level: Sentry.Severity.Log, + }); + } // 5. POST to MailChimp Marketing API to update member statuses await mailchimp.post(`lists/${subscriptionId}`, body).catch(err => { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(`Message: ${err.message} Errors: ${JSON.stringify(err.errors)}`); }); } @@ -126,16 +135,20 @@ const updateSubscriptionMembers = async (subscriptionId, members) => { const syncSubscriptionMembers = async subscriptionId => { if (apiKey) { // 1. Track attempted sync in Sentry using log - Sentry.addBreadcrumb({ - category: 'MailChimp', - message: `Syncing users for subscription: ${subscriptionId}`, - level: Sentry.Severity.Log, - }); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.addBreadcrumb({ + category: 'MailChimp', + message: `Syncing users for subscription: ${subscriptionId}`, + level: Sentry.Severity.Log, + }); + } // 2. 
Get total member count to anticipate chunking required to process all contacts const { stats: { member_count: subscribedCount, unsubscribe_count: unsubscribedCount }, } = await mailchimp.get(`lists/${subscriptionId}?fields=stats.member_count,stats.unsubscribe_count`).catch(err => { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(`Message: ${err.message} Errors: ${JSON.stringify(err.errors)}`); }); const memberCount = subscribedCount + unsubscribedCount; diff --git a/src/services/mailchimp/mailchimp.route.js b/src/services/mailchimp/mailchimp.route.js index cd023d3a..7d3e451e 100644 --- a/src/services/mailchimp/mailchimp.route.js +++ b/src/services/mailchimp/mailchimp.route.js @@ -2,6 +2,7 @@ import express from 'express'; import * as Sentry from '@sentry/node'; import mailchimpConnector from './mailchimp'; const router = express.Router(); +const readEnv = process.env.ENV || 'prod'; // @router GET /api/v1/mailchimp/:subscriptionId/sync // @desc Performs a two-way sync of opt in preferences between MailChimp and the Gateway database @@ -31,7 +32,9 @@ router.post('/sync', async (req, res) => { // Return response indicating job has started (do not await async import) return res.status(200).json({ success: true, message: 'Sync started' }); } catch (err) { - Sentry.captureException(err); + if (readEnv === 'test' || readEnv === 'prod') { + Sentry.captureException(err); + } console.error(err.message); return res.status(500).json({ success: false, message: 'Sync failed' }); } diff --git a/swagger.yaml b/swagger.yaml deleted file mode 100644 index f572bea9..00000000 --- a/swagger.yaml +++ /dev/null @@ -1,2876 +0,0 @@ -openapi: 3.0.1 -info: - title: HDR UK API - description: API for Tools and artefacts repository. - version: 1.0.0 -servers: - - url: https://api.www.healthdatagateway.org/ - - url: http://localhost:3001/ - - url: https://api.{environment}.healthdatagateway.org:{port}/ - variables: - environment: - default: latest - description: The Environment name. - port: - enum: - - '443' - default: '443' -security: - - oauth2: [] -paths: - /oauth/token: - post: - tags: - - Authorization - description: OAuth2.0 token endpoint responsible for issuing short-lived json web tokens (JWT) for access to secure Gateway APIs. For client credentials grant flow, a valid client id and secret must be provided to identify your application and provide the expected permissions. This type of authentication is reserved for team based connectivity through client applications and is not provided for human user access. For more information, contact the HDR-UK team. - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - grant_type: - type: string - description: The OAuth2.0 grant type that will be used to provide authentication. - client_id: - type: string - description: A unique identifer provided to your team by the HDR-UK team at the time of onboarding to the Gateway. Contact the HDR-UK team for issue of new credentials. - client_secret: - type: string - description: A long (50 character) string provided by the HDR-UK team at the time of onboarding to the Gateway. Contact the HDR-UK team for issue of new credentials. 
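Editor's note: the client credentials flow documented here can be exercised with a request like the minimal sketch below. It is illustrative only, assumes Node 18+ (global fetch), and uses only the fields named in this schema; the credential values are placeholders issued by the HDR-UK team at onboarding.

// Sketch only: exchange client credentials for a short-lived JWT (assumes Node 18+ global fetch).
const tokenResponse = await fetch('https://api.www.healthdatagateway.org/oauth/token', {
	method: 'POST',
	headers: { 'Content-Type': 'application/json' },
	body: JSON.stringify({
		grant_type: 'client_credentials',
		client_id: '<your client id>',         // placeholder - issued by the HDR-UK team
		client_secret: '<your client secret>', // placeholder - 50-character secret issued by the HDR-UK team
	}),
});
const { access_token, token_type, expires_in } = await tokenResponse.json();
// access_token is appended to the Authorization header of subsequent requests; it expires after expires_in seconds (900 by default).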
- required: - - grant_type - - client_secret - - client_id - examples: - 'Client Credentials Grant Flow': - value: - { - 'grant_type': 'client_credentials', - 'client_id': '2ca1f61a90e3547', - 'client_secret': '3f80fecbf781b6da280a8d17aa1a22066fb66daa415d8befc1', - } - responses: - '200': - description: Successful response containing json web token (JWT) that will authorize an HTTP request against secured resources. - content: - application/json: - schema: - type: object - properties: - access_token: - type: string - description: The encoded json web token (JWT) that must be appended to the Authorization of subsequent API HTTP requests in order to access secured resources. - token_type: - type: string - description: The type of token issued, in this case, a json web token (JWT). - expires_in: - type: integer - description: The length of time in seconds before the issued JWT expires, defaulted to 900 seconds (15 minutes). - examples: - 'Client Credentials Grant Flow': - value: - { - 'access_token': 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJkYXRhIjp7Il9pZCI6IjYwMGJmYzk5YzhiZjcwMGYyYzdkNWMzNiIsInRpbWVTdGFtcCI2MTYxMjM4MzkwMzE5Nn0sImlhdCI6MTYxMjM4MzkwMywiZXhwIjoxNjEyMzg0ODAzfQ.-YvUBdjtJvdrRacz6E8-cYPQlum4TrEmiCFl8jO5a-M', - 'token_type': 'jwt', - 'expires_in': 900, - } - '400': - description: Failure response caused by incomplete or invalid client credentials being passed to the endpoint. - content: - application/json: - schema: - type: object - properties: - success: - type: boolean - description: A field that indicates the API request failed. - message: - type: string - description: A message indicating that the request failed for a given reason. - examples: - 'Invalid Client Credentials': - value: { 'success': false, 'message': 'Invalid client credentials were provided for the authorisation attempt' } - 'Incomplete Client Credentials': - value: { 'success': false, 'message': 'Incomplete client credentials were provided for the authorisation attempt' } - 'Invalid Grant Type': - value: { 'success': false, 'message': 'An invalid grant type has been specified' } - - /api/v1/data-access-request/{id}: - get: - tags: - - Data Access Request - parameters: - - in: path - name: id - required: true - description: The unique identifier for a single data access request application. - schema: - type: string - example: 5ee249426136805fbf094eef - description: Retrieve a single Data Access Request application using a supplied identifer - responses: - '200': - description: Successful response containing a full data access request application. - content: - application/json: - schema: - type: object - properties: - status: - type: string - data: - type: object - properties: - id: - type: string - description: The unique identifier for the application. - aboutApplication: - description: An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not. - type: object - properties: - isNationalCoreStudies: - type: boolean - description: A flag to indicate if this application is in relation to a National Core Studies Project. - nationalCoreStudiesProjectId: - type: integer - description: The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project. - projectName: - type: string - description: The project name that has been assigned to the application by the applicant(s). 
- authorIds: - type: array - items: - type: integer - description: An array of values correlating to specific user's via their numeric identifiers. An author is also known as a contributor to an application and can view, edit or submit. - datasetIds: - type: array - items: - type: string - description: An array of values correlating to datasets selected for the application via their identifier, which is unique per version. - datasetTitles: - type: array - items: - type: string - description: An array of strings correlating to the dataset titles that have been selected for the application. - applicationStatus: - type: string - enum: - - inProgress - - submitted - - inReview - - approved - - rejected - - approved with conditions - description: The current status of the application. - jsonSchema: - type: object - description: The object containing the json definition that renders the application form using the Winterfell library. This contains the details of questions, questions sets, question panels, headings and navigation items that appear. - questionAnswers: - type: object - description: The object containing the answers provided on the application form. This consists of a series of key pairs, where the key is the unqiue question Id, and the value the is the answer provided to the question. In the case of a multi select on the form, the value may be an array. - publisher: - type: string - description: The name of the Custodian that holds the dataset and is processing the application. - publisherObj: - type: object - description: The object containing details regarding the Custodian/publisher relating to the application. - userId: - type: integer - description: The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application. - schemaId: - type: string - description: The unique identifier that correlates to the schema from which the application form was generated. - files: - type: array - items: - type: object - description: An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem. - amendmentIterations: - type: array - items: - type: object - description: An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. The object contains dates that the application was returned, and resubmitted as well as reference to any questions that were highlighted for amendment. - createdAt: - type: string - description: The date and time that the application was started. - updatedAt: - type: string - description: The date and time that the application was last updated by any party. - projectId: - type: string - description: The unique identifier for the application converted to a more human friendly format in uppercase and hypenated. - dateSubmitted: - type: string - description: The date and time that the application was originally submitted by the applicant(s) to the Custodian for review. - dateReviewStart: - type: string - description: The date and time that the review process was commenced by a Custodian manager. The review starts from the moment the manager opens the application to triage it. - dateFinalStatus: - type: string - description: The date and time that the Custodian triggered a status change to the application once a final decision was made. E.g. 
when application was approved. This date can be used in conjunction with the dateReviewStart date to calculate the length of time the Custodian took to make a decision through their review process. - datasets: - type: array - items: - type: object - description: An array containing the full metadata for each of the datasets that have been applied for through this application. - mainApplicant: - type: object - description: An object containing the details of the main applicant of the application as referenced by the userId field. - authors: - type: array - items: - type: object - description: An array containing the details of the contributors of the application as referenced by the authorIds field. - readOnly: - type: boolean - description: A value to indicate if the requesting party is able to modify the application in its present state. For example, this will be false for a Custodian, but true for applicants if the applicant(s) are working on resubmitting the application following a request for amendments. - unansweredAmendments: - type: integer - description: The number of amendments that have been requested by the Custodian in the current amendment iteration. - answeredAmendments: - type: integer - description: The number of requested amendments that the applicant(s) have fixed in the current amendment iteration. - userType: - type: string - enum: - - custodian - - applicant - description: The type of user that has requested the Data Access Request application based on their permissions. It is either an applicant or a Custodian user. - activeParty: - type: string - enum: - - custodian - - applicant - description: The party that is currently handling the application. This is the applicant during presubmission, then the Custodian following submission. The active party then fluctuates between parties during amendment iterations. - inReviewMode: - type: boolean - description: A flag to indicate if the current user is a reviewer of the application. This value will be false unless the requesting user is an assigned reviewer to a currently active workflow step. When this value is true, the requesting user is able to recommend approval or rejection of the application. - reviewSections: - type: array - items: - type: string - description: An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data'] - hasRecommended: - type: boolean - description: A flag to indicate if the current user as a reviewer of the current workflow phase has submitted their recommendation for approval or rejection based on their review of the review sections assigned to them. - workflow: - type: object - description: The full details of the workflow that has been assigned to the Data Access Request application. This includes information such as the review phases that the application will pass through and associated metadata. 
- examples: - 'Approved Application': - value: - { - 'status': 'success', - 'data': - { - 'aboutApplication': - { - 'selectedDatasets': - [ - { - '_id': '5fc31a18d98e4f4cff7e9315', - 'datasetId': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'name': 'HDR UK Papers & Preprints', - 'description': "Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint", - 'abstract': 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'publisherObj': - { - 'dataRequestModalContent': { 'header': ' ', 'body': '{omitted for brevity...}', 'footer': '' }, - 'active': true, - 'allowsMessaging': true, - 'workflowEnabled': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'name': 'OTHER > HEALTH DATA RESEARCH UK', - 'imageURL': '', - 'team': - { - 'active': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'members': - [ - { 'roles': ['manager'], 'memberid': '5f1a98861a821b4a53e44d15' }, - { 'roles': ['manager'], 'memberid': '600bfc99c8bf700f2c7d5c36' }, - ], - 'type': 'publisher', - '__v': 3, - 'createdAt': '2020-11-30T21:12:40.855Z', - 'updatedAt': '2020-12-02T13:33:45.232Z', - }, - }, - }, - ], - 'isNationalCoreStudies': true, - 'nationalCoreStudiesProjectId': '4324836585275824', - 'projectName': 'Test application title', - 'completedDatasetSelection': true, - 'completedInviteCollaborators': true, - 'completedReadAdvice': true, - 'completedCommunicateAdvice': true, - 'completedApprovalsAdvice': true, - 'completedSubmitAdvice': true, - }, - 'authorIds': [], - 'datasetIds': ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], - 'datasetTitles': [], - 'applicationStatus': 'approved', - 'jsonSchema': '{omitted for brevity...}', - 'questionAnswers': - { - 'fullname-892140ec730145dc5a28b8fe139c2876': 'James Smith', - 'jobtitle-ff1d692a04b4bb9a2babe9093339136f': 'Consultant', - 'organisation-65c06905b8319ffa29919732a197d581': 'Consulting Inc.', - }, - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - '_id': '60142c5b4316a0e0fcd47c56', - 'version': 1, - 'userId': 9190228196797084, - 'schemaId': '5f55e87e780ba204b0a98eb8', - 'files': [], - 'amendmentIterations': [], - 'createdAt': '2021-01-29T15:40:11.943Z', - 'updatedAt': '2021-02-03T14:38:22.688Z', - '__v': 0, - 'projectId': '6014-2C5B-4316-A0E0-FCD4-7C56', - 'dateSubmitted': '2021-01-29T16:30:27.351Z', - 'dateReviewStart': '2021-02-03T14:36:22.341Z', - 'dateFinalStatus': '2021-02-03T14:38:22.680Z', - 'datasets': ['{omitted for brevity...}'], - 'dataset': null, - 'mainApplicant': { '_id': '5f1a98861a821b4a53e44d15', 'firstname': 'James', 'lastname': 'Smith' }, - 'authors': [], - 'id': '60142c5b4316a0e0fcd47c56', - 'readOnly': true, - 'unansweredAmendments': 0, - 'answeredAmendments': 0, - 'userType': 'custodian', - 'activeParty': 'custodian', - 'inReviewMode': false, - 'reviewSections': [], - 'hasRecommended': false, - 'workflow': {}, - }, - } - '404': - description: Failed to find the application requested. - content: - application/json: - schema: - type: object - properties: - status: - type: string - message: - type: string - examples: - 'Not Found': - value: { 'status': 'error', 'message': 'Application not found.' } - '401': - description: Unauthorised attempt to access an application. 
- content: - application/json: - schema: - type: object - properties: - status: - type: string - message: - type: string - examples: - 'Unauthorised': - value: { 'status': 'failure', 'message': 'Unauthorised' } - put: - tags: - - Data Access Request - parameters: - - in: path - name: id - required: true - description: The unique identifier for a single Data Access Request application. - schema: - type: string - example: 5ee249426136805fbf094eef - description: Update a single Data Access Request application. - requestBody: - content: - application/json: - schema: - type: object - properties: - applicationStatus: - type: string - applicationStatusDesc: - type: string - examples: - 'Update Application Status': - value: { 'applicationStatus': 'approved', 'applicationStatusDesc': 'This application meets all the requirements.' } - responses: - '200': - description: Successful response containing the full, updated data access request application. - content: - application/json: - schema: - type: object - properties: - status: - type: string - data: - type: object - properties: - id: - type: string - description: The unique identifier for the application. - aboutApplication: - description: An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not. - type: object - properties: - isNationalCoreStudies: - type: boolean - description: A flag to indicate if this application is in relation to a National Core Studies Project. - nationalCoreStudiesProjectId: - type: integer - description: The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project. - projectName: - type: string - description: The project name that has been assigned to the application by the applicant(s). - authorIds: - type: array - items: - type: integer - description: An array of values correlating to specific user's via their numeric identifiers. An author is also known as a contributor to an application and can view, edit or submit. - datasetIds: - type: array - items: - type: string - description: An array of values correlating to datasets selected for the application via their identifier, which is unique per version. - datasetTitles: - type: array - items: - type: string - description: An array of strings correlating to the dataset titles that have been selected for the application. - applicationStatus: - type: string - enum: - - inProgress - - submitted - - inReview - - approved - - rejected - - approved with conditions - description: The current status of the application. - jsonSchema: - type: object - description: The object containing the json definition that renders the application form using the Winterfell library. This contains the details of questions, questions sets, question panels, headings and navigation items that appear. - questionAnswers: - type: object - description: The object containing the answers provided on the application form. This consists of a series of key pairs, where the key is the unqiue question Id, and the value the is the answer provided to the question. In the case of a multi select on the form, the value may be an array. - publisher: - type: string - description: The name of the Custodian that holds the dataset and is processing the application. - publisherObj: - type: object - description: The object containing details regarding the Custodian/publisher relating to the application. 
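Editor's note: a hedged companion sketch for the update operation documented in this section. It sends the documented 'Update Application Status' body to the PUT endpoint using the example application id from the path parameter; the JWT value is a placeholder obtained via the token sketch earlier.

// Sketch only: update a data access request's status (assumes Node 18+ global fetch).
const accessToken = '<JWT issued by /oauth/token>'; // placeholder - already carries the 'Bearer ' prefix in the documented example
const applicationId = '5ee249426136805fbf094eef';   // example id from the path parameter above
const updateResponse = await fetch(`https://api.www.healthdatagateway.org/api/v1/data-access-request/${applicationId}`, {
	method: 'PUT',
	headers: {
		'Content-Type': 'application/json',
		Authorization: accessToken,
	},
	body: JSON.stringify({
		applicationStatus: 'approved',
		applicationStatusDesc: 'This application meets all the requirements.',
	}),
});
const { status, data } = await updateResponse.json(); // a 200 response returns the full, updated application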
- userId: - type: integer - description: The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application. - schemaId: - type: string - description: The unique identifier that correlates to the schema from which the application form was generated. - files: - type: array - items: - type: object - description: An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem. - amendmentIterations: - type: array - items: - type: object - description: An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. The object contains dates that the application was returned, and resubmitted as well as reference to any questions that were highlighted for amendment. - createdAt: - type: string - description: The date and time that the application was started. - updatedAt: - type: string - description: The date and time that the application was last updated by any party. - projectId: - type: string - description: The unique identifier for the application converted to a more human friendly format in uppercase and hypenated. - dateSubmitted: - type: string - description: The date and time that the application was originally submitted by the applicant(s) to the Custodian for review. - dateReviewStart: - type: string - description: The date and time that the review process was commenced by a Custodian manager. The review starts from the moment the manager opens the application to triage it. - dateFinalStatus: - type: string - description: The date and time that the Custodian triggered a status change to the application once a final decision was made. E.g. when application was approved. This date can be used in conjunction with the dateReviewStart date to calculate the length of time the Custodian took to make a decision through their review process. - datasets: - type: array - items: - type: object - description: An array containing the full metadata for each of the datasets that have been applied for through this application. - mainApplicant: - type: object - description: An object containing the details of the main applicant of the application as referenced by the userId field. - authors: - type: array - items: - type: object - description: An array containing the details of the contributors of the application as referenced by the authorIds field. - readOnly: - type: boolean - description: A value to indicate if the requesting party is able to modify the application in its present state. For example, this will be false for a Custodian, but true for applicants if the applicant(s) are working on resubmitting the application following a request for amendments. - unansweredAmendments: - type: integer - description: The number of amendments that have been requested by the Custodian in the current amendment iteration. - answeredAmendments: - type: integer - description: The number of requested amendments that the applicant(s) have fixed in the current amendment iteration. - userType: - type: string - enum: - - custodian - - applicant - description: The type of user that has requested the Data Access Request application based on their permissions. It is either an applicant or a Custodian user. 
- activeParty: - type: string - enum: - - custodian - - applicant - description: The party that is currently handling the application. This is the applicant during presubmission, then the Custodian following submission. The active party then fluctuates between parties during amendment iterations. - inReviewMode: - type: boolean - description: A flag to indicate if the current user is a reviewer of the application. This value will be false unless the requesting user is an assigned reviewer to a currently active workflow step. When this value is true, the requesting user is able to recommend approval or rejection of the application. - reviewSections: - type: array - items: - type: string - description: An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data'] - hasRecommended: - type: boolean - description: A flag to indicate if the current user as a reviewer of the current workflow phase has submitted their recommendation for approval or rejection based on their review of the review sections assigned to them. - workflow: - type: object - description: The full details of the workflow that has been assigned to the Data Access Request application. This includes information such as the review phases that the application will pass through and associated metadata. - examples: - 'Approved Application': - value: - { - 'status': 'success', - 'data': - { - 'aboutApplication': - { - 'selectedDatasets': - [ - { - '_id': '5fc31a18d98e4f4cff7e9315', - 'datasetId': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'name': 'HDR UK Papers & Preprints', - 'description': "Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint", - 'abstract': 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'publisherObj': - { - 'dataRequestModalContent': { 'header': ' ', 'body': '{omitted for brevity...}', 'footer': '' }, - 'active': true, - 'allowsMessaging': true, - 'workflowEnabled': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'name': 'OTHER > HEALTH DATA RESEARCH UK', - 'imageURL': '', - 'team': - { - 'active': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'members': - [ - { 'roles': ['manager'], 'memberid': '5f1a98861a821b4a53e44d15' }, - { 'roles': ['manager'], 'memberid': '600bfc99c8bf700f2c7d5c36' }, - ], - 'type': 'publisher', - '__v': 3, - 'createdAt': '2020-11-30T21:12:40.855Z', - 'updatedAt': '2020-12-02T13:33:45.232Z', - }, - }, - }, - ], - 'isNationalCoreStudies': true, - 'nationalCoreStudiesProjectId': '4324836585275824', - 'projectName': 'Test application title', - 'completedDatasetSelection': true, - 'completedInviteCollaborators': true, - 'completedReadAdvice': true, - 'completedCommunicateAdvice': true, - 'completedApprovalsAdvice': true, - 'completedSubmitAdvice': true, - }, - 'authorIds': [], - 'datasetIds': ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], - 'datasetTitles': [], - 'applicationStatus': 'approved', - 'jsonSchema': '{omitted for brevity...}', - 'questionAnswers': - { - 'fullname-892140ec730145dc5a28b8fe139c2876': 'James Smith', - 'jobtitle-ff1d692a04b4bb9a2babe9093339136f': 'Consultant', - 'organisation-65c06905b8319ffa29919732a197d581': 'Consulting Inc.', - }, - 'publisher': 'OTHER > HEALTH 
DATA RESEARCH UK', - '_id': '60142c5b4316a0e0fcd47c56', - 'version': 1, - 'userId': 9190228196797084, - 'schemaId': '5f55e87e780ba204b0a98eb8', - 'files': [], - 'amendmentIterations': [], - 'createdAt': '2021-01-29T15:40:11.943Z', - 'updatedAt': '2021-02-03T14:38:22.688Z', - '__v': 0, - 'projectId': '6014-2C5B-4316-A0E0-FCD4-7C56', - 'dateSubmitted': '2021-01-29T16:30:27.351Z', - 'dateReviewStart': '2021-02-03T14:36:22.341Z', - 'dateFinalStatus': '2021-02-03T14:38:22.680Z', - 'datasets': ['{omitted for brevity...}'], - 'dataset': null, - 'mainApplicant': { '_id': '5f1a98861a821b4a53e44d15', 'firstname': 'James', 'lastname': 'Smith' }, - 'authors': [], - 'id': '60142c5b4316a0e0fcd47c56', - 'readOnly': true, - 'unansweredAmendments': 0, - 'answeredAmendments': 0, - 'userType': 'custodian', - 'activeParty': 'custodian', - 'inReviewMode': false, - 'reviewSections': [], - 'hasRecommended': false, - 'workflow': {}, - }, - } - '404': - description: Failed to find the application requested. - content: - application/json: - schema: - type: object - properties: - status: - type: string - message: - type: string - examples: - 'Not Found': - value: { 'status': 'error', 'message': 'Application not found.' } - '401': - description: Unauthorised attempt to update an application. - content: - application/json: - schema: - type: object - properties: - status: - type: string - message: - type: string - examples: - 'Unauthorised': - value: { 'status': 'error', 'message': 'Unauthorised to perform this update.' } - patch: - summary: Update a users question answers for access request. - security: - - cookieAuth: [] - tags: - - Data Access Request - parameters: - - in: path - name: id - required: true - description: The ID of the datset - schema: - type: string - example: 5ee249426136805fbf094eef - requestBody: - content: - application/json: - schema: - type: object - properties: - questionAnswers: - type: object - examples: - '0': - value: |- - { - "firstName": "Roger" - } - responses: - '200': - description: OK - - /api/v1/publishers/{publisher}/dataaccessrequests: - get: - tags: - - Publishers - parameters: - - in: path - name: publisher - required: true - description: The full name of the Custodian/Publisher, as registered on the Gateway. - schema: - type: string - example: OTHER > HEALTH DATA RESEARCH UK - description: Returns a collection of all Data Access Requests that have been submitted to the Custodian team for review. - responses: - '200': - description: Successful response containing a collection of Data Access Request applications. - content: - application/json: - schema: - type: object - properties: - avgDecisionTime: - type: string - description: The average number of days the Custodian has taken to process applications from submission to decision. - canViewSubmitted: - type: boolean - description: A flag to indicate if the requesting user has permissions to view submitted applications, which are visible only to managers of the Custodian team. Using OAuth2.0 client credentials will return this value as true. - status: - type: string - data: - type: array - items: - type: object - properties: - aboutApplication: - description: An object which holds data relating to the 'about application' section of the application form including details of whether the project is an NCS project or not. - type: object - properties: - isNationalCoreStudies: - type: boolean - description: A flag to indicate if this application is in relation to a National Core Studies Project. 
- nationalCoreStudiesProjectId: - type: integer - description: The unique identifier correlating to a Gateway Project entity indicating that this application is relating to a National Core Studies project. - projectName: - type: string - description: The project name that has been assigned to the application by the applicant(s). - amendmentIterations: - type: array - items: - type: object - description: An array containing an object with details for each iteration the application has passed through. An iteration is defined as an application which has been returned by the Custodian for correction, corrected by the applicant(s) and resubmitted. The object contains the dates that the application was returned and resubmitted, as well as references to any questions that were highlighted for amendment. - amendmentStatus: - type: string - description: A textual indicator of what state the application is in relating to updates made by the Custodian e.g. if it is awaiting updates from the applicant or if new updates have been submitted by the applicant(s). - applicants: - type: string - description: Concatenated list of applicants' names who are contributing to the application. - applicationStatus: - type: string - enum: - - inProgress - - submitted - - inReview - - approved - - rejected - - approved with conditions - description: The current status of the application. - authorIds: - type: array - items: - type: integer - description: An array of values correlating to specific users via their numeric identifiers. An author is also known as a contributor to an application and can view, edit or submit. - createdAt: - type: string - description: The date and time that the application was started. - datasetIds: - type: array - items: - type: string - description: An array of values correlating to datasets selected for the application via their identifier, which is unique per version. - datasetTitles: - type: array - items: - type: string - description: An array of strings correlating to the dataset titles that have been selected for the application. - datasets: - type: array - items: - type: object - description: An array containing the full metadata for each of the datasets that have been applied for through this application. - dateSubmitted: - type: string - description: The date and time that the application was originally submitted by the applicant(s) to the Custodian for review. - files: - type: array - items: - type: object - description: An array containing the links to files that have been uploaded to the application form and are held within the Gateway ecosystem. - id: - type: string - description: The unique identifier for the application. - - jsonSchema: - type: object - description: The object containing the JSON definition that renders the application form using the Winterfell library. This contains the details of questions, question sets, question panels, headings and navigation items that appear. - questionAnswers: - type: object - description: The object containing the answers provided on the application form. This consists of a series of key-value pairs, where the key is the unique question ID, and the value is the answer provided to the question. In the case of a multi-select on the form, the value may be an array. - mainApplicant: - type: object - description: An object containing the details of the main applicant of the application as referenced by the userId field. 
- projectId: - type: string - description: The unique identifier for the application converted to a more human-friendly format in uppercase and hyphenated. - projectName: - type: string - description: The project name that has been assigned to the application by the applicant(s). - publisher: - type: string - description: The name of the Custodian that holds the dataset and is processing the application. - publisherObj: - type: object - description: The object containing details regarding the Custodian/publisher relating to the application. - reviewPanels: - type: array - items: - type: string - description: An array containing the sections of the application form that the current user is required to review if they are a reviewer of the current workflow step that the application is in. E.g. ['Safe People','Safe Data'] - schemaId: - type: string - description: The unique identifier that correlates to the schema from which the application form was generated. - updatedAt: - type: string - description: The date and time that the application was last updated by any party. - userId: - type: integer - description: The unique identifier that correlates to the user account of the main applicant. This is always the user that started the application. - deadlinePassed: - type: boolean - description: A flag to indicate if the deadline has passed for the current review phase for this application. - decisionApproved: - type: boolean - description: A flag to indicate if the requesting user's decision as a reviewer of the current workflow phase was positive or negative, i.e. correlating to an approval or rejection recommendation. - decisionComments: - type: string - description: A supporting note or comment made by the requesting user as context to their decision as a reviewer of the current workflow phase. - decisionDate: - type: string - description: The date that the requesting user made their decision as a reviewer of the current workflow phase. - decisionDuration: - type: integer - description: The number of days from submission until a final decision was made on the application, i.e. the application status was changed to a final status, e.g. 'Approved'. - decisionMade: - type: boolean - description: A flag to indicate if the requesting user has made an expected decision as a reviewer of the current workflow phase. - decisionStatus: - type: string - description: A message indicating if the requesting user as a reviewer of the application has made a decision or is still required to make a decision for the current workflow. - isReviewer: - type: boolean - description: A flag to indicate if the requesting user is a reviewer of the current workflow step for the application. - remainingActioners: - type: array - items: - type: string - description: An array containing the names of Custodian team reviewers expected to complete a review for the current workflow phase, or a list of managers if the application is awaiting a final decision. - reviewStatus: - type: string - description: A message indicating the current status of the application review in relation to the assigned workflow. E.g. 'Final decision required' or 'Deadline is today'. This message changes based on the requesting user's relationship to the application. E.g. if they are a reviewer or manager. - stepName: - type: string - description: The name of the current workflow step that the application is in. - workflowCompleted: - type: boolean - description: A flag to indicate if the assigned workflow for the review process has been completed. 
- workflowName: - type: string - description: The name of the workflow the Custodian team have assigned to the application for the review process. - examples: - 'Single Request Received': - value: - { - 'success': true, - 'data': - [ - { - 'authorIds': [], - 'datasetIds': ['d5faf9c6-6c34-46d7-93c4-7706a5436ed9'], - 'datasetTitles': [], - 'applicationStatus': 'submitted', - 'jsonSchema': '{omitted for brevity...}', - 'questionAnswers': '{omitted for brevity...}', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - '_id': '601853db22dc004f9adfaa24', - 'version': 1, - 'userId': 7584453789581072, - 'schemaId': '5f55e87e780ba204b0a98eb8', - 'files': - [ - { - 'error': '', - '_id': '601aacf8ecdfa66e5cbc2742', - 'status': 'UPLOADED', - 'description': 'QuestionAnswers', - 'fileId': '9e76ee1a676f423b9b5c7aabf59c69db', - 'size': 509984, - 'name': 'QuestionAnswersFlags.png', - 'owner': '5ec7f1b39219d627e5cafae3', - }, - { - 'error': '', - '_id': '601aadbcecdfa6c532bc2743', - 'status': 'UPLOADED', - 'description': 'Notifications', - 'fileId': 'adb1718dcc094b9cb4b0ab347ad2ee94', - 'size': 54346, - 'name': 'HQIP-Workflow-Assigned-Notification.png', - 'owner': '5ec7f1b39219d627e5cafae3', - }, - ], - 'amendmentIterations': [], - 'createdAt': '2021-02-01T19:17:47.470Z', - 'updatedAt': '2021-02-03T16:36:36.720Z', - '__v': 2, - 'projectId': '6018-53DB-22DC-004F-9ADF-AA24', - 'aboutApplication': - { - 'selectedDatasets': - [ - { - '_id': '5fc31a18d98e4f4cff7e9315', - 'datasetId': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'name': 'HDR UK Papers & Preprints', - 'description': "Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint", - 'abstract': 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'publisherObj': - { - 'dataRequestModalContent': { 'header': ' ', 'body': '{omitted for brevity...}', 'footer': '' }, - 'active': true, - 'allowsMessaging': true, - 'workflowEnabled': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'name': 'OTHER > HEALTH DATA RESEARCH UK', - 'imageURL': '', - 'team': - { - 'active': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'members': - [ - { 'roles': ['manager'], 'memberid': '5f1a98861a821b4a53e44d15' }, - { 'roles': ['manager'], 'memberid': '600bfc99c8bf700f2c7d5c36' }, - ], - 'type': 'publisher', - '__v': 3, - 'createdAt': '2020-11-30T21:12:40.855Z', - 'updatedAt': '2020-12-02T13:33:45.232Z', - }, - }, - }, - ], - 'isNationalCoreStudies': true, - 'nationalCoreStudiesProjectId': '4324836585275824', - 'projectName': 'Test application title', - 'completedDatasetSelection': true, - 'completedInviteCollaborators': true, - 'completedReadAdvice': true, - 'completedCommunicateAdvice': true, - 'completedApprovalsAdvice': true, - 'completedSubmitAdvice': true, - }, - 'dateSubmitted': '2021-02-03T16:37:36.081Z', - 'datasets': - [ - { - 'categories': { 'programmingLanguage': [] }, - 'tags': { 'features': ['Preprints', 'Papers', 'HDR UK'], 'topics': [] }, - 'datasetfields': - { - 'geographicCoverage': ['https://www.geonames.org/countries/GB/united-kingdom.html'], - 'physicalSampleAvailability': ['Not Available'], - 'technicaldetails': '{omitted for brevity...}', - 'versionLinks': - [ - { - 'id': '142b1618-2691-4019-97b4-16b1e27c5f95', - 'linkType': 'Superseded By', - 'domainType': 'CatalogueSemanticLink', - 'source': - { - 'id': 
'9e798632-442a-427b-8d0e-456f754d28dc', - 'domainType': 'DataModel', - 'label': 'HDR UK Papers & Preprints', - 'documentationVersion': '0.0.1', - }, - 'target': - { - 'id': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'domainType': 'DataModel', - 'label': 'HDR UK Papers & Preprints', - 'documentationVersion': '1.0.0', - }, - }, - ], - 'phenotypes': [], - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'abstract': 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', - 'releaseDate': '2020-11-27T00:00:00Z', - 'accessRequestDuration': 'Other', - 'conformsTo': 'OTHER', - 'accessRights': 'https://github.com/HDRUK/papers/blob/master/LICENSE', - 'jurisdiction': 'GB-ENG', - 'datasetStartDate': '2020-03-31', - 'datasetEndDate': '2022-04-30', - 'statisticalPopulation': '0', - 'ageBand': '0-0', - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'periodicity': 'Daily', - 'metadataquality': - { - 'id': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'title': 'HDR UK Papers & Preprints', - 'completeness_percent': 95.24, - 'weighted_completeness_percent': 100, - 'error_percent': 11.63, - 'weighted_error_percent': 19.05, - 'quality_score': 91.81, - 'quality_rating': 'Gold', - 'weighted_quality_score': 90.47, - 'weighted_quality_rating': 'Gold', - }, - 'datautility': - { - 'id': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'publisher': 'OTHER > HEALTH DATA RESEARCH UK', - 'title': 'HDR UK Papers & Preprints', - 'metadata_richness': 'Gold', - 'availability_of_additional_documentation_and_support': '', - 'data_model': '', - 'data_dictionary': '', - 'provenance': '', - 'data_quality_management_process': '', - 'dama_quality_dimensions': '', - 'pathway_coverage': '', - 'length_of_follow_up': '', - 'allowable_uses': '', - 'research_environment': '', - 'time_lag': '', - 'timeliness': '', - 'linkages': '', - 'data_enrichments': '', - }, - 'metadataschema': - { - '@context': 'http://schema.org/', - '@type': 'Dataset', - 'identifier': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'url': 'https://healthdatagateway.org/detail/d5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'name': 'HDR UK Papers & Preprints', - 'description': "Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint", - 'license': 'Open Access', - 'keywords': - [ - 'Preprints,Papers,HDR UK', - 'OTHER > HEALTH DATA RESEARCH UK', - 'NOT APPLICABLE', - 'GB-ENG', - 'https://www.geonames.org/countries/GB/united-kingdom.html', - ], - 'includedinDataCatalog': - [ - { - '@type': 'DataCatalog', - 'name': 'OTHER > HEALTH DATA RESEARCH UK', - 'url': 'hdr.hdr@hdruk.ac.uk', - }, - { - '@type': 'DataCatalog', - 'name': 'HDR UK Health Data Gateway', - 'url': 'http://healthdatagateway.org', - }, - ], - }, - }, - 'authors': [], - 'showOrganisation': false, - 'toolids': [], - 'datasetids': [], - '_id': '5fc31a18d98e4f4cff7e9315', - 'relatedObjects': [], - 'programmingLanguage': [], - 'pid': 'b7a62c6d-ed00-4423-ad27-e90b71222d8e', - 'datasetVersion': '1.0.0', - 'id': 9816147066244124, - 'datasetid': 'd5faf9c6-6c34-46d7-93c4-7706a5436ed9', - 'type': 'dataset', - 'activeflag': 'active', - 'name': 'HDR UK Papers & Preprints', - 'description': "Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations\n\nThis will include:\n- Papers\n- COVID-19 Papers\n- COVID-19 Preprint", - 'license': 'Open Access', - 'datasetv2': - { - 'identifier': '', - 
'version': '', - 'issued': '', - 'modified': '', - 'revisions': [], - 'summary': - { - 'title': '', - 'abstract': 'Publications that mention HDR-UK (or any variant thereof) in Acknowledgements or Author Affiliations', - 'publisher': - { - 'identifier': '', - 'name': 'HEALTH DATA RESEARCH UK', - 'logo': '', - 'description': '', - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'memberOf': 'OTHER', - 'accessRights': [], - 'deliveryLeadTime': '', - 'accessService': '', - 'accessRequestCost': '', - 'dataUseLimitation': [], - 'dataUseRequirements': [], - }, - 'contactPoint': 'hdr.hdr@hdruk.ac.uk', - 'keywords': ['Preprints', 'Papers', 'HDR UK'], - 'alternateIdentifiers': [], - 'doiName': 'https://doi.org/10.5281/zenodo.326615', - }, - 'documentation': - { - 'description': '', - 'associatedMedia': ['https://github.com/HDRUK/papers'], - 'isPartOf': 'NOT APPLICABLE', - }, - 'coverage': - { - 'spatial': 'GB', - 'typicalAgeRange': '0-0', - 'physicalSampleAvailability': ['NOT AVAILABLE'], - 'followup': 'UNKNOWN', - 'pathway': 'NOT APPLICABLE', - }, - 'provenance': - { - 'origin': { 'purpose': 'OTHER', 'source': 'MACHINE GENERATED', 'collectionSituation': 'OTHER' }, - 'temporal': - { - 'accrualPeriodicity': 'DAILY', - 'distributionReleaseDate': '2020-11-27', - 'startDate': '2020-03-31', - 'endDate': '2022-04-30', - 'timeLag': 'NO TIMELAG', - }, - }, - 'accessibility': - { - 'usage': - { - 'dataUseLimitation': 'GENERAL RESEARCH USE', - 'dataUseRequirements': 'RETURN TO DATABASE OR RESOURCE', - 'resourceCreator': 'HDR UK Using Team', - 'investigations': ['https://github.com/HDRUK/papers'], - 'isReferencedBy': ['Not Available'], - }, - 'access': - { - 'accessRights': ['Open Access'], - 'accessService': 'https://github.com/HDRUK/papers', - 'accessRequestCost': 'Free', - 'deliveryLeadTime': 'OTHER', - 'jurisdiction': 'GB-ENG', - 'dataProcessor': 'HDR UK', - 'dataController': 'HDR UK', - }, - 'formatAndStandards': - { - 'vocabularyEncodingScheme': 'OTHER', - 'conformsTo': 'OTHER', - 'language': 'en', - 'format': ['csv', 'JSON'], - }, - }, - 'enrichmentAndLinkage': - { - 'qualifiedRelation': ['Not Available'], - 'derivation': ['Not Available'], - 'tools': ['https://github.com/HDRUK/papers'], - }, - 'observations': [], - }, - 'createdAt': '2020-11-29T03:48:41.794Z', - 'updatedAt': '2021-02-02T10:09:57.030Z', - '__v': 0, - 'counter': 20, - }, - ], - 'dataset': null, - 'mainApplicant': - { - 'isServiceAccount': false, - '_id': '5ec7f1b39219d627e5cafae3', - 'id': 7584453789581072, - 'providerId': '112563375053074694443', - 'provider': 'google', - 'firstname': 'Chris', - 'lastname': 'Marks', - 'email': 'chris.marks@paconsulting.com', - 'role': 'Admin', - '__v': 0, - 'redirectURL': '/tool/100000012', - 'discourseKey': '2f52ecaa21a0d0223a119da5a09f8f8b09459e7b69ec3f981102d09f66488d99', - 'discourseUsername': 'chris.marks', - 'updatedAt': '2021-02-01T12:39:56.372Z', - }, - 'publisherObj': - { - 'dataRequestModalContent': { 'header': '', 'body': '', 'footer': '' }, - 'active': true, - 'allowsMessaging': true, - 'workflowEnabled': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'name': 'OTHER > HEALTH DATA RESEARCH UK', - 'imageURL': '', - 'team': - { - 'active': true, - '_id': '5f7b1a2bce9f65e6ed83e7da', - 'members': - [ - { 'roles': ['manager'], 'memberid': '5f1a98861a821b4a53e44d15' }, - { 'roles': ['manager'], 'memberid': '600bfc99c8bf700f2c7d5c36' }, - ], - 'type': 'publisher', - '__v': 3, - 'createdAt': '2020-11-30T21:12:40.855Z', - 'updatedAt': '2020-12-02T13:33:45.232Z', - 'users': - [ - { '_id': 
'5f1a98861a821b4a53e44d15', 'firstname': 'Robin', 'lastname': 'Kavanagh' }, - { '_id': '600bfc99c8bf700f2c7d5c36', 'firstname': 'HDR-UK', 'lastname': 'Service Account' }, - ], - }, - }, - 'id': '601853db22dc004f9adfaa24', - 'projectName': 'PA Paper', - 'applicants': 'Chris Marks', - 'workflowName': '', - 'workflowCompleted': false, - 'decisionDuration': '', - 'decisionMade': false, - 'decisionStatus': '', - 'decisionComments': '', - 'decisionDate': '', - 'decisionApproved': false, - 'remainingActioners': 'Robin Kavanagh (you), HDR-UK Service Account', - 'stepName': '', - 'deadlinePassed': '', - 'reviewStatus': '', - 'isReviewer': false, - 'reviewPanels': [], - 'amendmentStatus': '', - }, - ], - 'avgDecisionTime': 1, - 'canViewSubmitted': true, - } - '404': - description: Failed to find the application requested. - content: - application/json: - schema: - type: object - properties: - success: - type: boolean - examples: - 'Not Found': - value: { 'success': false } - '401': - description: Unauthorised attempt to access an application. - content: - application/json: - schema: - type: object - properties: - status: - type: string - message: - type: string - examples: - 'Unauthorised': - value: { 'status': 'failure', 'message': 'Unauthorised' } - - /api/v1/data-access-request/{datasetID}: - get: - summary: Returns access request template. - security: - - cookieAuth: [] - tags: - - Data Access Request - parameters: - - in: path - name: datasetID - required: true - description: The ID of the datset - schema: - type: string - example: 6efbc62f-6ebb-4f18-959b-1ec6fd0cc6fb - responses: - '200': - description: OK - - /api/v1/person/{id}: - get: - summary: Returns details for a person. - tags: - - Person - parameters: - - in: path - name: id - required: true - description: The ID of the person - schema: - type: string - example: 900000014 - responses: - '200': - description: OK - - /api/v1/person: - get: - summary: Returns an array of person objects. - tags: - - Person - responses: - '200': - description: OK - post: - summary: Returns a new person object. - tags: - - Person - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - firstname - - lastname - - bio - - link - - orcid - - emailNotifications - - terms - properties: - firstname: - type: string - lastname: - type: string - bio: - type: string - link: - type: string - orcid: - type: string - emailNotifications: - type: boolean - terms: - type: boolean - example: - firstname: 'John' - lastname: 'Smith' - bio: 'Researcher' - link: 'http://google.com' - orcid: 'https://orcid.org/123456789' - emailNotifications: false - terms: true - responses: - '200': - description: OK - put: - summary: Returns edited person object. 
- tags: - - Person - responses: - '200': - description: OK - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - id - - bio - - link - - orcid - - emailNotifications - - terms - properties: - id: - type: string - bio: - type: string - link: - type: string - orcid: - type: string - emailNotifications: - type: boolean - terms: - type: boolean - example: - id: '5268590523943617' - bio: 'Research assistant' - link: 'http://google.com' - orcid: 'https://orcid.org/123456789' - emailNotifications: false - terms: true - - /api/v1/search: - get: - tags: - - Search - summary: Search for HDRUK /search?search - parameters: - - in: query - name: params - schema: - type: object - properties: - search: - type: string - example: Epilepsy - type: - type: string - example: all - category: - type: string - example: API - programmingLanguage: - type: string - example: Javascript - features: - type: string - example: Arbitrage - topics: - type: string - example: Epilepsy - startIndex: - type: string - example: 0 - maxResults: - type: string - example: 10 - style: form - explode: true - responses: - '200': - description: OK - - /api/v1/stats/topSearches: - get: - summary: Returns top searches for a given month and year. - tags: - - Stats - parameters: - - name: month - in: query - required: true - description: Month number. - schema: - type: string - example: 7 - - name: year - in: query - required: true - description: Year. - schema: - type: string - example: 2020 - responses: - '200': - description: OK - - /api/v1/stats: - get: - summary: Returns the details on recent searches, popular objects, unmet demands or recently updated objects based on the rank query parameter. - tags: - - Stats - parameters: - - name: rank - in: query - required: true - description: The type of stat. - schema: - type: string - example: unmet - - name: type - in: query - required: true - description: Resource type. - schema: - type: string - example: Tools - - name: month - in: query - required: true - description: Month number. - schema: - type: string - example: 7 - - name: year - in: query - required: true - description: Year. - schema: - type: string - example: 2020 - responses: - '200': - description: OK - - /api/v1/kpis: - get: - summary: Returns information for KPIs, based on the KPI type and selectedDate parameters. - tags: - - KPIs - parameters: - - name: type - in: query - required: true - description: The type of KPI. - schema: - type: string - example: uptime - - name: selectedDate - in: query - required: true - description: Full date time string. 
- schema: - type: string - example: Wed Jul 01 2020 01:00:00 GMT 0100 (British Summer Time) - responses: - '200': - description: OK - - /api/v1/messages/{id}: - delete: - summary: Delete a Message - security: - - cookieAuth: [] - tags: - - Messages - parameters: - - in: path - name: id - required: true - description: The ID of the Message - schema: - type: string - example: '5ee249426136805fbf094eef' - responses: - '204': - description: Ok - put: - summary: Update a single Message - security: - - cookieAuth: [] - tags: - - Messages - parameters: - - in: path - name: id - required: true - description: The ID of the Message - schema: - type: string - example: '5ee249426136805fbf094eef' - requestBody: - content: - application/json: - schema: - type: object - properties: - isRead: - type: boolean - examples: - 'Update message to read': - value: |- - { - "isRead": true - } - responses: - '204': - description: OK - - /api/v1/messages/unread/count: - get: - summary: Returns the number of unread messages for the authenticated user - security: - - cookieAuth: [] - tags: - - Messages - responses: - '200': - description: OK - - /api/v1/messages: - post: - summary: Returns a new Message object and creates an associated parent Topic if a Topic is not specified in request body - security: - - cookieAuth: [] - tags: - - Messages - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - isRead: - type: boolean - messageDescription: - type: string - messageType: - type: string - required: - - isRead - - messageDescription - - messageType - examples: - 'Create new message': - value: |- - { - "isRead": false, - "messageDescription": "this is an example", - "messageType": "message" - } - responses: - '201': - description: OK - - /api/v1/topics: - post: - summary: Returns a new Topic object with ID (Does not create any associated messages) - security: - - cookieAuth: [] - tags: - - Topics - requestBody: - required: true - content: - application/json: - schema: - type: object - properties: - relatedObjectIds: - type: array - items: - type: string - examples: - 'Create a new topic': - value: |- - { - "relatedObjectIds": "['1','2','3']" - } - responses: - '201': - description: A new Topic - content: - application/json: - schema: - type: object - properties: - _id: - type: object - description: Generated ID - title: - type: string - description: Title of message - subtitle: - type: string - description: Subtitle of message - relatedObjectIds: - type: array - items: - type: string - description: Object ID this Topic is related to - createdBy: - type: object - description: User that created the topic - createdDate: - type: string - description: Date the topic was created - recipients: - type: array - items: - type: string - description: Collection of user IDs - tags: - type: array - items: - type: string - description: Collection of tags to describe topic - get: - summary: Returns a list of all topics that the authenticated user is a recipient or member of - security: - - cookieAuth: [] - tags: - - Topics - responses: - '200': - description: Ok - - /api/v1/topics/{id}: - get: - summary: Returns Topic object by ID - security: - - cookieAuth: [] - tags: - - Topics - parameters: - - in: path - name: id - required: true - description: The ID of the topic - schema: - type: string - example: '5ee249426136805fbf094eef' - responses: - '200': - description: Ok - delete: - summary: Soft deletes a message Topic but does not affect associated messages - security: - - cookieAuth: [] 
- tags: - - Topics - parameters: - - in: path - name: id - required: true - description: The ID of the Topic - schema: - type: string - example: '5ee249426136805fbf094eef' - responses: - '204': - description: Ok - - /api/v1/datasets/{datasetID}: - get: - summary: Returns Dataset object. - tags: - - Datasets - parameters: - - in: path - name: datasetID - required: true - description: The ID of the dataset - schema: - type: string - example: '756daeaa-6e47-4269-9df5-477c01cdd271' - responses: - '200': - description: OK - - /api/v1/datasets: - get: - summary: Returns a list of Dataset objects. - tags: - - Datasets - parameters: - - in: query - name: limit - required: false - description: Limit the number of results - schema: - type: integer - example: 3 - - in: query - name: offset - required: false - description: Index to offset the search results - schema: - type: integer - example: 1 - - in: query - name: q - required: false - description: Filter using search query - schema: - type: string - example: epilepsy - responses: - '200': - description: OK - - /api/v2/datasets: - get: - summary: Returns a list of dataset objects - tags: - - Datasets v2.0 - description: Version 2.0 of the datasets API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. - parameters: - - name: search - in: query - description: Full text index search function which searches for partial matches in various dataset fields including name, description and abstract. The response will contain a metascore indicating the relevancy of the match; by default, results are sorted by the most relevant first unless a manual sort query parameter has been added. - schema: - type: string - example: COVID-19 - - name: page - in: query - description: A specific page of results to retrieve - schema: - type: number - example: 1 - - name: limit - in: query - description: Maximum number of results returned per page - schema: - type: number - example: 10 - - name: sort - in: query - description: Fields to apply sort operations to. Accepts multiple fields in ascending and descending order. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: datasetfields.publisher,name,-counter - - name: fields - in: query - description: Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below. 
- schema: - type: string - example: name,counter,datasetid - - name: count - in: query - description: Returns the number of entities matching the query parameters provided instead of the result payload - schema: - type: boolean - example: true - - name: datasetid - in: query - description: Filter by the unique identifier for a single version of a dataset - schema: - type: string - example: 0cfe60cd-038d-4c03-9a95-894c52135922 - - name: pid - in: query - description: Filter by the identifier for a dataset that persists across versions - schema: - type: string - example: 621dd611-adcf-4434-b538-eecdbe5f72cf - - name: name - in: query - description: Filter by dataset name - schema: - type: string - example: ARIA Dataset - - name: activeflag - in: query - description: Filter by the status of a single dataset version - schema: - type: string - enum: - - active - - archive - example: active - - name: datasetfields.publisher - in: query - description: Filter by the name of the Custodian holding the dataset - schema: - type: string - example: ALLIANCE > BARTS HEALTH NHS TRUST - - name: metadataquality.completeness_percent[gte] - in: query - description: Filter by the metadata quality completeness percentage using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to. - schema: - type: number - example: 90.5 - - name: metadataquality.weighted_completeness_percent[gte] - in: query - description: Filter by the metadata quality weighted completeness percentage using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to. - schema: - type: number - example: 71.2 - - name: metadataquality.weighted_quality_score[gte] - in: query - description: Filter by the metadata quality score using an operator [gte] for greater than or equal to, [gt] for greater than, [lte] for less than or equal to, [lt] for less than, and [eq] for equal to. - schema: - type: number - example: 35.3 - responses: - '200': - description: Successful response containing a list of datasets matching query parameters - - /api/v2/datasets/{datasetid}: - get: - summary: Returns a dataset object. - tags: - - Datasets v2.0 - parameters: - - in: path - name: datasetid - required: true - description: The unique identifier for a specific version of a dataset - schema: - type: string - example: af20ebb2-018a-4557-8ced-0bec75dba150 - - in: query - name: raw - required: false - description: A flag which determines if the response returned is the raw structure in which the data is stored rather than the dataset v2.0 standard - schema: - type: boolean - example: false - description: Version 2.0 of the datasets API introduces the agreed dataset v2.0 schema as defined at the following link - https://github.com/HDRUK/schemata/edit/master/schema/dataset/2.0.0/dataset.schema.json - responses: - '200': - description: Successful response containing a single dataset object - '404': - description: A dataset could not be found by the provided dataset identifier - - /api/v1/projects: - post: - summary: Returns a Project object with ID. 
- security: - - cookieAuth: [] - tags: - - Projects - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - example: # Sample object - type: 'project' - name: 'Epilepsy data research' - link: 'http://epilepsy.org' - description: 'Epilespy data research description' - categories: { category: 'API', programmingLanguage: ['Javascript'], programmingLanguageVersion: '0.0.0' } - licence: 'MIT licence' - authors: [4495285946631793] - tags: { features: ['Arbitrage'], topics: ['Epilepsy'] } - responses: - '200': - description: OK - get: - summary: Returns List of Project objects. - tags: - - Projects - parameters: - - in: query - name: limit - required: false - description: Limit the number of results - schema: - type: integer - example: 3 - - in: query - name: offset - required: false - description: Index to offset the search results - schema: - type: integer - example: 1 - - in: query - name: q - required: false - description: Filter using search query - schema: - type: string - example: epilepsy - responses: - '200': - description: OK - - /api/v1/projects/{id}: - get: - summary: Returns Project object. - tags: - - Projects - parameters: - - in: path - name: id - required: true - schema: - type: integer - example: 441788967946948 - responses: - '200': - description: OK - patch: - summary: Change status of the Project object. - security: - - cookieAuth: [] - tags: - - Projects - parameters: - - in: path - name: id - required: true - schema: - type: integer - example: 662346984100503 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - activeflag: - type: string - example: # Sample object - activeflag: 'active' - responses: - '200': - description: OK - put: - summary: Returns edited Project object. 
- security: - - cookieAuth: [] - tags: - - Projects - parameters: - - in: path - name: id - required: true - description: The ID of the project - schema: - type: integer - format: int64 - example: 26542005388306332 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - id: - type: number - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - toolids: - type: array - items: - type: string - example: # Sample object - id: 26542005388306332 - type: 'project' - name: 'Research Data TEST EPILEPSY' - link: 'http://localhost:8080/epilepsy' - description: 'Epilepsy data research description' - categories: { category: 'API', programmingLanguage: ['Javascript'], programmingLanguageVersion: '1.0.0' } - licence: 'MIT licence' - authors: [4495285946631793] - tags: { features: ['Arbitrage'], topics: ['Epilepsy'] } - toolids: [] - responses: - '200': - description: OK - - /api/v2/projects: - get: - summary: Returns a list of project objects - tags: - - Projects v2.0 - parameters: - - name: search - in: query - description: Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match; by default, results are sorted by the most relevant first unless a manual sort query parameter has been added. - schema: - type: string - example: health service - - name: page - in: query - description: A specific page of results to retrieve - schema: - type: number - example: 1 - - name: limit - in: query - description: Maximum number of results returned per page - schema: - type: number - example: 10 - - name: sort - in: query - description: Fields to apply sort operations to. Accepts multiple fields in ascending and descending order. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: name,-counter - - name: fields - in: query - description: Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: name,counter,description - - name: count - in: query - description: Returns the number of entities matching the query parameters provided instead of the result payload - schema: - type: boolean - example: true - description: Version 2.0 of the projects API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. 
- responses: - '200': - description: Successful response containing a list of projects matching query parameters - - /api/v2/projects/{id}: - get: - summary: Returns a project object - tags: - - Projects v2.0 - parameters: - - in: path - name: id - required: true - description: The ID of the project - schema: - type: number - example: 100000001 - description: Returns a project object by matching unique identifier in the default format that is stored as within the Gateway - responses: - '200': - description: Successful response containing a single project object - '404': - description: A project could not be found by the provided project identifier - - /api/v1/papers: - post: - summary: Returns a Paper object with ID. - security: - - cookieAuth: [] - tags: - - Papers - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - example: # Sample object - type: 'paper' - name: 'Epilepsy data research' - link: 'http://epilepsy.org' - description: 'Epilespy data research description' - categories: { category: 'API', programmingLanguage: ['Javascript'], programmingLanguageVersion: '0.0.0' } - licence: 'MIT licence' - authors: [4495285946631793] - tags: { features: ['Arbitrage'], topics: ['Epilepsy'] } - responses: - '200': - description: OK - get: - summary: Return List of Paper objects. - tags: - - Papers - parameters: - - in: query - name: limit - required: false - description: Limit the number of results - schema: - type: integer - example: 3 - - in: query - name: offset - required: false - description: Index to offset the search results - schema: - type: integer - example: 1 - - in: query - name: q - required: false - description: Filter using search query - schema: - type: string - example: epilepsy - responses: - '200': - description: OK - - /api/v1/papers/{id}: - get: - summary: Returns Paper object. - tags: - - Papers - parameters: - - name: id - in: path - required: true - description: The ID of the user - schema: - type: integer - format: int64 - minimum: 1 - example: 8370396016757367 - responses: - '200': - description: OK - patch: - summary: Change status of the Paper object. - security: - - cookieAuth: [] - tags: - - Papers - parameters: - - in: path - name: id - required: true - schema: - type: integer - example: 7485531672584456 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - id: - type: number - activeflag: - type: string - example: # Sample object - activeflag: 'active' - responses: - '200': - description: OK - put: - summary: Returns edited Paper object. 
- security: - - cookieAuth: [] - tags: - - Papers - parameters: - - in: path - name: id - required: true - description: The ID of the paper - schema: - type: integer - format: int64 - example: 7485531672584456 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - id: - type: number - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - toolids: - type: array - items: - type: string - example: # Sample object - id: 7485531672584456 - type: 'paper' - name: 'Test Paper Title 2' - link: 'http://localhost:8080/epilepsy' - description: 'Test abstract 2' - categories: { category: 'API', programmingLanguage: ['Javascript'], programmingLanguageVersion: '1.0.0' } - licence: 'MIT licence' - authors: [4495285946631793] - tags: { features: ['Arbitrage'], topics: ['Epilepsy'] } - toolids: [] - responses: - '200': - description: OK - - /api/v2/papers: - get: - summary: Returns a list of paper objects - tags: - - Papers v2.0 - parameters: - - name: search - in: query - description: Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match; by default, results are sorted by the most relevant first unless a manual sort query parameter has been added. - schema: - type: string - example: Exploration - - name: page - in: query - description: A specific page of results to retrieve - schema: - type: number - example: 1 - - name: limit - in: query - description: Maximum number of results returned per page - schema: - type: number - example: 10 - - name: sort - in: query - description: Fields to apply sort operations to. Accepts multiple fields in ascending and descending order. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: name,-counter - - name: fields - in: query - description: Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: name,counter,description - - name: count - in: query - description: Returns the number of entities matching the query parameters provided instead of the result payload - schema: - type: boolean - example: true - description: Version 2.0 of the papers API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. 
- responses: - '200': - description: Successful response containing a list of papers matching query parameters - - /api/v2/papers/{id}: - get: - summary: Returns paper object - tags: - - Papers v2.0 - parameters: - - in: path - name: id - required: true - description: The ID of the paper - schema: - type: number - example: 13296138992670704 - description: Returns a paper object by matching unique identifier in the default format that is stored as within the Gateway - responses: - '200': - description: Successful response containing a single paper object - '404': - description: A paper could not be found by the provided paper identifier - - /api/v1/tools: - get: - summary: Return List of Tool objects. - tags: - - Tools - parameters: - - in: query - name: limit - required: false - description: Limit the number of results - schema: - type: integer - example: 3 - - in: query - name: offset - required: false - description: Index to offset the search results - schema: - type: integer - example: 1 - - in: query - name: q - required: false - description: Filter using search query - schema: - type: string - example: epilepsy - responses: - '200': - description: OK - post: - summary: Returns new Tool object with ID. - security: - - cookieAuth: [] - tags: - - Tools - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - example: # Sample object - id: 26542005388306332 - responses: - '200': - description: OK - - /api/v1/tools/{id}: - get: - summary: Returns Tool object - tags: - - Tools - parameters: - - in: path - name: id - required: true - description: The ID of the tool - schema: - type: integer - format: int64 - minimum: 1 - example: 19009 - responses: - '200': - description: OK - put: - summary: Returns edited Tool object. 
- security: - - cookieAuth: [] - tags: - - Tools - parameters: - - in: path - name: id - required: true - schema: - type: integer - example: 123 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - id: - type: number - type: - type: string - name: - type: string - link: - type: string - description: - type: string - categories: - type: object - properties: - category: - type: string - programmingLanguage: - type: array - items: - type: string - programmingLanguageVersion: - type: string - licence: - type: string - authors: - type: array - items: - type: number - tags: - type: object - properties: - features: - type: array - items: - type: string - topics: - type: array - items: - type: string - toolids: - type: array - items: - type: string - example: # Sample object - id: 26542005388306332 - type: 'tool' - name: 'Research Data TEST EPILEPSY' - link: 'http://localhost:8080/epilepsy' - description: 'Epilepsy data research description' - categories: { category: 'API', programmingLanguage: ['Javascript'], programmingLanguageVersion: '1.0.0' } - licence: 'MIT licence' - authors: [4495285946631793] - tags: { features: ['Arbitrage'], topics: ['Epilepsy'] } - toolids: [] - responses: - '200': - description: OK - patch: - summary: Change status of Tool object. - security: - - cookieAuth: [] - tags: - - Tools - parameters: - - name: id - in: path - required: true - description: The ID of the tool - schema: - type: integer - format: int64 - example: 5032687830560181 - requestBody: - content: - application/json: - schema: # Request body contents - type: object - required: - - name - properties: - id: - type: number - activeflag: - type: string - example: # Sample object - id: 662346984100503 - activeflag: 'active' - responses: - '200': - description: OK - - /api/v2/tools: - get: - summary: Returns a list of tool objects - tags: - - Tools v2.0 - parameters: - - name: search - in: query - description: Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match; by default, results are sorted by the most relevant first unless a manual sort query parameter has been added. - schema: - type: string - example: Regulation - - name: page - in: query - description: A specific page of results to retrieve - schema: - type: number - example: 1 - - name: limit - in: query - description: Maximum number of results returned per page - schema: - type: number - example: 10 - - name: sort - in: query - description: Fields to apply sort operations to. Accepts multiple fields in ascending and descending order. E.g. name for ascending or -name for descending. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: name,-counter - - name: fields - in: query - description: Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below. 
- schema: - type: string - example: name,counter,description - - name: count - in: query - description: Returns the number of entities matching the query parameters provided instead of the result payload - schema: - type: boolean - example: true - description: Version 2.0 of the tools API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. - responses: - '200': - description: Successful response containing a list of tools matching query parameters - - /api/v2/tools/{id}: - get: - summary: Returns a tool object - tags: - - Tools v2.0 - parameters: - - in: path - name: id - required: true - description: The ID of the tool - schema: - type: number - example: 100000006 - description: Returns a tool object by matching its unique identifier, in the default format in which it is stored within the Gateway - responses: - '200': - description: Successful response containing a single tool object - '404': - description: A tool could not be found by the provided tool identifier - - /api/v2/courses: - get: - summary: Returns a list of courses - parameters: - - name: search - in: query - description: Full text index search function which searches for partial matches in various fields including name and description. The response will contain a metascore indicating the relevancy of the match; by default, results are sorted by the most relevant first unless a manual sort query parameter has been added. - schema: - type: string - example: Research - - name: page - in: query - description: A specific page of results to retrieve - schema: - type: number - example: 1 - - name: limit - in: query - description: Maximum number of results returned per page - schema: - type: number - example: 10 - - name: sort - in: query - description: Fields to apply sort operations to. Accepts multiple fields in ascending and descending order. E.g. provider for ascending or -provider for descending. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: provider,-counter - - name: fields - in: query - description: Limit the size of the response by requesting only certain fields. Note that some additional derived fields are always returned. Multiple fields should be comma separated as shown in the example below. - schema: - type: string - example: provider,counter,description - - name: count - in: query - description: Returns the number of entities matching the query parameters provided instead of the result payload - schema: - type: boolean - example: true - description: Version 2.0 of the courses API introduces a large number of parameterised query string options to aid requests in collecting the data that is most relevant for a given use case. The query parameters defined below support a variety of comparison operators such as equals, contains, greater than, and less than. Using dot notation, any field can be queried; please see some examples below. 
- tags: - - Courses v2.0 - responses: - '200': - description: Successful response containing a list of course objects matching query parameters - - /api/v2/courses/{id}: - summary: summary - get: - summary: Returns a course object - description: Returns a course object by matching unique identifier in the default format that is stored as within the Gateway - tags: - - Courses v2.0 - parameters: - - in: path - name: id - required: true - description: The ID of the course - schema: - type: number - example: 5540794872521069 - responses: - '200': - description: Successful response containing a single course object - '404': - description: A course could not be found by the provided course identifier - -components: - securitySchemes: - oauth2: - type: oauth2 - flows: - clientCredentials: - tokenUrl: 'https://api.www.healthdatagateway.org/oauth/token' - cookieAuth: - type: http - scheme: jwt diff --git a/test/middleware.test.js b/test/middleware.test.js new file mode 100644 index 00000000..9dd41fbb --- /dev/null +++ b/test/middleware.test.js @@ -0,0 +1,66 @@ +import { resultLimit } from '../src/config/middleware'; + +describe('resultLimit', () => { + const nextFunction = jest.fn(); + + const mockResponse = () => { + const res = {}; + res.status = jest.fn().mockReturnValue(res); + res.json = jest.fn().mockReturnValue(res); + return res; + }; + + const allowedLimit = 100; + + it('should return a 400 response code with the correct reason when the requested limit is non numeric', () => { + const expectedResponse = { + success: false, + message: 'The result limit parameter provided must be a numeric value.', + }; + + const req = { query: { limit: 'one hundred' } }; + const res = mockResponse(); + + resultLimit(req, res, nextFunction, allowedLimit); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + }); + + it('should return a 400 response code with the correct reason when the maximum allowed limit is exceeded', () => { + const expectedResponse = { + success: false, + message: `Maximum request limit exceeded. You may only request up to a maximum of ${allowedLimit} records per page. Please use the page query parameter to request further data.`, + }; + + const req = { query: { limit: 101 } }; + const res = mockResponse(); + + resultLimit(req, res, nextFunction, allowedLimit); + + expect(res.status).toHaveBeenCalledWith(400); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + }); + + it('should invoke the next function when no request limit is provided', () => { + const req = {}; + const res = mockResponse(); + + resultLimit(req, res, nextFunction, allowedLimit); + + expect(res.status.mock.calls.length).toBe(0); + expect(res.json.mock.calls.length).toBe(0); + expect(nextFunction.mock.calls.length).toBe(1); + }); + + it('should invoke the next function when the requested limit is valid', () => { + const req = { query: { limit: 100 } }; + const res = mockResponse(); + + resultLimit(req, res, nextFunction, allowedLimit); + + expect(res.status.mock.calls.length).toBe(0); + expect(res.json.mock.calls.length).toBe(0); + expect(nextFunction.mock.calls.length).toBe(1); + }); +});
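The middleware test suite above exercises a resultLimit function imported from src/config/middleware, but the implementation itself is not part of this diff. For readers reviewing the tests, a minimal sketch that would satisfy the assertions might look like the following; the function name, argument order and response messages are taken from the tests, while the internal logic is an assumption.

// Sketch only - the real implementation lives in src/config/middleware.js and is not included in this diff.
export const resultLimit = (req, res, next, allowedLimit) => {
	const { limit } = req.query || {};
	// No limit supplied: fall through to the route's default page size.
	if (typeof limit === 'undefined') {
		return next();
	}
	// Reject non-numeric values such as ?limit=one hundred.
	if (isNaN(limit)) {
		return res.status(400).json({
			success: false,
			message: 'The result limit parameter provided must be a numeric value.',
		});
	}
	// Reject requests that exceed the maximum page size allowed for the route.
	if (parseInt(limit, 10) > allowedLimit) {
		return res.status(400).json({
			success: false,
			message: `Maximum request limit exceeded. You may only request up to a maximum of ${allowedLimit} records per page. Please use the page query parameter to request further data.`,
		});
	}
	return next();
};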
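To show how this guard relates to the limit query parameter documented for the v2 list endpoints above, here is a hypothetical wiring example; the route path, import path and the 100-record cap are illustrative assumptions rather than details taken from this diff.

// Hypothetical wiring - the actual route registration is not shown in this diff.
import express from 'express';
import { resultLimit } from './config/middleware';

const router = express.Router();

// Cap the limit query parameter before the GET /api/v2/datasets handler runs.
router.get(
	'/api/v2/datasets',
	(req, res, next) => resultLimit(req, res, next, 100),
	(req, res) => res.json({ success: true, data: [] }) // placeholder handler
);

export default router;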