diff --git a/cloudbuild.yaml b/cloudbuild.yaml index ac7bc50e..486ce5ff 100644 --- a/cloudbuild.yaml +++ b/cloudbuild.yaml @@ -20,6 +20,8 @@ steps: '${_REGION}', '--allow-unauthenticated', ] + - name: 'node' + args: ['npm', 'install'] - name: 'node' args: ['npm', 'test'] env: diff --git a/package.json b/package.json index c35e5de2..482c8c76 100644 --- a/package.json +++ b/package.json @@ -7,9 +7,10 @@ "@google-cloud/storage": "^5.3.0", "@sendgrid/mail": "^7.1.0", "@sentry/node": "^5.29.0", + "ajv": "^7.1.1", "async": "^3.2.0", "await-to-js": "^2.1.1", - "axios": "0.19.2", + "axios": "0.21.1", "axios-retry": "^3.1.9", "base64url": "^3.0.1", "bcrypt": "^5.0.0", @@ -28,6 +29,7 @@ "express-session": "^1.17.1", "express-validator": "^6.6.1", "faker": "^5.3.1", + "form-data": "^3.0.0", "googleapis": "^55.0.0", "jose": "^2.0.2", "jsonwebtoken": "^8.5.1", diff --git a/src/config/server.js b/src/config/server.js index cc427548..de88bc0c 100644 --- a/src/config/server.js +++ b/src/config/server.js @@ -63,8 +63,10 @@ connectToDatabase(); // (optional) only made for logging and // bodyParser, parses the request body to be a readable json format -app.use(bodyParser.urlencoded({ extended: false })); -app.use(bodyParser.json()); + +app.use(bodyParser.json({ limit: '10mb', extended: true })); +app.use(bodyParser.urlencoded({ limit: '10mb', extended: false })); + app.use(logger('dev')); app.use(cookieParser()); app.use(passport.initialize()); @@ -214,6 +216,7 @@ app.use('/api/v1/coursecounter', require('../resources/course/coursecounter.rout app.use('/api/v1/discourse', require('../resources/discourse/discourse.route')); +app.use('/api/v1/dataset-onboarding', require('../resources/dataset/datasetonboarding.route')); app.use('/api/v1/datasets', require('../resources/dataset/v1/dataset.route')); app.use('/api/v2/datasets', require('../resources/dataset/v2/dataset.route')); @@ -226,6 +229,8 @@ app.use('/api/v1/analyticsdashboard', require('../resources/googleanalytics/goog 
app.use('/api/v1/help', require('../resources/help/help.router')); +app.use('/api/v2/filters', require('../resources/filters/filters.route')); + initialiseAuthentication(app); // launch our backend into a port diff --git a/src/resources/account/account.route.js b/src/resources/account/account.route.js index e3b74634..75275f62 100644 --- a/src/resources/account/account.route.js +++ b/src/resources/account/account.route.js @@ -62,39 +62,6 @@ router.get('/admin', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Adm return result; }); -/** - * {get} /api/v1/accounts/admin/collections - * - * Returns list of all collections. - */ -router.get('/admin/collections', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { - var result; - var startIndex = 0; - var maxResults = 25; - - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.maxResults) { - maxResults = req.query.maxResults; - } - - var q = Collections.aggregate([ - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - { $sort: { updatedAt: -1 } }, - ]); //.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - result = res.json({ success: true, data: data }); - }); - - return result; -}); - /** * {get} /api/v1/accounts * diff --git a/src/resources/auth/auth.route.js b/src/resources/auth/auth.route.js index f3d54b39..2c60bf82 100644 --- a/src/resources/auth/auth.route.js +++ b/src/resources/auth/auth.route.js @@ -79,6 +79,14 @@ router.get('/status', function (req, res, next) { return { ...publisher, type, roles }; }); } + + let adminArray = teams.filter(team => team.type === 'admin'); + let teamArray = teams + .filter(team => team.type !== 'admin') + .sort(function (a, b) { + return a.name.toUpperCase() < b.name.toUpperCase() ? 
-1 : a.name.toUpperCase() > b.name.toUpperCase() ? 1 : 0; + }); + // 2. Return user info return res.json({ success: true, @@ -88,7 +96,7 @@ router.get('/status', function (req, res, next) { id: req.user.id, name: req.user.firstname + ' ' + req.user.lastname, loggedIn: true, - teams, + teams: [...adminArray, ...teamArray], provider: req.user.provider, advancedSearchRoles: req.user.advancedSearchRoles, acceptedAdvancedSearchTerms: req.user.acceptedAdvancedSearchTerms, diff --git a/src/resources/base/repository.js b/src/resources/base/repository.js index c529c150..426087fc 100644 --- a/src/resources/base/repository.js +++ b/src/resources/base/repository.js @@ -8,8 +8,13 @@ export default class Repository { //Build query let queryObj = { ...query }; + // Population from query + if(query.populate) { + populate = query.populate.split(',').join(' '); + } + // Filtering - const excludedFields = ['page', 'sort', 'limit', 'fields', 'count', 'search', 'expanded']; + const excludedFields = ['page', 'sort', 'limit', 'fields', 'count', 'search', 'expanded', 'populate']; excludedFields.forEach(el => delete queryObj[el]); // Keyword search diff --git a/src/resources/collections/collections.repository.js b/src/resources/collections/collections.repository.js index 26d47c9e..4fa16bab 100644 --- a/src/resources/collections/collections.repository.js +++ b/src/resources/collections/collections.repository.js @@ -1,20 +1,31 @@ import { Data } from '../tool/data.model'; import { Course } from '../course/course.model'; import { Collections } from './collections.model'; +import { UserModel } from '../user/user.model'; +import emailGenerator from '../utilities/emailGenerator.util'; import _ from 'lodash'; +import helper from '../utilities/helper.util'; + +const hdrukEmail = `enquiry@healthdatagateway.org`; const getCollectionObjects = async (req, res) => { let relatedObjects = []; await Collections.find( { id: parseInt(req.params.collectionID) }, - { 'relatedObjects._id': 1, 
'relatedObjects.objectId': 1, 'relatedObjects.objectType': 1, 'relatedObjects.pid': 1 } + { + 'relatedObjects._id': 1, + 'relatedObjects.objectId': 1, + 'relatedObjects.objectType': 1, + 'relatedObjects.pid': 1, + 'relatedObjects.updated': 1, + } ).then(async res => { await new Promise(async (resolve, reject) => { if (_.isEmpty(res)) { reject(`Collection not found for Id: ${req.params.collectionID}.`); } else { for (let object of res[0].relatedObjects) { - let relatedObject = await getCollectionObject(object.objectId, object.objectType, object.pid); + let relatedObject = await getCollectionObject(object.objectId, object.objectType, object.pid, object.updated); if (!_.isUndefined(relatedObject)) { relatedObjects.push(relatedObject); } else { @@ -28,10 +39,11 @@ const getCollectionObjects = async (req, res) => { } }); }); - return relatedObjects; + + return relatedObjects.sort((a, b) => b.updated - a.updated); }; -function getCollectionObject(objectId, objectType, pid) { +function getCollectionObject(objectId, objectType, pid, updated) { let id = pid && pid.length > 0 ? pid : objectId; return new Promise(async (resolve, reject) => { @@ -54,30 +66,317 @@ function getCollectionObject(objectId, objectType, pid) { bio: 1, authors: 1, } - ).populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]); + ) + .populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]) + .lean(); } else if (!isNaN(id) && objectType === 'course') { data = await Course.find( { id: parseInt(id) }, { id: 1, type: 1, activeflag: 1, title: 1, provider: 1, courseOptions: 1, award: 1, domains: 1, tags: 1, description: 1 } - ); + ).lean(); } else { // 1. Search for a dataset based on pid data = await Data.find( { pid: id, activeflag: 'active' }, { id: 1, datasetid: 1, pid: 1, type: 1, activeflag: 1, name: 1, datasetv2: 1, datasetfields: 1, tags: 1, description: 1 } - ); + ).lean(); // 2. 
If dataset not found search for a dataset based on datasetID if (!data || data.length <= 0) { - data = await Data.find({ datasetid: objectId }, { datasetid: 1, pid: 1 }); + data = await Data.find({ datasetid: objectId }, { datasetid: 1, pid: 1 }).lean(); // 3. Use retrieved dataset's pid to search by pid again data = await Data.find( { pid: data[0].pid, activeflag: 'active' }, { id: 1, datasetid: 1, pid: 1, type: 1, activeflag: 1, name: 1, datasetv2: 1, datasetfields: 1, tags: 1, description: 1 } - ); + ).lean(); + } + // 4. If dataset still not found search for deleted dataset by pid + if (!data || data.length <= 0) { + data = await Data.find( + { pid: id, activeflag: 'archive' }, + { id: 1, datasetid: 1, pid: 1, type: 1, activeflag: 1, name: 1, datasetv2: 1, datasetfields: 1, tags: 1, description: 1 } + ).lean(); } } - resolve(data[0]); + + let relatedObject = { ...data[0], updated: Date.parse(updated) }; + resolve(relatedObject); + }); +} + +async function sendEmailNotifications(collections, activeflag, collectionCreator, isEdit) { + // Generate URL for linking collection in email + const collectionLink = process.env.homeURL + '/collection/' + collections.id; + + // Query Db for all admins or authors of the collection + var q = UserModel.aggregate([ + { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + { + $project: { + _id: 1, + firstname: 1, + lastname: 1, + email: 1, + role: 1, + id: 1, + }, + }, + ]); + + // Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } else { + let subject; + let html; + + emailRecipients.map(emailRecipient => { + if (collections.authors.includes(emailRecipient.id)) { + let author = Number(collections.authors.filter(author => author === emailRecipient.id)); + + if (activeflag === 
'active') { + subject = generateCollectionEmailSubject( + 'Creator', + collections.publicflag, + collections.name, + author === collectionCreator.id ? true : false, + isEdit + ); + html = generateCollectionEmailContent( + 'Creator', + collections.publicflag, + collections.name, + collectionLink, + author === collectionCreator.id ? true : false, + isEdit + ); + } + } else if (activeflag === 'active' && emailRecipient.role === 'Admin') { + subject = generateCollectionEmailSubject('Admin', collections.publicflag, collections.name, false, isEdit); + html = generateCollectionEmailContent('Admin', collections.publicflag, collections.name, collectionLink, false, isEdit); + } + + emailGenerator.sendEmail([emailRecipient], `${hdrukEmail}`, subject, html, false); + }); + } + }); +} + +function generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit) { + let emailSubject; + + if (role !== 'Admin' && isCreator !== true) { + if (isEdit === true) { + emailSubject = `The ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName} that you are a collaborator on has been edited and is now live`; + } else { + emailSubject = `You have been added as a collaborator on the ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName}`; + } + } else { + emailSubject = `${role === 'Admin' ? 'A' : 'Your'} ${ + publicflag === true ? 'public' : 'private' + } collection ${collectionName} has been ${isEdit === true ? 'edited' : 'published'} and is now live`; + } + + return emailSubject; +} + +function generateCollectionEmailContent(role, publicflag, collectionName, collectionLink, isCreator, isEdit) { + return `
+
+ + + + + + + + + + + + + + +
+ ${generateCollectionEmailSubject(role, publicflag, collectionName, isCreator, isEdit)} +
+ ${ + publicflag === true + ? `${role === 'Admin' ? 'A' : 'Your'} public collection has been ${ + isEdit === true ? 'edited on' : 'published to' + } the Gateway. The collection is searchable on the Gateway and can be viewed by all users.` + : `${role === 'Admin' ? 'A' : 'Your'} private collection has been ${ + isEdit === true ? 'edited on' : 'published to' + } the Gateway. Only those who you share the collection link with will be able to view the collection.` + } +
+ View Collection +
+
+
`; +} + +const getCollectionsAdmin = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let searchString = ''; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + if (req.query.status) { + status = req.query.status; + } + + let searchQuery; + if (status === 'all') { + searchQuery = {}; + } else { + searchQuery = { $and: [{ activeflag: status }] }; + } + + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + + await Promise.all([getObjectResult(searchAll, searchQuery, startIndex, limit), getCountsByStatus()]).then(values => { + resolve(values); + }); + }); +}; + +const getCollections = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let idString = req.user.id; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.query.id) { + idString = req.query.id; + } + if (req.query.status) { + status = req.query.status; + } + + let searchQuery; + if (status === 'all') { + searchQuery = [{ authors: parseInt(idString) }]; + } else { + searchQuery = [{ authors: parseInt(idString) }, { activeflag: status }]; + } + + let query = Collections.aggregate([ + { $match: { $and: searchQuery } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1, _id: 1 } }, + ]) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + + await Promise.all([getUserCollections(query), getCountsByStatus(idString)]).then(values => { + resolve(values); + }); + + function getUserCollections(query) { + return new Promise((resolve, reject) => { + query.exec((err, 
data) => { + data && + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); + } + }); +}; + +function getObjectResult(searchAll, searchQuery, startIndex, limit) { + let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + let q = ''; + + if (searchAll) { + q = Collections.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]) + .sort({ updatedAt: -1, _id: 1 }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } else { + q = Collections.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]) + .sort({ score: { $meta: 'textScore' } }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') { + resolve([]); + } else { + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + resolve(data); + } + }); + }); +} + +function getCountsByStatus(idString) { + let q; + + if (_.isUndefined(idString)) { + q = Collections.find({}, { id: 1, name: 1, activeflag: 1 }); + } else { + q = Collections.find({ authors: parseInt(idString) }, { id: 1, name: 1, activeflag: 1 }); + } + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount: activeCount, archiveCount: archiveCount }; + + resolve(countSummary); + }); }); } -export { getCollectionObjects }; +export { getCollectionObjects, getCollectionsAdmin, getCollections, sendEmailNotifications, generateCollectionEmailSubject }; diff --git a/src/resources/collections/collections.route.js 
b/src/resources/collections/collections.route.js index e5ece357..7a45a69a 100644 --- a/src/resources/collections/collections.route.js +++ b/src/resources/collections/collections.route.js @@ -2,25 +2,52 @@ import express from 'express'; import { ROLES } from '../user/user.roles'; import passport from 'passport'; import { utils } from '../auth'; -// import { UserModel } from '../user/user.model' import { Collections } from '../collections/collections.model'; import { Data } from '../tool/data.model'; import { MessagesModel } from '../message/message.model'; import { UserModel } from '../user/user.model'; -import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; import _ from 'lodash'; import escape from 'escape-html'; -import { getCollectionObjects } from './collections.repository'; +import { + getCollectionObjects, + getCollectionsAdmin, + getCollections, + sendEmailNotifications, + generateCollectionEmailSubject, +} from './collections.repository'; const inputSanitizer = require('../utilities/inputSanitizer'); const urlValidator = require('../utilities/urlValidator'); -const hdrukEmail = `enquiry@healthdatagateway.org`; - const router = express.Router(); +// @router GET /api/v1/collections/getList +// @desc Returns List of Collections +// @access Private +router.get('/getList', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + let role = req.user.role; + + if (role === ROLES.Admin) { + await getCollectionsAdmin(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } else if (role === ROLES.Creator) { + await getCollections(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } +}); + router.get('/:collectionID', async (req, res) => { var q = Collections.aggregate([ { $match: { $and: [{ 
id: parseInt(req.params.collectionID) }] } }, @@ -89,10 +116,10 @@ router.get('/entityid/:entityID', async (req, res) => { router.put('/edit', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { const collectionCreator = req.body.collectionCreator; - var { id, name, description, imageLink, authors, relatedObjects, publicflag, keywords } = req.body; + let { id, name, description, imageLink, authors, relatedObjects, publicflag, keywords, previousPublicFlag } = req.body; imageLink = urlValidator.validateURL(imageLink); - Collections.findOneAndUpdate( + await Collections.findOneAndUpdate( { id: id }, { name: inputSanitizer.removeNonBreakingSpaces(name), @@ -111,6 +138,20 @@ router.put('/edit', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admi ).then(() => { return res.json({ success: true }); }); + + await Collections.find({ id: id }, { publicflag: 1, id: 1, activeflag: 1, authors: 1, name: 1 }).then(async res => { + if (previousPublicFlag === false && publicflag === true) { + await sendEmailNotifications(res[0], res[0].activeflag, collectionCreator, true); + + if (res[0].authors) { + res[0].authors.forEach(async authorId => { + await createMessage(authorId, res[0], res[0].activeflag, collectionCreator, true); + }); + } + + await createMessage(0, res[0], res[0].activeflag, collectionCreator, true); + } + }); }); router.post('/add', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { @@ -130,21 +171,16 @@ router.post('/add', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admi collections.publicflag = publicflag; collections.keywords = keywords; - try { - if (collections.authors) { - collections.authors.forEach(async authorId => { - await createMessage(authorId, collections, collections.activeflag, collectionCreator); - }); - } - await createMessage(0, collections, collections.activeflag, collectionCreator); - - // Send email notifications to all admins 
and authors who have opted in - await sendEmailNotifications(collections, collections.activeflag, collectionCreator); - } catch (err) { - console.error(err.message); - // return res.status(500).json({ success: false, error: err }); + if (collections.authors) { + collections.authors.forEach(async authorId => { + await createMessage(authorId, collections, collections.activeflag, collectionCreator); + }); } + await createMessage(0, collections, collections.activeflag, collectionCreator); + + await sendEmailNotifications(collections, collections.activeflag, collectionCreator); + collections.save(err => { if (err) { return res.json({ success: false, error: err }); @@ -214,7 +250,7 @@ router.delete('/delete/:id', passport.authenticate('jwt'), utils.checkIsInRole(R module.exports = router; -async function createMessage(authorId, collections, activeflag, collectionCreator) { +async function createMessage(authorId, collections, activeflag, collectionCreator, isEdit) { let message = new MessagesModel(); const collectionLink = process.env.homeURL + '/collection/' + collections.id; @@ -230,80 +266,21 @@ async function createMessage(authorId, collections, activeflag, collectionCreato if (authorId === 0) { message.messageType = 'added collection'; - message.messageDescription = `${collectionCreator.name} added a new collection: ${collections.name}.`; + message.messageDescription = generateCollectionEmailSubject('Admin', collections.publicflag, collections.name, false, isEdit); saveMessage(); } for (let messageRecipient of messageRecipients) { - if (activeflag === 'active' && authorId === messageRecipient.id && authorId === collectionCreator.id) { + if (activeflag === 'active' && authorId === messageRecipient.id) { message.messageType = 'added collection'; - message.messageDescription = `Your new collection ${collections.name} has been added.`; - saveMessage(); - } else if (activeflag === 'active' && authorId === messageRecipient.id && authorId !== collectionCreator.id) { - 
message.messageType = 'added collection'; - message.messageDescription = `${collectionCreator.name} added you as a collaborator on the new collection ${collections.name}.`; + message.messageDescription = generateCollectionEmailSubject( + 'Creator', + collections.publicflag, + collections.name, + authorId === collectionCreator.id ? true : false, + isEdit + ); saveMessage(); } } - - //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS - // else if (activeflag === 'archive') { - // message.messageType = 'rejected'; - // message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${collectionLink}` - // } -} - -async function sendEmailNotifications(collections, activeflag, collectionCreator) { - let subject; - let html; - // 1. Generate URL for linking collection in email - const collectionLink = process.env.homeURL + '/collection/' + collections.id; - - // 2. Build email body - emailRecipients.map(emailRecipient => { - if (activeflag === 'active' && emailRecipient.role === 'Admin') { - subject = `New collection ${collections.name} has been added and is now live`; - html = `New collection ${collections.name} has been added and is now live

${collectionLink}`; - } - - collections.authors.map(author => { - if (activeflag === 'active' && author === emailRecipient.id && author === collectionCreator.id) { - subject = `Your collection ${collections.name} has been added and is now live`; - html = `Your collection ${collections.name} has been added and is now live

${collectionLink}`; - } else if (activeflag === 'active' && author === emailRecipient.id && author !== collectionCreator.id) { - subject = `You have been added as a collaborator on collection ${collections.name}`; - html = `${collectionCreator.name} has added you as a collaborator to the collection ${collections.name} which is now live

${collectionLink}`; - } - }); - }); - - if (activeflag === 'active') { - subject = `Your collection ${collections.name} has been approved and is now live`; - html = `Your collection ${collections.name} has been approved and is now live

${collectionLink}`; - } - //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS - // else if (activeflag === 'archive') { - // subject = `Your collection ${collections.name} has been rejected` - // html = `Your collection ${collections.name} has been rejected

${collectionLink}` - // } - - // 3. Query Db for all admins or authors of the collection who have opted in to email updates - var q = UserModel.aggregate([ - // Find all users who are admins or authors of this collection - { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, - ]); - - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); - }); } diff --git a/src/resources/course/course.model.js b/src/resources/course/course.model.js index e563af4d..8ffb47ac 100644 --- a/src/resources/course/course.model.js +++ b/src/resources/course/course.model.js @@ -8,7 +8,7 @@ const courseSchema = new Schema( type: String, creator: Number, activeflag: String, - //updatedon: Date, + updatedon: Date, counter: Number, discourseTopicId: Number, relatedObjects: [ diff --git a/src/resources/course/course.repository.js b/src/resources/course/course.repository.js index 84326c39..21eb50aa 100644 --- a/src/resources/course/course.repository.js +++ b/src/resources/course/course.repository.js @@ -101,6 +101,7 @@ const editCourse = async (req, res) => { let relatedObjects = req.body.relatedObjects; let courseOptions = req.body.courseOptions; let entries = req.body.entries; + let updatedon = Date.now(); Course.findOneAndUpdate( { id: id }, @@ -120,6 +121,7 @@ const editCourse = async (req, res) => { award: 
inputSanitizer.removeNonBreakingSpaces(req.body.award), competencyFramework: inputSanitizer.removeNonBreakingSpaces(req.body.competencyFramework), nationalPriority: inputSanitizer.removeNonBreakingSpaces(req.body.nationalPriority), + updatedon: updatedon, }, err => { if (err) { diff --git a/src/resources/datarequest/amendment/amendment.controller.js b/src/resources/datarequest/amendment/amendment.controller.js index 63217b48..f679ff4e 100644 --- a/src/resources/datarequest/amendment/amendment.controller.js +++ b/src/resources/datarequest/amendment/amendment.controller.js @@ -104,8 +104,6 @@ const setAmendment = async (req, res) => { } else { // 10. Update json schema and question answers with modifications since original submission let accessRecordObj = accessRecord.toObject(); - accessRecordObj.questionAnswers = JSON.parse(accessRecordObj.questionAnswers); - accessRecordObj.jsonSchema = JSON.parse(accessRecordObj.jsonSchema); accessRecordObj = injectAmendments(accessRecordObj, userType, req.user); // 11. Append question actions depending on user type and application status let userRole = activeParty === constants.userTypes.CUSTODIAN ? constants.roleTypes.MANAGER : ''; @@ -525,12 +523,7 @@ const injectNavigationAmendment = (jsonSchema, questionSetId, userType, complete const getLatestQuestionAnswer = (accessRecord, questionId) => { // 1. 
Include original submission of question answer - let parsedQuestionAnswers = {}; - if (typeof accessRecord.questionAnswers === 'string') { - parsedQuestionAnswers = JSON.parse(accessRecord.questionAnswers); - } else { - parsedQuestionAnswers = _.cloneDeep(accessRecord.questionAnswers); - } + let parsedQuestionAnswers = _.cloneDeep(accessRecord.questionAnswers); let initialSubmission = { questionAnswers: { [`${questionId}`]: { @@ -703,12 +696,6 @@ const revertAmendmentAnswer = (accessRecord, questionId, user) => { const createNotifications = async (type, accessRecord) => { // Project details from about application let { aboutApplication = {}, questionAnswers } = accessRecord; - if (typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(accessRecord.aboutApplication); - } - if (typeof questionAnswers === 'string') { - questionAnswers = JSON.parse(accessRecord.questionAnswers); - } let { projectName = 'No project name set' } = aboutApplication; let { dateSubmitted = '' } = accessRecord; // Publisher details from single dataset diff --git a/src/resources/datarequest/datarequest.controller.js b/src/resources/datarequest/datarequest.controller.js index a1c1f48f..18ffb8e9 100644 --- a/src/resources/datarequest/datarequest.controller.js +++ b/src/resources/datarequest/datarequest.controller.js @@ -27,18 +27,22 @@ module.exports = { //GET api/v1/data-access-request getAccessRequestsByUser: async (req, res) => { try { - // 1. Deconstruct the + // 1. Deconstruct the parameters passed let { id: userId } = req.user; + let { query = {} } = req; // 2. 
Find all data access request applications created with multi dataset version - let applications = await DataRequestModel.find({ $or: [{ userId: parseInt(userId) }, { authorIds: userId }] }).populate( - 'datasets mainApplicant' - ); + let applications = await DataRequestModel.find({ + $and: [{ ...query }, { $or: [{ userId: parseInt(userId) }, { authorIds: userId }] }], + }) + .select('-jsonSchema -questionAnswers -files') + .populate('datasets mainApplicant') + .lean(); // 3. Append project name and applicants let modifiedApplications = [...applications] .map(app => { - return module.exports.createApplicationDTO(app.toObject(), constants.userTypes.APPLICANT); + return module.exports.createApplicationDTO(app, constants.userTypes.APPLICANT); }) .sort((a, b) => b.updatedAt - a.updatedAt); @@ -116,8 +120,6 @@ module.exports = { } } // 11. Update json schema and question answers with modifications since original submission - accessRecord.questionAnswers = JSON.parse(accessRecord.questionAnswers); - accessRecord.jsonSchema = JSON.parse(accessRecord.jsonSchema); accessRecord = amendmentController.injectAmendments(accessRecord, userType, req.user); // 12. Determine the current active party handling the form let activeParty = amendmentController.getAmendmentIterationParty(accessRecord); @@ -136,8 +138,6 @@ module.exports = { status: 'success', data: { ...accessRecord, - aboutApplication: - typeof accessRecord.aboutApplication === 'string' ? JSON.parse(accessRecord.aboutApplication) : accessRecord.aboutApplication, datasets: accessRecord.datasets, readOnly, ...countUnsubmittedAmendments, @@ -159,9 +159,9 @@ module.exports = { //GET api/v1/data-access-request/dataset/:datasetId getAccessRequestByUserAndDataset: async (req, res) => { - let accessRecord; + let accessRecord, dataset; + let formType = constants.formTypes.Extended5Safe; let data = {}; - let dataset; try { // 1. Get dataSetId from params let { @@ -201,27 +201,32 @@ module.exports = { }); } // 2. 
Build up the accessModel for the user - let { jsonSchema, version, _id: schemaId } = accessRequestTemplate; - // 3. create new DataRequestModel + let { jsonSchema, version, _id: schemaId, isCloneable = false } = accessRequestTemplate; + // 3. check for the type of form [enquiry - 5safes] + if (schemaId.toString() === constants.enquiryFormId) formType = constants.formTypes.Enquiry; + + // 4. create new DataRequestModel let record = new DataRequestModel({ version, userId, dataSetId, datasetIds: [dataSetId], datasetTitles: [dataset.name], + isCloneable, jsonSchema, schemaId, publisher, - questionAnswers: '{}', + questionAnswers: {}, aboutApplication: {}, applicationStatus: constants.applicationStatuses.INPROGRESS, + formType, }); - // 4. save record + // 5. save record const newApplication = await record.save(); newApplication.projectId = helper.generateFriendlyId(newApplication._id); await newApplication.save(); - // 5. return record + // 6. return record data = { ...newApplication._doc, mainApplicant: { firstname, lastname }, @@ -229,17 +234,19 @@ module.exports = { } else { data = { ...accessRecord.toObject() }; } - // 6. Parse json to allow us to modify schema - data.jsonSchema = JSON.parse(data.jsonSchema); // 7. Append question actions depending on user type and application status - data.jsonSchema = datarequestUtil.injectQuestionActions(data.jsonSchema, constants.userTypes.APPLICANT, data.applicationStatus, null, constants.userTypes.APPLICANT); + data.jsonSchema = datarequestUtil.injectQuestionActions( + data.jsonSchema, + constants.userTypes.APPLICANT, + data.applicationStatus, + null, + constants.userTypes.APPLICANT + ); // 8. Return payload return res.status(200).json({ status: 'success', data: { ...data, - questionAnswers: JSON.parse(data.questionAnswers), - aboutApplication: typeof data.aboutApplication === 'string' ? 
JSON.parse(data.aboutApplication) : data.aboutApplication, dataset, projectId: data.projectId || helper.generateFriendlyId(data._id), userType: constants.userTypes.APPLICANT, @@ -258,6 +265,7 @@ module.exports = { //GET api/v1/data-access-request/datasets/:datasetIds getAccessRequestByUserAndMultipleDatasets: async (req, res) => { let accessRecord; + let formType = constants.formTypes.Extended5Safe; let data = {}; let datasets = []; try { @@ -309,25 +317,29 @@ module.exports = { }); } // 3. Build up the accessModel for the user - let { jsonSchema, version, _id: schemaId } = accessRequestTemplate; - // 4. Create new DataRequestModel + let { jsonSchema, version, _id: schemaId, isCloneable = false } = accessRequestTemplate; + // 4. Check form is enquiry + if (schemaId.toString() === constants.enquiryFormId) formType = constants.formTypes.Enquiry; + // 5. Create new DataRequestModel let record = new DataRequestModel({ version, userId, datasetIds: arrDatasetIds, datasetTitles: arrDatasetNames, + isCloneable, jsonSchema, schemaId, publisher, - questionAnswers: '{}', + questionAnswers: {}, aboutApplication: {}, applicationStatus: constants.applicationStatuses.INPROGRESS, + formType, }); - // 4. save record + // 6. save record const newApplication = await record.save(); newApplication.projectId = helper.generateFriendlyId(newApplication._id); await newApplication.save(); - // 5. return record + // 7. return record data = { ...newApplication._doc, mainApplicant: { firstname, lastname }, @@ -335,17 +347,19 @@ module.exports = { } else { data = { ...accessRecord.toObject() }; } - // 6. Parse json to allow us to modify schema - data.jsonSchema = JSON.parse(data.jsonSchema); - // 7. Append question actions depending on user type and application status - data.jsonSchema = datarequestUtil.injectQuestionActions(data.jsonSchema, constants.userTypes.APPLICANT, data.applicationStatus, null, constants.userTypes.APPLICANT); - // 8. Return payload + // 8. 
Append question actions depending on user type and application status + data.jsonSchema = datarequestUtil.injectQuestionActions( + data.jsonSchema, + constants.userTypes.APPLICANT, + data.applicationStatus, + null, + constants.userTypes.APPLICANT + ); + // 9. Return payload return res.status(200).json({ status: 'success', data: { ...data, - questionAnswers: JSON.parse(data.questionAnswers), - aboutApplication: typeof data.aboutApplication === 'string' ? JSON.parse(data.aboutApplication) : data.aboutApplication, datasets, projectId: data.projectId || helper.generateFriendlyId(data._id), userType: constants.userTypes.APPLICANT, @@ -386,7 +400,6 @@ module.exports = { module.exports.updateApplication(accessRequestRecord, updateObj).then(accessRequestRecord => { const { unansweredAmendments = 0, answeredAmendments = 0, dirtySchema = false } = accessRequestRecord; if (dirtySchema) { - accessRequestRecord.jsonSchema = JSON.parse(accessRequestRecord.jsonSchema); accessRequestRecord = amendmentController.injectAmendments(accessRequestRecord, constants.userTypes.APPLICANT, req.user); } let data = { @@ -413,9 +426,6 @@ module.exports = { let updateObj = {}; let { aboutApplication, questionAnswers, updatedQuestionId, user, jsonSchema = '' } = data; if (aboutApplication) { - if (typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(aboutApplication); - } const { datasetIds, datasetTitles } = aboutApplication.selectedDatasets.reduce( (newObj, dataset) => { newObj.datasetIds = [...newObj.datasetIds, dataset.datasetId]; @@ -459,7 +469,7 @@ module.exports = { if (_.isNil(updateObj.questionAnswers)) { return accessRecord; } - let updatedAnswer = JSON.parse(updateObj.questionAnswers)[updatedQuestionId]; + let updatedAnswer = updateObj.questionAnswers[updatedQuestionId]; accessRecord = amendmentController.handleApplicantAmendment(accessRecord.toObject(), updatedQuestionId, '', updatedAnswer, user); await DataRequestModel.replaceOne({ _id }, accessRecord, err => { if 
(err) { @@ -1422,8 +1432,6 @@ module.exports = { }); } else { // 8. Send notifications and emails with amendments - accessRecord.questionAnswers = JSON.parse(accessRecord.questionAnswers); - accessRecord.jsonSchema = JSON.parse(accessRecord.jsonSchema); accessRecord = amendmentController.injectAmendments(accessRecord, userType, req.user); await module.exports.createNotifications( accessRecord.submissionType === constants.submissionTypes.INITIAL @@ -1433,7 +1441,7 @@ module.exports = { accessRecord, req.user ); - // 8. Start workflow process in Camunda if publisher requires it and it is the first submission + // 9. Start workflow process in Camunda if publisher requires it and it is the first submission if (accessRecord.workflowEnabled && accessRecord.submissionType === constants.submissionTypes.INITIAL) { let { publisherObj: { name: publisher }, @@ -1449,7 +1457,7 @@ module.exports = { } } }); - // 9. Return aplication and successful response + // 10. Return aplication and successful response return res.status(200).json({ status: 'success', data: accessRecord._doc }); } catch (err) { console.error(err.message); @@ -1478,9 +1486,7 @@ module.exports = { //POST api/v1/data-access-request/:id/email mailDataAccessRequestInfoById: async (req, res) => { - - try{ - + try { // 1. Get the required request params const { params: { id }, @@ -1493,8 +1499,7 @@ module.exports = { }, { path: 'mainApplicant', - } - + }, ]); if (!accessRecord) { @@ -1503,7 +1508,7 @@ module.exports = { // 3. Ensure single datasets are mapped correctly into array if (_.isEmpty(accessRecord.datasets)) { - accessRecord.datasets = [accessRecord.dataset]; + accessRecord.datasets = [accessRecord.dataset]; } // 4. If application is not in progress, actions cannot be performed @@ -1513,19 +1518,18 @@ module.exports = { message: 'This application is no longer in pre-submission status and therefore this action cannot be performed', }); } - + // 5. 
Get the requesting users permission levels let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord.toObject(), req.user.id, req.user._id); // 6. Return unauthorised message if the requesting user is not an applicant if (!authorised || userType !== constants.userTypes.APPLICANT) { return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } - + // 7. Send notification to the authorised user module.exports.createNotifications(constants.notificationTypes.INPROGRESS, {}, accessRecord, req.user); return res.status(200).json({ status: 'success' }); - } catch (err) { console.error(err.message); return res.status(500).json({ @@ -1632,9 +1636,8 @@ module.exports = { if (!authorised || userType !== constants.userTypes.APPLICANT) { return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } - // 6. Parse the json schema to be modified - let jsonSchema = JSON.parse(accessRecord.jsonSchema); - let questionAnswers = JSON.parse(accessRecord.questionAnswers); + // 6. Extract schema and answers + let { jsonSchema, questionAnswers } = _.cloneDeep(accessRecord); // 7. Perform different action depending on mode passed switch (mode) { case constants.formActions.ADDREPEATABLESECTION: @@ -1662,7 +1665,6 @@ module.exports = { message: 'You must supply the question identifiers to remove when performing this action', }); } - // Add clicked 'remove' button to questions to delete (questionId) questionIds = [...questionIds, questionId]; jsonSchema = dynamicForm.removeQuestionReferences(questionSetId, questionIds, jsonSchema); questionAnswers = dynamicForm.removeQuestionAnswers(questionIds, questionAnswers); @@ -1673,16 +1675,16 @@ module.exports = { message: 'You must supply a valid action to perform', }); } - // 8. Save changes to database - accessRecord.jsonSchema = JSON.stringify(jsonSchema); - accessRecord.questionAnswers = JSON.stringify(questionAnswers); - + // 8. 
Update record + accessRecord.jsonSchema = jsonSchema; + accessRecord.questionAnswers = questionAnswers; + // 9. Save changes to database await accessRecord.save(async err => { if (err) { console.error(err.message); return res.status(500).json({ status: 'error', message: err.message }); } else { - // 9. Append question actions for in progress applicant + // 10. Append question actions for in progress applicant jsonSchema = datarequestUtil.injectQuestionActions( jsonSchema, constants.userTypes.APPLICANT, // current user type @@ -1690,7 +1692,7 @@ module.exports = { null, constants.userTypes.APPLICANT // active party ); - // 10. Return necessary object to reflect schema update + // 11. Return necessary object to reflect schema update return res.status(200).json({ success: true, accessRecord: { @@ -1709,6 +1711,127 @@ module.exports = { } }, + //POST api/v1/data-access-request/:id/clone + cloneApplication: async (req, res) => { + try { + // 1. Get the required request and body params + const { + params: { id: appIdToClone }, + } = req; + const { datasetIds = [], datasetTitles = [], publisher = '', appIdToCloneInto = '' } = req.body; + + // 2. Retrieve DAR to clone from database + let appToClone = await DataRequestModel.findOne({ _id: appIdToClone }) + .populate([ + { + path: 'datasets dataset authors', + }, + { + path: 'mainApplicant', + }, + { + path: 'publisherObj', + populate: { + path: 'team', + populate: { + path: 'users', + }, + }, + }, + ]) + .lean(); + if (!appToClone) { + return res.status(404).json({ status: 'error', message: 'Application not found.' }); + } + + // 3. Get the requesting users permission levels + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(appToClone, req.user.id, req.user._id); + + // 4. 
Return unauthorised message if the requesting user is not an applicant + if (!authorised || userType !== constants.userTypes.APPLICANT) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + + // 5. Update question answers with modifications since original submission + appToClone = amendmentController.injectAmendments(appToClone, constants.userTypes.APPLICANT, req.user); + + // 6. Create callback function used to complete the save process + const saveCallBack = (err, doc) => { + if (err) { + console.error(err.message); + return res.status(500).json({ status: 'error', message: err.message }); + } + + // Create notifications + module.exports.createNotifications( + constants.notificationTypes.APPLICATIONCLONED, + { newDatasetTitles: datasetTitles, newApplicationId: doc._id.toString() }, + appToClone, + req.user + ); + + // Return successful response + return res.status(200).json({ + success: true, + accessRecord: doc, + }); + }; + + // 7. Set up new access record or load presubmission application as provided in request and save + let clonedAccessRecord = {}; + if (_.isEmpty(appIdToCloneInto)) { + clonedAccessRecord = await datarequestUtil.cloneIntoNewApplication(appToClone, { + userId: req.user.id, + datasetIds, + datasetTitles, + publisher, + }); + // Save new record + await DataRequestModel.create(clonedAccessRecord, saveCallBack); + } else { + let appToCloneInto = await DataRequestModel.findOne({ _id: appIdToCloneInto }) + .populate([ + { + path: 'datasets dataset authors', + }, + { + path: 'mainApplicant', + }, + { + path: 'publisherObj', + populate: { + path: 'team', + populate: { + path: 'users', + }, + }, + }, + ]) + .lean(); + // Ensure application to clone into was found + if (!appToCloneInto) { + return res.status(404).json({ status: 'error', message: 'Application to clone into not found.' 
}); + } + // Get permissions for application to clone into + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(appToCloneInto, req.user.id, req.user._id); + // Return unauthorised message if the requesting user is not authorised to the new application + if (!authorised || userType !== constants.userTypes.APPLICANT) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + clonedAccessRecord = await datarequestUtil.cloneIntoExistingApplication(appToClone, appToCloneInto); + + // Save into existing record + await DataRequestModel.findOneAndUpdate({ _id: appIdToCloneInto }, clonedAccessRecord, { new: true }, saveCallBack); + } + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred cloning the existing application', + }); + } + }, + updateFileStatus: async (req, res) => { try { // 1. Get the required request params @@ -1757,24 +1880,77 @@ module.exports = { } }, + // API DELETE api/v1/data-access-request/:id + deleteDraftAccessRequest: async (req, res) => { + try { + // 1. Get the required request and body params + const { + params: { id: appIdToDelete }, + } = req; + + // 2. Retrieve DAR to clone from database + let appToDelete = await DataRequestModel.findOne({ _id: appIdToDelete }).populate([ + { + path: 'datasets dataset authors', + }, + { + path: 'mainApplicant', + }, + { + path: 'publisherObj', + populate: { + path: 'team', + populate: { + path: 'users', + }, + }, + }, + ]); + + // 3. Get the requesting users permission levels + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(appToDelete, req.user.id, req.user._id); + + // 4. Return unauthorised message if the requesting user is not an applicant + if (!authorised || userType !== constants.userTypes.APPLICANT) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + + // 5. 
If application is not in progress, actions cannot be performed + if (appToDelete.applicationStatus !== constants.applicationStatuses.INPROGRESS) { + return res.status(400).json({ + success: false, + message: 'This application is no longer in pre-submission status and therefore this action cannot be performed', + }); + } + + // 6. Delete applicatioin + DataRequestModel.findOneAndDelete({ _id: appIdToDelete }, err => { + if (err) console.error(err.message); + }); + + // 7. Create notifications + await module.exports.createNotifications(constants.notificationTypes.APPLICATIONDELETED, {}, appToDelete, req.user); + + return res.status(200).json({ + success: true, + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred deleting the existing application', + }); + } + }, + createNotifications: async (type, context, accessRecord, user) => { // Project details from about application if 5 Safes let { aboutApplication = {} } = accessRecord; - if (typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(accessRecord.aboutApplication); - } let { projectName = 'No project name set' } = aboutApplication; - let { projectId, _id, workflow = {}, dateSubmitted = '', jsonSchema, questionAnswers } = accessRecord; + let { projectId, _id, workflow = {}, dateSubmitted = '', jsonSchema, questionAnswers, createdAt } = accessRecord; if (_.isEmpty(projectId)) { projectId = _id; } - // Parse the schema - if (typeof jsonSchema === 'string') { - jsonSchema = JSON.parse(accessRecord.jsonSchema); - } - if (typeof questionAnswers === 'string') { - questionAnswers = JSON.parse(accessRecord.questionAnswers); - } let { pages, questionPanels, questionSets: questions } = jsonSchema; // Publisher details from single dataset let { @@ -1833,7 +2009,7 @@ module.exports = { 'data access request', accessRecord._id ); - + options = { userType: '', userEmail: appEmail, @@ -1843,16 +2019,15 @@ module.exports = { userType: 
'applicant', submissionType: constants.submissionTypes.INPROGRESS, }; - - + // Build email template ({ html, jsonContent } = await emailGenerator.generateEmail( - aboutApplication, - questions, - pages, - questionPanels, - questionAnswers, - options + aboutApplication, + questions, + pages, + questionPanels, + questionAnswers, + options )); await emailGenerator.sendEmail( [user], @@ -1862,8 +2037,8 @@ module.exports = { false, attachments ); - break; - case constants.notificationTypes.STATUSCHANGE: + break; + case constants.notificationTypes.STATUSCHANGE: // 1. Create notifications // Custodian manager and current step reviewer notifications if (_.has(accessRecord.datasets[0].toObject(), 'publisher.team.users')) { @@ -1935,8 +2110,9 @@ module.exports = { await notificationBuilder.triggerNotificationMessage( custodianUserIds, `A Data Access Request has been submitted to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, - 'data access request', - accessRecord._id + 'data access request received', + accessRecord._id, + accessRecord.datasets[0].publisher.name ); } else { const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || contactPoint; @@ -2359,6 +2535,93 @@ module.exports = { false ); break; + case constants.notificationTypes.APPLICATIONCLONED: + // Deconstruct required variables from context object + const { newDatasetTitles, newApplicationId } = context; + // 1. 
Create notifications + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Your Data Access Request for ${datasetTitles} was successfully duplicated into a new form for ${newDatasetTitles.join( + ',' + )}, which can now be edited`, + 'data access request', + newApplicationId + ); + // Create authors notification + if (!_.isEmpty(authors)) { + await notificationBuilder.triggerNotificationMessage( + authors.map(author => author.id), + `A Data Access Request you contributed to for ${datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, + 'data access request unlinked', + newApplicationId + ); + } + // 2. Send emails to relevant users + // Aggregate objects for custodian and applicant + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; + // Create object to pass through email data + options = { + id: accessRecord._id, + projectId, + projectName, + datasetTitles, + dateSubmitted, + applicants, + firstname, + lastname, + }; + // Create email body content + html = emailGenerator.generateDARClonedEmail(options); + // Send email + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + `Data Access Request for ${datasetTitles} has been duplicated into a new form by ${firstname} ${lastname}`, + html, + false + ); + break; + case constants.notificationTypes.APPLICATIONDELETED: + // 1. Create notifications + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Your Data Access Request for ${datasetTitles} was successfully deleted`, + 'data access request unlinked', + accessRecord._id + ); + // Create authors notification + if (!_.isEmpty(authors)) { + await notificationBuilder.triggerNotificationMessage( + authors.map(author => author.id), + `A draft Data Access Request you contributed to for ${datasetTitles} has been deleted by ${firstname} ${lastname}`, + 'data access request unlinked', + accessRecord._id + ); + } + // 2. 
Send emails to relevant users + // Aggregate objects for custodian and applicant + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; + // Create object to pass through email data + options = { + publisher, + projectName, + datasetTitles, + createdAt, + applicants, + firstname, + lastname, + }; + // Create email body content + html = emailGenerator.generateDARDeletedEmail(options); + // Send email + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + ` ${firstname} ${lastname} has deleted a data access request application`, + html, + false + ); + break; } }, @@ -2465,17 +2728,13 @@ module.exports = { let { aboutApplication, questionAnswers } = app; if (aboutApplication) { - if (typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(aboutApplication); - } ({ projectName } = aboutApplication); } if (_.isEmpty(projectName)) { projectName = `${publisher} - ${name}`; } if (questionAnswers) { - let questionAnswersObj = JSON.parse(questionAnswers); - applicants = datarequestUtil.extractApplicantNames(questionAnswersObj).join(', '); + applicants = datarequestUtil.extractApplicantNames(questionAnswers).join(', '); } if (_.isEmpty(applicants)) { let { firstname, lastname } = app.mainApplicant; diff --git a/src/resources/datarequest/datarequest.model.js b/src/resources/datarequest/datarequest.model.js index 51cfc047..9c541022 100644 --- a/src/resources/datarequest/datarequest.model.js +++ b/src/resources/datarequest/datarequest.model.js @@ -1,111 +1,125 @@ import { model, Schema } from 'mongoose'; -import { WorkflowSchema } from '../workflow/workflow.model'; +import { WorkflowSchema } from '../workflow/workflow.model'; +import constants from '../utilities/constants.util'; -const DataRequestSchema = new Schema({ - version: Number, - userId: Number, // Main applicant - authorIds: [Number], - dataSetId: String, - datasetIds: [{ type: String}], - datasetTitles: [{ type: String}], - projectId: String, - workflowId: { 
type : Schema.Types.ObjectId, ref: 'Workflow' }, - workflow: { type: WorkflowSchema }, - applicationStatus: { - type: String, - default: 'inProgress', - enum: ['inProgress' , 'submitted', 'inReview', 'approved', 'rejected', 'approved with conditions', 'withdrawn'] - }, - archived: { - Boolean, - default: false - }, - applicationStatusDesc : String, - schemaId: { type : Schema.Types.ObjectId, ref: 'data_request_schemas' }, - jsonSchema: { - type: String, - default: "{}" - }, - questionAnswers: { - type: String, - default: "{}" - }, - aboutApplication: { - type: Object, - default: {} - }, - dateSubmitted: { - type: Date - }, - dateFinalStatus: { - type: Date - }, - dateReviewStart: { - type: Date - }, - publisher: { - type: String, - default: "" - }, - files: [{ - name: { type: String }, - size: { type: Number }, - description: { type: String }, - status: { type: String }, - fileId: { type: String }, - error: { type: String, default: '' }, - owner: { - type: Schema.Types.ObjectId, - ref: 'User' - } - }], - amendmentIterations: [{ - dateCreated: { type: Date }, - createdBy: { type : Schema.Types.ObjectId, ref: 'User' }, - dateReturned: { type: Date }, - returnedBy: { type : Schema.Types.ObjectId, ref: 'User' }, - dateSubmitted: { type: Date }, - submittedBy: { type : Schema.Types.ObjectId, ref: 'User' }, - questionAnswers: { type: Object, default: {} } - }], -}, { - timestamps: true, - toJSON: { virtuals: true }, - toObject: { virtuals: true } -}); +const DataRequestSchema = new Schema( + { + version: Number, + userId: Number, // Main applicant + authorIds: [Number], + dataSetId: String, + datasetIds: [{ type: String }], + datasetTitles: [{ type: String }], + isCloneable: Boolean, + projectId: String, + workflowId: { type: Schema.Types.ObjectId, ref: 'Workflow' }, + workflow: { type: WorkflowSchema }, + applicationStatus: { + type: String, + default: 'inProgress', + enum: ['inProgress', 'submitted', 'inReview', 'approved', 'rejected', 'approved with conditions', 
'withdrawn'], + }, + archived: { + Boolean, + default: false, + }, + applicationStatusDesc: String, + schemaId: { type: Schema.Types.ObjectId, ref: 'data_request_schemas' }, + jsonSchema: { + type: Object, + default: {}, + }, + questionAnswers: { + type: Object, + default: {}, + }, + aboutApplication: { + type: Object, + default: {}, + }, + dateSubmitted: { + type: Date, + }, + dateFinalStatus: { + type: Date, + }, + dateReviewStart: { + type: Date, + }, + publisher: { + type: String, + default: '', + }, + formType: { + type: String, + default: constants.formTypes.Extended5Safe, + enum: Object.values(constants.formTypes), + }, + files: [ + { + name: { type: String }, + size: { type: Number }, + description: { type: String }, + status: { type: String }, + fileId: { type: String }, + error: { type: String, default: '' }, + owner: { + type: Schema.Types.ObjectId, + ref: 'User', + }, + }, + ], + amendmentIterations: [ + { + dateCreated: { type: Date }, + createdBy: { type: Schema.Types.ObjectId, ref: 'User' }, + dateReturned: { type: Date }, + returnedBy: { type: Schema.Types.ObjectId, ref: 'User' }, + dateSubmitted: { type: Date }, + submittedBy: { type: Schema.Types.ObjectId, ref: 'User' }, + questionAnswers: { type: Object, default: {} }, + }, + ], + }, + { + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); DataRequestSchema.virtual('datasets', { - ref: 'Data', - foreignField: 'datasetid', - localField: 'datasetIds', - justOne: false + ref: 'Data', + foreignField: 'datasetid', + localField: 'datasetIds', + justOne: false, }); DataRequestSchema.virtual('dataset', { - ref: 'Data', - foreignField: 'datasetid', - localField: 'dataSetId', - justOne: true + ref: 'Data', + foreignField: 'datasetid', + localField: 'dataSetId', + justOne: true, }); DataRequestSchema.virtual('mainApplicant', { - ref: 'User', - foreignField: 'id', - localField: 'userId', - justOne: true + ref: 'User', + foreignField: 'id', + localField: 'userId', + 
justOne: true, }); DataRequestSchema.virtual('publisherObj', { - ref: 'Publisher', - foreignField: 'name', - localField: 'publisher', - justOne: true + ref: 'Publisher', + foreignField: 'name', + localField: 'publisher', + justOne: true, }); DataRequestSchema.virtual('authors', { - ref: 'User', - foreignField: 'id', - localField: 'authorIds' + ref: 'User', + foreignField: 'id', + localField: 'authorIds', }); -export const DataRequestModel = model('data_request', DataRequestSchema) +export const DataRequestModel = model('data_request', DataRequestSchema); diff --git a/src/resources/datarequest/datarequest.route.js b/src/resources/datarequest/datarequest.route.js index c2f6d726..460f7b2d 100644 --- a/src/resources/datarequest/datarequest.route.js +++ b/src/resources/datarequest/datarequest.route.js @@ -8,13 +8,13 @@ const datarequestController = require('./datarequest.controller'); const fs = require('fs'); const path = './tmp'; const storage = multer.diskStorage({ - destination: function (req, file, cb) { - if (!fs.existsSync(path)) { - fs.mkdirSync(path); - } - cb(null, path) - } -}) + destination: function (req, file, cb) { + if (!fs.existsSync(path)) { + fs.mkdirSync(path); + } + cb(null, path); + }, +}); const multerMid = multer({ storage: storage }); const router = express.Router(); @@ -40,12 +40,19 @@ router.get('/dataset/:dataSetId', passport.authenticate('jwt'), datarequestContr router.get('/datasets/:datasetIds', passport.authenticate('jwt'), datarequestController.getAccessRequestByUserAndMultipleDatasets); // @route GET api/v1/data-access-request/:id/file/:fileId -// @desc GET +// @desc GET // @access Private -router.get('/:id/file/:fileId', param('id').customSanitizer(value => {return value}), passport.authenticate('jwt'), datarequestController.getFile); +router.get( + '/:id/file/:fileId', + param('id').customSanitizer(value => { + return value; + }), + passport.authenticate('jwt'), + datarequestController.getFile +); // @route GET 
api/v1/data-access-request/:id/file/:fileId/status -// @desc GET Status of a file +// @desc GET Status of a file // @access Private router.get('/:id/file/:fileId/status', passport.authenticate('jwt'), datarequestController.getFileStatus); @@ -80,13 +87,13 @@ router.put('/:id/startreview', passport.authenticate('jwt'), datarequestControll router.put('/:id/stepoverride', passport.authenticate('jwt'), datarequestController.updateAccessRequestStepOverride); // @route PUT api/v1/data-access-request/:id/deletefile -// @desc Update access request deleting a file by Id +// @desc Update access request deleting a file by Id // @access Private - Applicant (Gateway User) router.put('/:id/deletefile', passport.authenticate('jwt'), datarequestController.updateAccessRequestDeleteFile); // @route POST api/v1/data-access-request/:id/upload // @desc POST application files to scan bucket -// @access Private - Applicant (Gateway User / Custodian Manager) +// @access Private - Applicant (Gateway User / Custodian Manager) router.post('/:id/upload', passport.authenticate('jwt'), multerMid.array('assets'), datarequestController.uploadFiles); // @route POST api/v1/data-access-request/:id/amendments @@ -104,6 +111,11 @@ router.post('/:id/requestAmendments', passport.authenticate('jwt'), amendmentCon // @access Private - Applicant router.post('/:id/actions', passport.authenticate('jwt'), datarequestController.performAction); +// @route POST api/v1/data-access-request/:id/clone +// @desc Clone an existing application forms answers into a new one potentially for a different custodian +// @access Private - Applicant +router.post('/:id/clone', passport.authenticate('jwt'), datarequestController.cloneApplication); + // @route POST api/v1/data-access-request/:id // @desc Submit request record // @access Private - Applicant (Gateway User) @@ -120,8 +132,13 @@ router.post('/:id/notify', passport.authenticate('jwt'), datarequestController.n router.post('/:id/file/:fileId/status', 
passport.authenticate('jwt'), datarequestController.updateFileStatus); // @route POST api/v1/data-access-request/:id/email -// @desc Mail a Data Access Request information in presubmission +// @desc Mail a Data Access Request information in presubmission // @access Private - Applicant router.post('/:id/email', passport.authenticate('jwt'), datarequestController.mailDataAccessRequestInfoById); -module.exports = router; \ No newline at end of file +// @route DELETE api/v1/data-access-request/:id +// @desc Delete an application in a presubmissioin +// @access Private - Applicant +router.delete('/:id', passport.authenticate('jwt'), datarequestController.deleteDraftAccessRequest); + +module.exports = router; diff --git a/src/resources/datarequest/datarequest.schemas.model.js b/src/resources/datarequest/datarequest.schemas.model.js index c2da5c35..50f85776 100644 --- a/src/resources/datarequest/datarequest.schemas.model.js +++ b/src/resources/datarequest/datarequest.schemas.model.js @@ -1,4 +1,5 @@ import { model, Schema } from 'mongoose'; +import constants from '../utilities/constants.util'; const DataRequestSchemas = new Schema({ id: Number, @@ -15,9 +16,21 @@ const DataRequestSchemas = new Schema({ type: String, default: '' }, - jsonSchema: String, + formType: { + type: String, + default: constants.formTypes.Extended5Safe, + enum: Object.values(constants.formTypes) + }, + jsonSchema: { + type: Object, + default: {} + }, + isCloneable: Boolean }, { timestamps: true }); + export const DataRequestSchemaModel = model('data_request_schemas', DataRequestSchemas); + + diff --git a/src/resources/datarequest/datarequest.schemas.route.js b/src/resources/datarequest/datarequest.schemas.route.js index 490598f3..01c51a48 100644 --- a/src/resources/datarequest/datarequest.schemas.route.js +++ b/src/resources/datarequest/datarequest.schemas.route.js @@ -17,7 +17,7 @@ router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, dataRequestSchema.version = version; 
dataRequestSchema.dataSetId = dataSetId; dataRequestSchema.publisher = publisher; - dataRequestSchema.jsonSchema = JSON.stringify(jsonSchema); + dataRequestSchema.jsonSchema = jsonSchema; await dataRequestSchema.save(async err => { if (err) return res.json({ success: false, error: err }); diff --git a/src/resources/datarequest/utils/__tests__/datarequest.util.test.js b/src/resources/datarequest/utils/__tests__/datarequest.util.test.js index 77f6fe1c..d9331270 100644 --- a/src/resources/datarequest/utils/__tests__/datarequest.util.test.js +++ b/src/resources/datarequest/utils/__tests__/datarequest.util.test.js @@ -16,12 +16,54 @@ describe('injectQuestionActions', () => { order: 2, }; const cases = [ - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.INPROGRESS, '', constants.userTypes.APPLICANT, [guidance]], - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.APPROVED, '', constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.APPROVEDWITHCONDITIONS, '', constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.INREVIEW, '', constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.WITHDRAWN, '', constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.SUBMITTED, '', constants.userTypes.CUSTODIAN, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + constants.applicationStatuses.INPROGRESS, + '', + constants.userTypes.APPLICANT, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + constants.applicationStatuses.APPROVED, + '', + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + 
constants.applicationStatuses.APPROVEDWITHCONDITIONS, + '', + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + constants.applicationStatuses.INREVIEW, + '', + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + constants.applicationStatuses.WITHDRAWN, + '', + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.APPLICANT, + constants.applicationStatuses.SUBMITTED, + '', + constants.userTypes.CUSTODIAN, + [guidance], + ], [ data[0].jsonSchema, constants.userTypes.CUSTODIAN, @@ -54,7 +96,14 @@ describe('injectQuestionActions', () => { constants.userTypes.APPLICANT, [guidance], ], - [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.WITHDRAWN, constants.roleTypes.MANAGER, constants.userTypes.CUSTODIAN, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.WITHDRAWN, + constants.roleTypes.MANAGER, + constants.userTypes.CUSTODIAN, + [guidance], + ], [ data[0].jsonSchema, constants.userTypes.CUSTODIAN, @@ -63,7 +112,14 @@ describe('injectQuestionActions', () => { constants.userTypes.CUSTODIAN, [guidance], ], - [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.APPROVED, constants.roleTypes.REVIEWER, constants.userTypes.CUSTODIAN, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.APPROVED, + constants.roleTypes.REVIEWER, + constants.userTypes.CUSTODIAN, + [guidance], + ], [ data[0].jsonSchema, constants.userTypes.CUSTODIAN, @@ -72,9 +128,30 @@ describe('injectQuestionActions', () => { constants.userTypes.CUSTODIAN, [guidance], ], - [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.INREVIEW, constants.roleTypes.REVIEWER, constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, 
constants.userTypes.CUSTODIAN, constants.applicationStatuses.WITHDRAWN, constants.roleTypes.REVIEWER, constants.userTypes.CUSTODIAN, [guidance]], - [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.SUBMITTED, constants.roleTypes.REVIEWER, constants.userTypes.CUSTODIAN, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.INREVIEW, + constants.roleTypes.REVIEWER, + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.WITHDRAWN, + constants.roleTypes.REVIEWER, + constants.userTypes.CUSTODIAN, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.SUBMITTED, + constants.roleTypes.REVIEWER, + constants.userTypes.CUSTODIAN, + [guidance], + ], ]; test.each(cases)( 'given a jsonSchema object %p and the user is a/an %p, and the application status is %p, it returns the correct question actions', diff --git a/src/resources/datarequest/utils/datarequest.util.js b/src/resources/datarequest/utils/datarequest.util.js index 40d26d63..7bc29a6c 100644 --- a/src/resources/datarequest/utils/datarequest.util.js +++ b/src/resources/datarequest/utils/datarequest.util.js @@ -1,7 +1,11 @@ -import _ from 'lodash'; +import { has, isEmpty, isNil } from 'lodash'; import constants from '../../utilities/constants.util'; import teamController from '../../team/team.controller'; import moment from 'moment'; +import { DataRequestSchemaModel } from '../datarequest.schemas.model'; +import dynamicForm from '../../utilities/dynamicForms/dynamicForm.util'; + +const repeatedSectionRegex = /_[a-zA-Z|\d]{5}$/gm; const injectQuestionActions = (jsonSchema, userType, applicationStatus, role = '', activeParty) => { let formattedSchema = {}; @@ -9,7 +13,7 @@ const injectQuestionActions = (jsonSchema, userType, applicationStatus, role = ' if (applicationStatus === 
constants.applicationStatuses.INREVIEW) { formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][role][applicationStatus][activeParty] }; } else { - formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][role][applicationStatus]}; + formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][role][applicationStatus] }; } } else { formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][applicationStatus] }; @@ -27,9 +31,9 @@ const getUserPermissionsForApplication = (application, userId, _id) => { return { authorised, userType }; } // Check if the user is a custodian team member and assign permissions if so - if (_.has(application.datasets[0], 'publisher.team')) { + if (has(application.datasets[0], 'publisher.team')) { isTeamMember = teamController.checkTeamPermissions('', application.datasets[0].publisher.team, _id); - } else if (_.has(application, 'publisherObj.team')) { + } else if (has(application, 'publisherObj.team')) { isTeamMember = teamController.checkTeamPermissions('', application.publisherObj.team, _id); } if (isTeamMember) { @@ -37,8 +41,8 @@ const getUserPermissionsForApplication = (application, userId, _id) => { authorised = true; } // If user is not authenticated as a custodian, check if they are an author or the main applicant - if (application.applicationStatus === constants.applicationStatuses.INPROGRESS || _.isEmpty(userType)) { - if (application.authorIds.includes(userId) || application.userId === userId) { + if (application.applicationStatus === constants.applicationStatuses.INPROGRESS || isEmpty(userType)) { + if (application.userId === userId || (application.authorIds && application.authorIds.includes(userId))) { userType = constants.userTypes.APPLICANT; authorised = true; } @@ -120,9 +124,9 @@ const updateQuestion = (questionsArr, question) => { return; } // 5. 
If target question has not been identified, recall function with child questions - if (_.has(currentQuestion, 'input.options')) { + if (has(currentQuestion, 'input.options')) { currentQuestion.input.options.forEach(option => { - if (_.has(option, 'conditionalQuestions')) { + if (has(option, 'conditionalQuestions')) { Array.isArray(option.conditionalQuestions) && option.conditionalQuestions.forEach(iter); } }); @@ -145,16 +149,16 @@ const setQuestionState = (question, questionAlert, readOnly) => { }, }; // 3. Recursively set readOnly mode for children - if (_.has(question, 'input.options')) { + if (has(question, 'input.options')) { question.input.options.forEach(function iter(currentQuestion) { // 4. If current question contains an input, set readOnly mode - if (_.has(currentQuestion, 'input')) { + if (has(currentQuestion, 'input')) { currentQuestion.input.readOnly = readOnly; } // 5. Recall the iteration with each child question - if (_.has(currentQuestion, 'conditionalQuestions')) { + if (has(currentQuestion, 'conditionalQuestions')) { currentQuestion.conditionalQuestions.forEach(option => { - if (_.has(option, 'input.options')) { + if (has(option, 'input.options')) { Array.isArray(option.input.options) && option.input.options.forEach(iter); } else { option.input.readOnly = readOnly; @@ -179,11 +183,11 @@ const buildQuestionAlert = (userType, iterationStatus, completed, amendment, use requestedBy = matchCurrentUser(user, requestedBy); updatedBy = matchCurrentUser(user, updatedBy); // 5. Update the generic question alerts to match the scenario - let relevantActioner = !_.isNil(updatedBy) ? updatedBy : userType === constants.userTypes.CUSTODIAN ? requestedBy : publisher; + let relevantActioner = !isNil(updatedBy) ? updatedBy : userType === constants.userTypes.CUSTODIAN ? 
requestedBy : publisher; questionAlert.text = questionAlert.text.replace('#NAME#', relevantActioner); questionAlert.text = questionAlert.text.replace( '#DATE#', - userType === !_.isNil(dateUpdated) ? moment(dateUpdated).format('Do MMM YYYY') : moment(dateRequested).format('Do MMM YYYY') + userType === !isNil(dateUpdated) ? moment(dateUpdated).format('Do MMM YYYY') : moment(dateRequested).format('Do MMM YYYY') ); // 6. Return the built question alert return questionAlert; @@ -204,6 +208,146 @@ const matchCurrentUser = (user, auditField) => { return auditField; }; +const cloneIntoExistingApplication = (appToClone, appToUpdate) => { + // 1. Extract values required to clone into existing application + const { questionAnswers } = appToClone; + const { jsonSchema: schemaToUpdate } = appToUpdate; + + // 2. Extract and append any user repeated sections from the original form + if (containsUserRepeatedSections(questionAnswers)) { + const updatedSchema = copyUserRepeatedSections(appToClone, schemaToUpdate); + appToUpdate.jsonSchema = updatedSchema; + } + + // 3. Return updated application + return { ...appToUpdate, questionAnswers }; +}; + +const cloneIntoNewApplication = async (appToClone, context) => { + // 1. Extract values required to clone existing application + const { userId, datasetIds, datasetTitles, publisher } = context; + const { questionAnswers } = appToClone; + + // 2. Get latest publisher schema + const { jsonSchema, version, _id: schemaId, isCloneable = false, formType } = await getLatestPublisherSchema(publisher); + + // 3. Create new application with combined details + let newApplication = { + version, + userId, + datasetIds, + datasetTitles, + isCloneable, + formType, + jsonSchema, + schemaId, + publisher, + questionAnswers, + aboutApplication: {}, + amendmentIterations: [], + applicationStatus: constants.applicationStatuses.INPROGRESS, + }; + + // 4. 
Extract and append any user repeated sections from the original form + if (containsUserRepeatedSections(questionAnswers)) { + const updatedSchema = copyUserRepeatedSections(appToClone, jsonSchema); + newApplication.jsonSchema = updatedSchema; + } + + // 5. Return the cloned application + return newApplication; +}; + +const getLatestPublisherSchema = async publisher => { + // 1. Find latest schema for publisher + let schema = await DataRequestSchemaModel.findOne({ + $or: [{ publisher }], + status: 'active', + }).sort({ createdAt: -1 }); + + // 2. If no schema is found, throw error + if (!schema) { + throw new Error('The selected publisher does not have an active application form'); + } + + // 3. Return schema + return schema; +}; + +const containsUserRepeatedSections = questionAnswers => { + // 1. Use regex pattern matching to detect repeated sections (questionId contains _ followed by 5 alphanumeric characters) + // e.g. applicantfirstname_1TV6P + return Object.keys(questionAnswers).some(key => key.match(repeatedSectionRegex)); +}; + +const copyUserRepeatedSections = (appToClone, schemaToUpdate) => { + const { questionAnswers } = appToClone; + const { questionSets } = schemaToUpdate; + let copiedQuestionSuffixes = []; + // 1. Extract all answers to repeated sections indicating questions that may need to be carried over + const repeatedQuestionIds = extractRepeatedQuestionIds(questionAnswers); + // 2. Iterate through each repeated question id + repeatedQuestionIds.forEach(qId => { + // 3. Skip if question has already been copied in by a previous clone operation + let questionExists = questionSets.some(qS => !isNil(dynamicForm.findQuestionRecursive(qS.questions, qId))); + if (questionExists) { + return; + } + // 4. Split question id to get original id and unique suffix + const [questionId, uniqueSuffix] = qId.split('_'); + // 5. Find the question in the new schema + questionSets.forEach(qS => { + // 6. 
Check if related group has already been copied in by this clone operation + if (copiedQuestionSuffixes.includes(uniqueSuffix)) { + return; + } + let question = dynamicForm.findQuestionRecursive(qS.questions, questionId); + // 7. Ensure question was found and still exists in new schema + if (question) { + schemaToUpdate = insertUserRepeatedSections(questionSets, qS, schemaToUpdate, uniqueSuffix); + // 8. Update duplicate question groups that have now been processed + copiedQuestionSuffixes = [...copiedQuestionSuffixes, uniqueSuffix]; + } + }); + }); + // 9. Return updated schema + return { ...schemaToUpdate }; +}; + +const insertUserRepeatedSections = (questionSets, questionSet, schemaToUpdate, uniqueSuffix) => { + const { questionSetId, questions } = questionSet; + // 1. Determine if question is repeatable via a question set or question group + const repeatQuestionsId = `add-${questionSetId}`; + if (questionSets.some(qS => qS.questionSetId === repeatQuestionsId)) { + // 2. Replicate question set + let duplicateQuestionSet = dynamicForm.duplicateQuestionSet(repeatQuestionsId, schemaToUpdate, uniqueSuffix); + schemaToUpdate = dynamicForm.insertQuestionSet(repeatQuestionsId, duplicateQuestionSet, schemaToUpdate); + } else { + // 2. Find and replicate the question group + let duplicateQuestionsButton = dynamicForm.findQuestionRecursive(questions, repeatQuestionsId); + if (duplicateQuestionsButton) { + const { + questionId, + input: { questionIds, separatorText }, + } = duplicateQuestionsButton; + let duplicateQuestions = dynamicForm.duplicateQuestions(questionSetId, questionIds, separatorText, schemaToUpdate, uniqueSuffix); + schemaToUpdate = dynamicForm.insertQuestions(questionSetId, questionId, duplicateQuestions, schemaToUpdate); + } + } + // 3. Return updated schema + return schemaToUpdate; +}; + +const extractRepeatedQuestionIds = questionAnswers => { + // 1. 
Reduce original question answers to only answers relating to repeating sections + return Object.keys(questionAnswers).reduce((arr, key) => { + if (key.match(repeatedSectionRegex)) { + arr = [...arr, key]; + } + return arr; + }, []); +}; + export default { injectQuestionActions: injectQuestionActions, getUserPermissionsForApplication: getUserPermissionsForApplication, @@ -212,4 +356,6 @@ export default { updateQuestion: updateQuestion, buildQuestionAlert: buildQuestionAlert, setQuestionState: setQuestionState, + cloneIntoExistingApplication: cloneIntoExistingApplication, + cloneIntoNewApplication: cloneIntoNewApplication, }; diff --git a/src/resources/dataset/__tests__/dataset.controller.test.js b/src/resources/dataset/__tests__/dataset.controller.test.js index ff39944b..4f9e6eb1 100644 --- a/src/resources/dataset/__tests__/dataset.controller.test.js +++ b/src/resources/dataset/__tests__/dataset.controller.test.js @@ -100,7 +100,7 @@ describe('DatasetController', function () { expect(serviceStub.calledOnce).toBe(true); expect(status.calledWith(200)).toBe(true); - expect(json.calledWith({ success: true, data: stubValue })).toBe(true); + expect(json.calledWith({ success: true, datasets: stubValue })).toBe(true); }); it('should return a server error if an unexpected exception occurs', async function () { diff --git a/src/resources/dataset/dataset.controller.js b/src/resources/dataset/dataset.controller.js index df338a82..f6829c03 100644 --- a/src/resources/dataset/dataset.controller.js +++ b/src/resources/dataset/dataset.controller.js @@ -18,7 +18,8 @@ export default class DatasetController extends Controller { }); } // Find the dataset - let dataset = await this.datasetService.getDataset(id, req.query); + const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; + let dataset = await this.datasetService.getDataset(id, req.query, options); // Return if no dataset found if (!dataset) { return res.status(404).json({ @@ -44,11 +45,12 @@ export 
default class DatasetController extends Controller { async getDatasets(req, res) { try { // Find the datasets - let datasets = await this.datasetService.getDatasets(req.query); + const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; + let datasets = await this.datasetService.getDatasets(req.query, options); // Return the datasets return res.status(200).json({ success: true, - data: datasets + datasets }); } catch (err) { // Return error response if something goes wrong diff --git a/src/resources/dataset/dataset.entity.js b/src/resources/dataset/dataset.entity.js index d6ac6ce0..2edc0815 100644 --- a/src/resources/dataset/dataset.entity.js +++ b/src/resources/dataset/dataset.entity.js @@ -45,18 +45,15 @@ export default class DatasetClass extends Entity { // Manually update identifier URL link transformedObject.dataset.identifier = `https://web.www.healthdatagateway.org/dataset/${this.datasetid}`; - + // Append static schema details for v2 - const formattedObject = { - '@schema': { - type: `Dataset`, - version: `2.0.0`, - url: `https://raw.githubusercontent.com/HDRUK/schemata/master/schema/dataset/latest/dataset.schema.json`, - }, - ...transformedObject, - }; + transformedObject.dataset['@schema'] = { + type: `Dataset`, + version: `2.0.0`, + url: `https://raw.githubusercontent.com/HDRUK/schemata/master/schema/dataset/latest/dataset.schema.json`, + } // Return v2 object - return formattedObject; + return transformedObject; } } diff --git a/src/resources/dataset/dataset.model.js b/src/resources/dataset/dataset.model.js index 0f9db633..a82cdf29 100644 --- a/src/resources/dataset/dataset.model.js +++ b/src/resources/dataset/dataset.model.js @@ -13,6 +13,8 @@ const datasetSchema = new Schema( name: String, description: String, source: String, + is5Safes: Boolean, + hasTechnicalDetails: Boolean, resultsInsights: String, link: String, type: String, diff --git a/src/resources/dataset/dataset.repository.js 
b/src/resources/dataset/dataset.repository.js index 0544292d..df4e1db3 100644 --- a/src/resources/dataset/dataset.repository.js +++ b/src/resources/dataset/dataset.repository.js @@ -7,13 +7,11 @@ export default class DatasetRepository extends Repository { this.dataset = Dataset; } - async getDataset(query) { - const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; + async getDataset(query, options) { return this.findOne(query, options); } - async getDatasets(query) { - const options = { lean: false, populate: { path: 'submittedDataAccessRequests' } }; + async getDatasets(query, options) { return this.find(query, options); } diff --git a/src/resources/dataset/dataset.service.js b/src/resources/dataset/dataset.service.js index 1a79d7de..5bd76087 100644 --- a/src/resources/dataset/dataset.service.js +++ b/src/resources/dataset/dataset.service.js @@ -9,13 +9,13 @@ export default class DatasetService { this.courseRepository = courseRepository; } - async getDataset(id, query = {}) { + async getDataset(id, query = {}, options = {}) { // Protect for no id passed - if(!id) return; + if (!id) return; // Get dataset from Db by datasetid first query = { ...query, datasetid: id }; - let dataset = await this.datasetRepository.getDataset(query); + let dataset = await this.datasetRepository.getDataset(query, options); // Return undefined if no dataset found if (!dataset) return; @@ -37,8 +37,8 @@ export default class DatasetService { return dataset; } - async getDatasets(query = {}) { - return this.datasetRepository.getDatasets(query); + async getDatasets(query = {}, options = {} ) { + return this.datasetRepository.getDatasets(query, options); } async getRelatedObjects(pid) { @@ -65,7 +65,7 @@ export default class DatasetService { this.paperRepository.find(query, { lean }), this.toolRepository.find(query, { lean }), this.projectRepository.find(query, { lean }), - this.courseRepository.find(query, { lean }) + this.courseRepository.find(query, { lean }), 
]); // Flatten and reduce related entities into related objects @@ -79,7 +79,7 @@ export default class DatasetService { objectType: entity.type, user: obj.user, updated: obj.updated, - } + }; }); arr = [...arr, ...formattedEntityRelatedObjects]; return arr; @@ -87,7 +87,7 @@ export default class DatasetService { return relatedObjects; } - reformatTechnicalDetails (dataset) { + reformatTechnicalDetails(dataset) { // Return if no technical details found if (_.isNil(dataset.structuralMetadata) || _.isNil(dataset.structuralMetadata.dataClasses)) { return dataset; @@ -109,5 +109,5 @@ export default class DatasetService { return { id, description, name, dataElementsCount: dataElements.length || 0, dataElements }; }); return dataset; - }; + } } diff --git a/src/resources/dataset/datasetfiles/dummy.json b/src/resources/dataset/datasetfiles/dummy.json new file mode 100644 index 00000000..c0fc228d --- /dev/null +++ b/src/resources/dataset/datasetfiles/dummy.json @@ -0,0 +1 @@ +{ "reasonForExistence": "I live so this folder will live" } diff --git a/src/resources/dataset/datasetonboarding.controller.js b/src/resources/dataset/datasetonboarding.controller.js new file mode 100644 index 00000000..0623bb03 --- /dev/null +++ b/src/resources/dataset/datasetonboarding.controller.js @@ -0,0 +1,1857 @@ +import { Data } from '../tool/data.model'; +import { PublisherModel } from '../publisher/publisher.model'; +import { filtersService } from '../filters/dependency'; +import notificationBuilder from '../utilities/notificationBuilder'; +import emailGenerator from '../utilities/emailGenerator.util'; +import { v4 as uuidv4 } from 'uuid'; +import _ from 'lodash'; +import axios from 'axios'; +import FormData from 'form-data'; +import Ajv from 'ajv'; +import moment from 'moment'; +var fs = require('fs'); + +import constants from '../utilities/constants.util'; + +module.exports = { + //GET api/v1/dataset-onboarding + getDatasetsByPublisher: async (req, res) => { + try { + let { + params: { 
publisherID }, + } = req; + + if (!publisherID) return res.status(404).json({ status: 'error', message: 'Publisher ID could not be found.' }); + + let datasetIds = []; + + if (publisherID === 'admin') { + // get all datasets in review for admin + datasetIds = await Data.find({ activeflag: 'inReview' }).sort({ 'timestamps.submitted': -1 }); + } else { + // get all pids for publisherID + datasetIds = await Data.find({ + $and: [ + { 'datasetv2.summary.publisher.identifier': publisherID }, + { + $or: [{ activeflag: 'active' }, { activeflag: 'inReview' }, { activeflag: 'draft' }, { activeflag: 'rejected' }], + }, + ], + }) + .sort({ 'timestamps.updated': -1 }) + .distinct('pid'); + } + + let listOfDatasets = []; + for (const datasetId of datasetIds) { + let datasetDetails = await Data.findOne({ + pid: datasetId, + }) + .sort({ 'timestamps.updated': -1 }) + .lean(); + + let datasetVersions = await Data.find( + { + pid: datasetId, + }, + { + _id: 1, + datasetVersion: 1, + activeflag: 1, + } + ) + .sort({ 'timestamps.created': -1 }) + .lean(); + + datasetDetails.listOfVersions = datasetVersions; + listOfDatasets.push(datasetDetails); + } + + return res.status(200).json({ + success: true, + data: { listOfDatasets }, + }); + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err.message }); + } + }, + + //GET api/v1/dataset-onboarding/:id + getDatasetVersion: async (req, res) => { + try { + const id = req.params.id || null; + + if (!id) return res.status(404).json({ status: 'error', message: 'Dataset pid could not be found.' 
}); + + let dataset = await Data.findOne({ _id: id }); + if (dataset.questionAnswers) { + dataset.questionAnswers = JSON.parse(dataset.questionAnswers); + } else { + //if no questionAnswers then populate from MDC + dataset.questionAnswers = module.exports.populateQuestionAnswers(dataset); + await Data.findOneAndUpdate({ _id: id }, { questionAnswers: JSON.stringify(dataset.questionAnswers) }); + } + + if (!dataset.structuralMetadata) { + //if no structuralMetadata then populate from MDC + dataset.structuralMetadata = module.exports.populateStructuralMetadata(dataset); + await Data.findOneAndUpdate({ _id: id }, { structuralMetadata: dataset.structuralMetadata }); + } + + let listOfDatasets = await Data.find({ pid: dataset.pid }, { _id: 1, datasetVersion: 1, activeflag: 1 }).sort({ + 'timestamps.created': -1, + }); + + return res.status(200).json({ + success: true, + data: { dataset }, + listOfDatasets, + }); + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err.message }); + } + }, + + populateQuestionAnswers: dataset => { + let questionAnswers = {}; + + //Summary + if (!_.isNil(dataset.datasetv2.summary.title) && !_.isEmpty(dataset.datasetv2.summary.title)) + questionAnswers['summary/title'] = dataset.datasetv2.summary.title; + if (_.isNil(questionAnswers['summary/title'])) questionAnswers['summary/title'] = dataset.name; + if (!_.isNil(dataset.datasetv2.summary.abstract) && !_.isEmpty(dataset.datasetv2.summary.abstract)) + questionAnswers['summary/abstract'] = dataset.datasetv2.summary.abstract; + if (!_.isNil(dataset.datasetv2.summary.contactPoint) && !_.isEmpty(dataset.datasetv2.summary.contactPoint)) + questionAnswers['summary/contactPoint'] = dataset.datasetv2.summary.contactPoint; + if (!_.isNil(dataset.datasetv2.summary.keywords) && !_.isEmpty(dataset.datasetv2.summary.keywords)) + questionAnswers['summary/keywords'] = module.exports.returnAsArray(dataset.datasetv2.summary.keywords); + if 
(!_.isNil(dataset.datasetv2.summary.alternateIdentifiers) && !_.isEmpty(dataset.datasetv2.summary.alternateIdentifiers)) + questionAnswers['summary/alternateIdentifiers'] = dataset.datasetv2.summary.alternateIdentifiers; + if (!_.isNil(dataset.datasetv2.summary.doiName) && !_.isEmpty(dataset.datasetv2.summary.doiName)) + questionAnswers['summary/doiName'] = dataset.datasetv2.summary.doiName; + //Documentation + if (!_.isNil(dataset.datasetv2.documentation.description) && !_.isEmpty(dataset.datasetv2.documentation.description)) + questionAnswers['properties/documentation/description'] = dataset.datasetv2.documentation.description; + if (!_.isNil(dataset.datasetv2.documentation.associatedMedia) && !_.isEmpty(dataset.datasetv2.documentation.associatedMedia)) + questionAnswers['properties/documentation/associatedMedia'] = module.exports.returnAsArray( + dataset.datasetv2.documentation.associatedMedia + ); + if (!_.isNil(dataset.datasetv2.documentation.isPartOf) && !_.isEmpty(dataset.datasetv2.documentation.isPartOf)) + questionAnswers['properties/documentation/isPartOf'] = dataset.datasetv2.documentation.isPartOf; + //Coverage + if (!_.isNil(dataset.datasetv2.coverage.spatial) && !_.isEmpty(dataset.datasetv2.coverage.spatial)) + questionAnswers['properties/coverage/spatial'] = dataset.datasetv2.coverage.spatial; + if (!_.isNil(dataset.datasetv2.coverage.typicalAgeRange) && !_.isEmpty(dataset.datasetv2.coverage.typicalAgeRange)) + questionAnswers['properties/coverage/typicalAgeRange'] = dataset.datasetv2.coverage.typicalAgeRange; + if ( + !_.isNil(dataset.datasetv2.coverage.physicalSampleAvailability) && + !_.isEmpty(dataset.datasetv2.coverage.physicalSampleAvailability) + ) + questionAnswers['properties/coverage/physicalSampleAvailability'] = module.exports.returnAsArray( + dataset.datasetv2.coverage.physicalSampleAvailability + ); + if (!_.isNil(dataset.datasetv2.coverage.followup) && !_.isEmpty(dataset.datasetv2.coverage.followup)) + 
questionAnswers['properties/coverage/followup'] = dataset.datasetv2.coverage.followup; + if (!_.isNil(dataset.datasetv2.coverage.pathway) && !_.isEmpty(dataset.datasetv2.coverage.pathway)) + questionAnswers['properties/coverage/pathway'] = dataset.datasetv2.coverage.pathway; + //Provenance + //Origin + if (!_.isNil(dataset.datasetv2.provenance.origin.purpose) && !_.isEmpty(dataset.datasetv2.provenance.origin.purpose)) + questionAnswers['properties/provenance/origin/purpose'] = module.exports.returnAsArray(dataset.datasetv2.provenance.origin.purpose); + if (!_.isNil(dataset.datasetv2.provenance.origin.source) && !_.isEmpty(dataset.datasetv2.provenance.origin.source)) + questionAnswers['properties/provenance/origin/source'] = module.exports.returnAsArray(dataset.datasetv2.provenance.origin.source); + if ( + !_.isNil(dataset.datasetv2.provenance.origin.collectionSituation) && + !_.isEmpty(dataset.datasetv2.provenance.origin.collectionSituation) + ) + questionAnswers['properties/provenance/origin/collectionSituation'] = module.exports.returnAsArray( + dataset.datasetv2.provenance.origin.collectionSituation + ); + //Temporal + if ( + !_.isNil(dataset.datasetv2.provenance.temporal.accrualPeriodicity) && + !_.isEmpty(dataset.datasetv2.provenance.temporal.accrualPeriodicity) + ) + questionAnswers['properties/provenance/temporal/accrualPeriodicity'] = dataset.datasetv2.provenance.temporal.accrualPeriodicity; + if ( + !_.isNil(dataset.datasetv2.provenance.temporal.distributionReleaseDate) && + !_.isEmpty(dataset.datasetv2.provenance.temporal.distributionReleaseDate) + ) + questionAnswers['properties/provenance/temporal/distributionReleaseDate'] = module.exports.returnAsDate( + dataset.datasetv2.provenance.temporal.distributionReleaseDate + ); + if (!_.isNil(dataset.datasetv2.provenance.temporal.startDate) && !_.isEmpty(dataset.datasetv2.provenance.temporal.startDate)) + questionAnswers['properties/provenance/temporal/startDate'] = module.exports.returnAsDate( + 
dataset.datasetv2.provenance.temporal.startDate + ); + if (!_.isNil(dataset.datasetv2.provenance.temporal.endDate) && !_.isEmpty(dataset.datasetv2.provenance.temporal.endDate)) + questionAnswers['properties/provenance/temporal/endDate'] = module.exports.returnAsDate( + dataset.datasetv2.provenance.temporal.endDate + ); + if (!_.isNil(dataset.datasetv2.provenance.temporal.timeLag) && !_.isEmpty(dataset.datasetv2.provenance.temporal.timeLag)) + questionAnswers['properties/provenance/temporal/timeLag'] = dataset.datasetv2.provenance.temporal.timeLag; + //Accessibility + //Usage + if ( + !_.isNil(dataset.datasetv2.accessibility.usage.dataUseLimitation) && + !_.isEmpty(dataset.datasetv2.accessibility.usage.dataUseLimitation) + ) + questionAnswers['properties/accessibility/usage/dataUseLimitation'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.usage.dataUseLimitation + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.usage.dataUseRequirements) && + !_.isEmpty(dataset.datasetv2.accessibility.usage.dataUseRequirements) + ) + questionAnswers['properties/accessibility/usage/dataUseRequirements'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.usage.dataUseRequirements + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.usage.resourceCreator) && + !_.isEmpty(dataset.datasetv2.accessibility.usage.resourceCreator) + ) + questionAnswers['properties/accessibility/usage/resourceCreator'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.usage.resourceCreator + ); + if (!_.isNil(dataset.datasetv2.accessibility.usage.investigations) && !_.isEmpty(dataset.datasetv2.accessibility.usage.investigations)) + questionAnswers['properties/accessibility/usage/investigations'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.usage.investigations + ); + if (!_.isNil(dataset.datasetv2.accessibility.usage.isReferencedBy) && !_.isEmpty(dataset.datasetv2.accessibility.usage.isReferencedBy)) + 
questionAnswers['properties/accessibility/usage/isReferencedBy'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.usage.isReferencedBy + ); + //Access + if (!_.isNil(dataset.datasetv2.accessibility.access.accessRights) && !_.isEmpty(dataset.datasetv2.accessibility.access.accessRights)) + questionAnswers['properties/accessibility/access/accessRights'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.access.accessRights + ); + if (!_.isNil(dataset.datasetv2.accessibility.access.accessService) && !_.isEmpty(dataset.datasetv2.accessibility.access.accessService)) + questionAnswers['properties/accessibility/access/accessService'] = dataset.datasetv2.accessibility.access.accessService; + if ( + !_.isNil(dataset.datasetv2.accessibility.access.accessRequestCost) && + !_.isEmpty(dataset.datasetv2.accessibility.access.accessRequestCost) + ) + questionAnswers['properties/accessibility/access/accessRequestCost'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.access.accessRequestCost + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.access.deliveryLeadTime) && + !_.isEmpty(dataset.datasetv2.accessibility.access.deliveryLeadTime) + ) + questionAnswers['properties/accessibility/access/deliveryLeadTime'] = dataset.datasetv2.accessibility.access.deliveryLeadTime; + if (!_.isNil(dataset.datasetv2.accessibility.access.jurisdiction) && !_.isEmpty(dataset.datasetv2.accessibility.access.jurisdiction)) + questionAnswers['properties/accessibility/access/jurisdiction'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.access.jurisdiction + ); + if (!_.isNil(dataset.datasetv2.accessibility.access.dataProcessor) && !_.isEmpty(dataset.datasetv2.accessibility.access.dataProcessor)) + questionAnswers['properties/accessibility/access/dataProcessor'] = dataset.datasetv2.accessibility.access.dataProcessor; + if ( + !_.isNil(dataset.datasetv2.accessibility.access.dataController) && + 
!_.isEmpty(dataset.datasetv2.accessibility.access.dataController) + ) + questionAnswers['properties/accessibility/access/dataController'] = dataset.datasetv2.accessibility.access.dataController; + //FormatAndStandards + if ( + !_.isNil(dataset.datasetv2.accessibility.formatAndStandards.vocabularyEncodingScheme) && + !_.isEmpty(dataset.datasetv2.accessibility.formatAndStandards.vocabularyEncodingScheme) + ) + questionAnswers['properties/accessibility/formatAndStandards/vocabularyEncodingScheme'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.formatAndStandards.vocabularyEncodingScheme + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.formatAndStandards.conformsTo) && + !_.isEmpty(dataset.datasetv2.accessibility.formatAndStandards.conformsTo) + ) + questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.formatAndStandards.conformsTo + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.formatAndStandards.language) && + !_.isEmpty(dataset.datasetv2.accessibility.formatAndStandards.language) + ) + questionAnswers['properties/accessibility/formatAndStandards/language'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.formatAndStandards.language + ); + if ( + !_.isNil(dataset.datasetv2.accessibility.formatAndStandards.format) && + !_.isEmpty(dataset.datasetv2.accessibility.formatAndStandards.format) + ) + questionAnswers['properties/accessibility/formatAndStandards/format'] = module.exports.returnAsArray( + dataset.datasetv2.accessibility.formatAndStandards.format + ); + //EnrichmentAndLinkage + if ( + !_.isNil(dataset.datasetv2.enrichmentAndLinkage.qualifiedRelation) && + !_.isEmpty(dataset.datasetv2.enrichmentAndLinkage.qualifiedRelation) + ) + questionAnswers['properties/enrichmentAndLinkage/qualifiedRelation'] = module.exports.returnAsArray( + dataset.datasetv2.enrichmentAndLinkage.qualifiedRelation + ); + if 
(!_.isNil(dataset.datasetv2.enrichmentAndLinkage.derivation) && !_.isEmpty(dataset.datasetv2.enrichmentAndLinkage.derivation)) + questionAnswers['properties/enrichmentAndLinkage/derivation'] = module.exports.returnAsArray( + dataset.datasetv2.enrichmentAndLinkage.derivation + ); + if (!_.isNil(dataset.datasetv2.enrichmentAndLinkage.tools) && !_.isEmpty(dataset.datasetv2.enrichmentAndLinkage.tools)) + questionAnswers['properties/enrichmentAndLinkage/tools'] = module.exports.returnAsArray(dataset.datasetv2.enrichmentAndLinkage.tools); + //Observations + if (!_.isNil(dataset.datasetv2.observations.observations) && !_.isEmpty(dataset.datasetv2.observations.observations)) + questionAnswers['properties/observations/observations'] = dataset.datasetv2.observations.observations; + + return questionAnswers; + }, + + returnAsArray: value => { + if (typeof value === 'string') return [value]; + return value; + }, + + returnAsDate: value => { + return moment(new Date(value)).format('DD/MM/YYYY'); + }, + + populateStructuralMetadata: dataset => { + let structuralMetadata = []; + + for (const dataClass of dataset.datasetfields.technicaldetails) { + for (const dataElement of dataClass.elements) { + structuralMetadata.push({ + tableName: dataClass.label, + tableDescription: dataClass.description, + columnName: dataElement.label, + columnDescription: dataElement.description, + dataType: dataElement.dataType.label, + sensitive: '', + }); + } + } + + return structuralMetadata; + }, + + //POST api/v1/dataset-onboarding + createNewDatasetVersion: async (req, res) => { + try { + const publisherID = req.body.publisherID || null; + const pid = req.body.pid || null; + const currentVersionId = req.body.currentVersionId || null; + + //If no publisher then return error + if (!publisherID) return res.status(404).json({ status: 'error', message: 'Dataset publisher could not be found.' 
}); + + const publisherData = await PublisherModel.find({ _id: publisherID }).lean(); + let publisherObject = { + summary: { + publisher: { + identifier: publisherID, + name: publisherData[0].publisherDetails.name, + memberOf: publisherData[0].publisherDetails.memberOf, + }, + }, + }; + + //If publisher but no pid then new dataset - create new pid and version is 1.0.0 + if (!pid) { + let uuid = ''; + while (uuid === '') { + uuid = uuidv4(); + if ((await Data.find({ pid: uuid })).length > 0) uuid = ''; + } + + let uniqueID = ''; + while (uniqueID === '') { + uniqueID = parseInt(Math.random().toString().replace('0.', '')); + if ((await Data.find({ id: uniqueID })).length > 0) uniqueID = ''; + } + + let data = new Data(); + data.pid = uuid; + data.datasetVersion = '1.0.0'; + data.id = uniqueID; + data.datasetid = 'New dataset'; + data.name = `New dataset ${moment(Date.now()).format('D MMM YYYY HH:mm')}`; + data.datasetv2 = publisherObject; + data.type = 'dataset'; + data.activeflag = 'draft'; + data.source = 'HDRUK MDC'; + data.is5Safes = publisherData[0].allowAccessRequestManagement; + data.timestamps.created = Date.now(); + data.timestamps.updated = Date.now(); + data.questionAnswers = JSON.stringify({ 'summary/title': `New dataset ${moment(Date.now()).format('D MMM YYYY HH:mm')}` }); + await data.save(); + + return res.status(200).json({ success: true, data: { id: data._id } }); + } else { + //check does a version already exist with the pid that is in draft + let isDraftDataset = await Data.findOne({ pid, activeflag: 'draft' }, { _id: 1 }); + + if (!_.isNil(isDraftDataset)) { + //if yes then return with error + return res.status(200).json({ success: true, data: { id: isDraftDataset._id, draftExists: true } }); + } + + //else create new version of currentVersionId and send back new id + let datasetToCopy = await Data.findOne({ _id: currentVersionId }); + + if (_.isNil(datasetToCopy)) { + return res.status(404).json({ status: 'error', message: 'Dataset to copy is 
not found' }); + } + + //create new uniqueID + let uniqueID = ''; + while (uniqueID === '') { + uniqueID = parseInt(Math.random().toString().replace('0.', '')); + if ((await Data.find({ id: uniqueID })).length > 0) uniqueID = ''; + } + + //increment the dataset version + let newVersion = module.exports.incrementVersion([1, 0, 0], datasetToCopy.datasetVersion); + + let data = new Data(); + data.pid = pid; + data.datasetVersion = newVersion; + data.id = uniqueID; + data.datasetid = 'New dataset version'; + data.name = datasetToCopy.name; + data.datasetv2 = publisherObject; + data.type = 'dataset'; + data.activeflag = 'draft'; + data.source = 'HDRUK MDC'; + data.is5Safes = publisherData[0].allowAccessRequestManagement; + data.questionAnswers = datasetToCopy.questionAnswers; + data.structuralMetadata = datasetToCopy.structuralMetadata; + data.percentageCompleted = datasetToCopy.percentageCompleted; + data.timestamps.created = Date.now(); + data.timestamps.updated = Date.now(); + await data.save(); + + return res.status(200).json({ success: true, data: { id: data._id } }); + } + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err.message }); + } + }, + + incrementVersion: (masks, version) => { + if (typeof masks === 'string') { + version = masks; + masks = [0, 0, 0]; + } + + let bitMap = ['major', 'minor', 'patch']; + let bumpAt = 'patch'; + let oldVer = version.match(/\d+/g); + + for (let i = 0; i < masks.length; ++i) { + if (masks[i] === 1) { + bumpAt = bitMap[i]; + break; + } + } + + let bumpIdx = bitMap.indexOf(bumpAt); + let newVersion = []; + for (let i = 0; i < oldVer.length; ++i) { + if (i < bumpIdx) { + newVersion[i] = +oldVer[i]; + } else if (i === bumpIdx) { + newVersion[i] = +oldVer[i] + 1; + } else { + newVersion[i] = 0; + } + } + + return newVersion.join('.'); + }, + + //PATCH api/v1/dataset-onboarding/:id + updateDatasetVersionDataElement: async (req, res) => { + try { + // 1. 
Id is the _id object in mongoo.db not the generated id or dataset Id + const { + params: { id }, + body: data, + } = req; + // 2. Destructure body and update only specific fields by building a segregated non-user specified update object + let updateObj = module.exports.buildUpdateObject({ + ...data, + user: req.user, + }); + // 3. Find data request by _id to determine current status + let dataset = await Data.findOne({ _id: id }); + // 4. Check access record + if (!dataset) { + return res.status(404).json({ status: 'error', message: 'Dataset not found.' }); + } + // 5. Update record object + if (_.isEmpty(updateObj)) { + if (data.key !== 'structuralMetadata') { + return res.status(404).json({ status: 'error', message: 'Update failed' }); + } else { + let structuralMetadata = JSON.parse(data.rows); + + if (_.isEmpty(structuralMetadata)) { + return res.status(404).json({ status: 'error', message: 'Update failed' }); + } else { + Data.findByIdAndUpdate( + { _id: id }, + { structuralMetadata, percentageCompleted: data.percentageCompleted, 'timestamps.updated': Date.now() }, + { new: true }, + err => { + if (err) { + console.error(err); + throw err; + } + } + ); + + return res.status(200).json(); + } + } + } else { + module.exports.updateApplication(dataset, updateObj).then(dataset => { + const { unansweredAmendments = 0, answeredAmendments = 0, dirtySchema = false } = dataset; + if (dirtySchema) { + accessRequestRecord.jsonSchema = JSON.parse(accessRequestRecord.jsonSchema); + accessRequestRecord = amendmentController.injectAmendments(accessRequestRecord, constants.userTypes.APPLICANT, req.user); + } + let data = { + status: 'success', + unansweredAmendments, + answeredAmendments, + }; + if (dirtySchema) { + data = { + ...data, + jsonSchema: accessRequestRecord.jsonSchema, + }; + } + + if (updateObj.updatedQuestionId === 'summary/title') { + let questionAnswers = JSON.parse(updateObj.questionAnswers); + let title = questionAnswers['summary/title']; + + if (title.length 
>= 2) { + Data.findByIdAndUpdate({ _id: id }, { name: title, 'timestamps.updated': Date.now() }, { new: true }, err => { + if (err) { + console.error(err); + throw err; + } + }); + data.name = title; + } + } + + // 6. Return new data object + return res.status(200).json(data); + }); + } + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err.message }); + } + }, + + //POST api/v1/dataset-onboarding/:id + submitDatasetVersion: async (req, res) => { + try { + // 1. id is the _id object in mongoo.db not the generated id or dataset Id + const id = req.params.id || null; + + if (!id) return res.status(404).json({ status: 'error', message: 'Dataset _id could not be found.' }); + + // 3. Check user type and authentication to submit application + /* let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } */ + + //update dataset to inreview - constants.datatsetStatuses.INREVIEW + + let updatedDataset = await Data.findOneAndUpdate( + { _id: id }, + { activeflag: constants.datatsetStatuses.INREVIEW, 'timestamps.updated': Date.now(), 'timestamps.submitted': Date.now() } + ); + + /* , err => { + if (err) return res.send(err); + return res.json({ success: true }); + }); */ + + //emails / notifications + /* await module.exports.createNotifications( + accessRecord.submissionType === constants.submissionTypes.INITIAL + ? 
constants.notificationTypes.SUBMITTED + : constants.notificationTypes.RESUBMITTED, + {}, + accessRecord, + req.user + ); */ + + return res.status(200).json({ status: 'success' }); + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err.message }); + } + }, + + buildUpdateObject: data => { + let updateObj = {}; + let { questionAnswers, updatedQuestionId, user, jsonSchema = '', percentageCompleted } = data; + if (questionAnswers) { + updateObj = { ...updateObj, questionAnswers, updatedQuestionId, user, percentageCompleted, 'timestamps.updated': Date.now() }; + } + + if (!_.isEmpty(jsonSchema)) { + updateObj = { ...updateObj, jsonSchema, 'timestamps.updated': Date.now() }; + } + + return updateObj; + }, + + updateApplication: async (accessRecord, updateObj) => { + // 1. Extract properties + let { activeflag, _id } = accessRecord; + let { updatedQuestionId = '', user, percentageCompleted } = updateObj; + // 2. If application is in progress, update initial question answers + if (activeflag === constants.datatsetStatuses.DRAFT) { + await Data.findByIdAndUpdate(_id, updateObj, { new: true }, err => { + if (err) { + console.error(err); + throw err; + } + }); + return accessRecord; + // 3. Else if application has already been submitted make amendment + } else if (activeflag === constants.applicationStatuses.INREVIEW || activeflag === constants.applicationStatuses.SUBMITTED) { + if (_.isNil(updateObj.questionAnswers)) { + return accessRecord; + } + let updatedAnswer = JSON.parse(updateObj.questionAnswers)[updatedQuestionId]; + accessRecord = amendmentController.handleApplicantAmendment(accessRecord.toObject(), updatedQuestionId, '', updatedAnswer, user); + await DataRequestModel.replaceOne({ _id }, accessRecord, err => { + if (err) { + console.error(err); + throw err; + } + }); + return accessRecord; + } + }, + + //PUT api/v1/dataset-onboarding/:id + changeDatasetVersionStatus: async (req, res) => { + try { + // 1. 
Id is the _id object in MongoDb not the generated id or dataset Id + // 2. Get the userId + const id = req.params.id || null; + let { _id, id: userId } = req.user; + let { applicationStatus, applicationStatusDesc = '' } = req.body; + + if (!id) return res.status(404).json({ status: 'error', message: 'Dataset _id could not be found.' }); + + if (applicationStatus === 'approved') { + let dataset = await Data.findOne({ _id: id }); + if (!dataset) return res.status(404).json({ status: 'error', message: 'Dataset could not be found.' }); + + dataset.questionAnswers = JSON.parse(dataset.questionAnswers); + const publisherData = await PublisherModel.find({ _id: dataset.datasetv2.summary.publisher.identifier }).lean(); + + //1. create new version on MDC with version number and take datasetid and store + let metadataCatalogueLink = process.env.MDC_Config_HDRUK_metadataUrl || 'https://modelcatalogue.cs.ox.ac.uk/hdruk-preprod'; + const loginDetails = { + username: process.env.MDC_Config_HDRUK_username || '', + password: process.env.MDC_Config_HDRUK_password || '', + }; //Paul - move to env variables + await axios + .post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { + withCredentials: true, + timeout: 5000, + }) + .then(async session => { + axios.defaults.headers.Cookie = session.headers['set-cookie'][0]; // get cookie from request + + let jsonData = JSON.stringify(await module.exports.buildJSONFile(dataset)); + fs.writeFileSync(__dirname + `/datasetfiles/${dataset._id}.json`, jsonData); + + var data = new FormData(); + data.append('folderId', publisherData[0].mdcFolderId); + data.append('importFile', fs.createReadStream(__dirname + `/datasetfiles/${dataset._id}.json`)); + data.append('finalised', 'false'); + data.append('importAsNewDocumentationVersion', 'true'); + + await axios + .post( + metadataCatalogueLink + '/api/dataModels/import/ox.softeng.metadatacatalogue.core.spi.json/JsonImporterService/1.1', + data, + { + withCredentials: true, + timeout: 
5000, + headers: { + ...data.getHeaders(), + }, + } + ) + .then(async newDatasetVersion => { + let newDatasetVersionId = newDatasetVersion.data.items[0].id; + fs.unlinkSync(__dirname + `/datasetfiles/${dataset._id}.json`); + + const updatedDatasetDetails = { + documentationVersion: dataset.datasetVersion, + }; + + await axios + .put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}`, updatedDatasetDetails, { + withCredentials: true, + timeout: 5000, + }) + .catch(err => { + console.log('Error when trying to update the version number on the MDC - ' + err.message); + }); + + await axios + .put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}/finalise`, { + withCredentials: true, + timeout: 5000, + }) + .catch(err => { + console.log('Error when trying to finalise the dataset on the MDC - ' + err.message); + }); + + // Adding to DB + let observations = await module.exports.buildObservations(dataset.questionAnswers); + + let datasetv2Object = { + identifier: newDatasetVersionId, + version: dataset.datasetVersion, + issued: Date.now(), + modified: Date.now(), + revisions: [], + summary: { + title: dataset.questionAnswers['summary/title'] || '', + abstract: dataset.questionAnswers['summary/abstract'] || '', + publisher: { + identifier: publisherData[0]._id.toString(), + name: publisherData[0].publisherDetails.name, + logo: publisherData[0].publisherDetails.logo || '', + description: publisherData[0].publisherDetails.description || '', + contactPoint: publisherData[0].publisherDetails.contactPoint || [], + memberOf: publisherData[0].publisherDetails.memberOf, + accessRights: publisherData[0].publisherDetails.accessRights || [], + deliveryLeadTime: publisherData[0].publisherDetails.deliveryLeadTime || '', + accessService: publisherData[0].publisherDetails.accessService || '', + accessRequestCost: publisherData[0].publisherDetails.accessRequestCost || '', + dataUseLimitation: publisherData[0].publisherDetails.dataUseLimitation || [], + 
dataUseRequirements: publisherData[0].publisherDetails.dataUseRequirements || [], + }, + contactPoint: dataset.questionAnswers['summary/contactPoint'] || '', + keywords: dataset.questionAnswers['summary/keywords'] || [], + alternateIdentifiers: dataset.questionAnswers['summary/alternateIdentifiers'] || [], + doiName: dataset.questionAnswers['summary/doiName'] || '', + }, + documentation: { + description: dataset.questionAnswers['properties/documentation/description'] || '', + associatedMedia: dataset.questionAnswers['properties/documentation/associatedMedia'] || [], + isPartOf: dataset.questionAnswers['properties/documentation/isPartOf'] || [], + }, + coverage: { + spatial: dataset.questionAnswers['properties/coverage/spatial'] || '', + typicalAgeRange: dataset.questionAnswers['properties/coverage/typicalAgeRange'] || '', + physicalSampleAvailability: dataset.questionAnswers['properties/coverage/physicalSampleAvailability'] || [], + followup: dataset.questionAnswers['properties/coverage/followup'] || '', + pathway: dataset.questionAnswers['properties/coverage/pathway'] || '', + }, + provenance: { + origin: { + purpose: dataset.questionAnswers['properties/provenance/origin/purpose'] || [], + source: dataset.questionAnswers['properties/provenance/origin/source'] || [], + collectionSituation: dataset.questionAnswers['properties/provenance/origin/collectionSituation'] || [], + }, + temporal: { + accrualPeriodicity: dataset.questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || '', + distributionReleaseDate: dataset.questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', + startDate: dataset.questionAnswers['properties/provenance/temporal/startDate'] || '', + endDate: dataset.questionAnswers['properties/provenance/temporal/endDate'] || '', + timeLag: dataset.questionAnswers['properties/provenance/temporal/timeLag'] || '', + }, + }, + accessibility: { + usage: { + dataUseLimitation: 
dataset.questionAnswers['properties/accessibility/usage/dataUseLimitation'] || [], + dataUseRequirements: dataset.questionAnswers['properties/accessibility/usage/dataUseRequirements'] || [], + resourceCreator: dataset.questionAnswers['properties/accessibility/usage/resourceCreator'] || '', + investigations: dataset.questionAnswers['properties/accessibility/usage/investigations'] || [], + isReferencedBy: dataset.questionAnswers['properties/accessibility/usage/isReferencedBy'] || [], + }, + access: { + accessRights: dataset.questionAnswers['properties/accessibility/access/accessRights'] || [], + accessService: dataset.questionAnswers['properties/accessibility/access/accessService'] || '', + accessRequestCost: dataset.questionAnswers['properties/accessibility/access/accessRequestCost'] || '', + deliveryLeadTime: dataset.questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', + jurisdiction: dataset.questionAnswers['properties/accessibility/access/jurisdiction'] || [], + dataProcessor: dataset.questionAnswers['properties/accessibility/access/dataProcessor'] || '', + dataController: dataset.questionAnswers['properties/accessibility/access/dataController'] || '', + }, + formatAndStandards: { + vocabularyEncodingScheme: + dataset.questionAnswers['properties/accessibility/formatAndStandards/vocabularyEncodingScheme'] || [], + conformsTo: dataset.questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] || [], + language: dataset.questionAnswers['properties/accessibility/formatAndStandards/language'] || [], + format: dataset.questionAnswers['properties/accessibility/formatAndStandards/format'] || [], + }, + }, + enrichmentAndLinkage: { + qualifiedRelation: dataset.questionAnswers['properties/enrichmentAndLinkage/qualifiedRelation'] || [], + derivation: dataset.questionAnswers['properties/enrichmentAndLinkage/derivation'] || [], + tools: dataset.questionAnswers['properties/enrichmentAndLinkage/tools'] || [], + }, + observations: 
observations, + }; + + let previousDataset = await Data.findOneAndUpdate({ pid: dataset.pid, activeflag: 'active' }, { activeflag: 'archive' }); + let previousCounter = 0; + if (previousDataset) previousCounter = previousDataset.counter || 0; + + //get technicaldetails and metadataQuality + let technicalDetails = await module.exports.buildTechnicalDetails(dataset.structuralMetadata); + let metadataQuality = await module.exports.buildMetadataQuality(datasetv2Object); + + await Data.findOneAndUpdate( + { _id: id }, + { + datasetid: newDatasetVersionId, + datasetVersion: dataset.datasetVersion, + name: dataset.questionAnswers['summary/title'] || '', + description: dataset.questionAnswers['properties/documentation/abstract'] || '', + activeflag: 'active', + tags: { + features: dataset.questionAnswers['summary/keywords'] || [], + }, + hasTechnicalDetails: !_.isEmpty(technicalDetails) ? true : false, + 'timestamps.updated': Date.now(), + 'timestamps.published': Date.now(), + counter: previousCounter, + datasetfields: { + publisher: `${publisherData[0].publisherDetails.memberOf} > ${publisherData[0].publisherDetails.name}`, + geographicCoverage: dataset.questionAnswers['properties/coverage/spatial'] || '', + physicalSampleAvailability: dataset.questionAnswers['properties/coverage/physicalSampleAvailability'] || [], + abstract: dataset.questionAnswers['summary/abstract'] || '', + releaseDate: dataset.questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', + accessRequestDuration: dataset.questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', + //conformsTo: dataset.questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] || '', + //accessRights: dataset.questionAnswers['properties/accessibility/access/accessRights'] || '', + //jurisdiction: dataset.questionAnswers['properties/accessibility/access/jurisdiction'] || '', + datasetStartDate: dataset.questionAnswers['properties/provenance/temporal/startDate'] || '', 
+ datasetEndDate: dataset.questionAnswers['properties/provenance/temporal/endDate'] || '', + //statisticalPopulation: datasetMDC.statisticalPopulation, + ageBand: dataset.questionAnswers['properties/coverage/typicalAgeRange'] || '', + contactPoint: dataset.questionAnswers['summary/contactPoint'] || '', + periodicity: dataset.questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || '', + + metadataquality: metadataQuality, + //datautility: dataUtility ? dataUtility : {}, + //metadataschema: metadataSchema && metadataSchema.data ? metadataSchema.data : {}, + technicaldetails: technicalDetails, + //versionLinks: versionLinks && versionLinks.data && versionLinks.data.items ? versionLinks.data.items : [], + phenotypes: [], + }, + datasetv2: datasetv2Object, + applicationStatusDesc: applicationStatusDesc, + } + ); + }) + .catch(err => { + console.log('Error when trying to create new dataset on the MDC - ' + err.message); + }); + }) + .catch(err => { + console.log('Error when trying to login to MDC - ' + err.message); + }); + + await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { + console.log('Error when trying to logout of the MDC - ' + err.message); + }); + + filtersService.optimiseFilters('dataset'); + + return res.status(200).json({ status: 'success' }); + } else if (applicationStatus === 'rejected') { + await Data.findOneAndUpdate( + { _id: id }, + { + activeflag: constants.datatsetStatuses.REJECTED, + applicationStatusDesc: applicationStatusDesc, + 'timestamps.rejected': Date.now(), + 'timestamps.updated': Date.now(), + } + ); + + return res.status(200).json({ status: 'success' }); + } else if (applicationStatus === 'archived') { + //await Data.findOneAndUpdate({ _id: id }, { activeflag: constants.datatsetStatuses.ARCHIVED }); + } else if (applicationStatus === 'unarchived') { + //await Data.findOneAndUpdate({ _id: id }, { activeflag: constants.datatsetStatuses.ARCHIVED }); + 
} + + if (applicationStatusDesc) { + accessRecord.applicationStatusDesc = inputSanitizer.removeNonBreakingSpaces(applicationStatusDesc); + isDirty = true; + } + + // 3. Check user type and authentication to submit application + /* let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } */ + + //update dataset to inreview - constants.datatsetStatuses.INREVIEW + + //let updatedDataset = await Data.findOneAndUpdate({ _id: id }, { activeflag: constants.datatsetStatuses.INREVIEW }); + } catch (err) { + console.error(err.message); + res.status(500).json({ + status: 'error', + message: 'An error occurred updating the dataset status', + }); + } + }, + + buildObservations: async observationsData => { + let observationsArray = []; + let regex = new RegExp('properties/observation/', 'g'); + + let observationQuestions = []; + Object.keys(observationsData).forEach(item => { + if (item.match(regex)) { + observationQuestions.push({ key: item, value: observationsData[item] }); + } + }); + + let observationUniqueIds = ['']; + observationQuestions.forEach(item => { + let [, uniqueId] = item.key.split('_'); + if (!_.isEmpty(uniqueId) && !observationUniqueIds.find(x => x === uniqueId)) { + observationUniqueIds.push(uniqueId); + } + }); + + observationUniqueIds.forEach(uniqueId => { + let entry = {}; + if (uniqueId === '') { + observationQuestions.forEach(question => { + if (!question.key.includes('_')) { + let [, key] = question.key.split('properties/observation/'); + let newEntry = { [key]: question.value }; + entry = { ...entry, ...newEntry }; + } + }); + } else { + observationQuestions.forEach(question => { + if (question.key.includes(uniqueId)) { + let [keyLong] = question.key.split('_'); + let [, key] = keyLong.split('properties/observation/'); + let newEntry = { [key]: question.value }; + entry = { ...entry, 
...newEntry }; + } + }); + } + observationsArray.push(entry); + }); + + return observationsArray; + }, + + buildTechnicalDetails: async structuralMetadata => { + let technicalDetailsClasses = []; + + const orderedMetadata = _.map(_.groupBy(_.orderBy(structuralMetadata, ['tableName'], ['asc']), 'tableName'), (children, tableName) => ({ + tableName, + children, + })); + + orderedMetadata.forEach(item => { + let technicalDetailsElements = []; + item.children.forEach(child => { + technicalDetailsElements.push({ + label: child.columnName, + description: child.columnDescription, + domainType: 'DataElement', + dataType: { + label: child.dataType, + domainType: 'PrimitiveType', + }, + }); + }); + + technicalDetailsClasses.push({ + label: item.children[0].tableName, + description: item.children[0].tableDescription, + domainType: 'DataClass', + elements: technicalDetailsElements, + }); + }); + + return technicalDetailsClasses; + }, + + buildJSONFile: async dataset => { + let jsonFile = {}; + let metadata = []; + let childDataClasses = []; + let regex = new RegExp('properties/observation/', 'g'); + + let observationQuestions = []; + Object.keys(dataset.questionAnswers).forEach(item => { + if (item.match(regex)) { + observationQuestions.push({ key: item, value: dataset.questionAnswers[item] }); + } else { + const newDatasetCatalogueItems = { + namespace: 'org.healthdatagateway', + key: item, + value: dataset.questionAnswers[item], + }; + metadata.push(newDatasetCatalogueItems); + } + }); + + let observationUniqueIds = ['']; + observationQuestions.forEach(item => { + let [, uniqueId] = item.key.split('_'); + if (!_.isEmpty(uniqueId) && !observationUniqueIds.find(x => x === uniqueId)) { + observationUniqueIds.push(uniqueId); + } + }); + + let observations = []; + observationUniqueIds.forEach(uniqueId => { + let entry = {}; + if (uniqueId === '') { + observationQuestions.forEach(question => { + if (!question.key.includes('_')) { + let [, key] = 
question.key.split('properties/observation/'); + let newEntry = { [key]: question.value }; + entry = { ...entry, ...newEntry }; + } + }); + } else { + observationQuestions.forEach(question => { + if (question.key.includes(uniqueId)) { + let [keyLong] = question.key.split('_'); + let [, key] = keyLong.split('properties/observation/'); + let newEntry = { [key]: question.value }; + entry = { ...entry, ...newEntry }; + } + }); + } + observations.push(entry); + }); + + if (!_.isEmpty(observations)) { + const newDatasetCatalogueItems = { + namespace: 'org.healthdatagateway', + key: 'properties/observations/observations', + value: JSON.stringify(observations), + }; + metadata.push(newDatasetCatalogueItems); + } + + const orderedMetadata = _.map( + _.groupBy(_.orderBy(dataset.structuralMetadata, ['tableName'], ['asc']), 'tableName'), + (children, tableName) => ({ tableName, children }) + ); + + orderedMetadata.forEach(item => { + let childDataElements = []; + item.children.forEach(child => { + childDataElements.push({ + label: child.columnName, + description: child.columnDescription, + dataType: { + label: child.dataType, + domainType: 'PrimitiveType', + }, + }); + }); + + childDataClasses.push({ + label: item.children[0].tableName, + description: item.children[0].tableDescription, + childDataElements: childDataElements, + }); + }); + + jsonFile = { + dataModel: { + label: dataset.questionAnswers['summary/title'], + description: dataset.questionAnswers['summary/abstract'], + type: 'Data Asset', + metadata: metadata, + childDataClasses: childDataClasses, + }, + }; + + return jsonFile; + }, + + //GET api/v1/data-access-request/checkUniqueTitle + checkUniqueTitle: async (req, res) => { + let { pid, title = '' } = req.query; + let regex = new RegExp(`^${title}$`, 'i'); + let dataset = await Data.findOne({ name: regex, pid: { $ne: pid } }); + return res.status(200).json({ isUniqueTitle: dataset ? 
false : true }); + }, + + //GET api/v1/data-access-request/checkUniqueTitle + buildMetadataQuality: async (dataset, pid) => { + //VALIDATION_WEIGHTS_PATH = os.path.join(CWD, 'config', 'weights', 'latest', 'weights.v2.json') + let weights = { + //'1: Summary': { + identifier: 0.026845638, + 'summary.title': 0.026845638, + 'summary.abstract': 0.026845638, + 'summary.contactPoint': 0.026845638, + 'summary.keywords': 0.026845638, + 'summary.doiName': 0.026845638, + 'summary.publisher.name': 0.026845638, + 'summary.publisher.contactPoint': 0.0, + 'summary.publisher.memberOf': 0.006711409, + //}, + //'2: Documentation': { + 'documentation.description': 0.026845638, + 'documentation.associatedMedia': 0.0, + 'documentation.isPartOf': 0.0, + //}, + //'3: Coverage': { + 'coverage.spatial': 0.026845638, + 'coverage.typicalAgeRange': 0.026845638, + 'coverage.physicalSampleAvailability': 0.026845638, + 'coverage.followup': 0.006711409, + 'coverage.pathway': 0.006711409, + //}, + //'4: Provenance': { + 'provenance.origin.purpose': 0.006711409, + 'provenance.origin.source': 0.006711409, + 'provenance.origin.collectionSituation': 0.006711409, + 'provenance.temporal.accrualPeriodicity': 0.026845638, + 'provenance.temporal.distributionReleaseDate': 0.0, + 'provenance.temporal.startDate': 0.026845638, + 'provenance.temporal.endDate': 0.0, + 'provenance.temporal.timeLag': 0.006711409, + //}, + //'5: Accessibility': { + 'accessibility.usage.dataUseLimitation': 0.026845638, + 'accessibility.usage.dataUseRequirements': 0.026845638, + 'accessibility.usage.resourceCreator': 0.026845638, + 'accessibility.usage.investigations': 0.006711409, + 'accessibility.usage.isReferencedBy': 0.006711409, + 'accessibility.access.accessRights': 0.026845638, + 'accessibility.access.accessService': 0.006711409, + 'accessibility.access.accessRequestCost': 0.026845638, + 'accessibility.access.deliveryLeadTime': 0.026845638, + 'accessibility.access.jurisdiction': 0.026845638, + 
'accessibility.access.dataController': 0.026845638, + 'accessibility.access.dataProcessor': 0.0, + 'accessibility.formatAndStandards.vocabularyEncodingScheme': 0.026845638, + 'accessibility.formatAndStandards.conformsTo': 0.026845638, + 'accessibility.formatAndStandards.language': 0.026845638, + 'accessibility.formatAndStandards.format': 0.026845638, + //}, + //'6: Enrichment & Linkage': { + 'enrichmentAndLinkage.qualifiedRelation': 0.006711409, + 'enrichmentAndLinkage.derivation': 0.006711409, + 'enrichmentAndLinkage.tools': 0.006711409, + //}, + //'7. Observations': { + 'observation.observedNode': 0.026845638, + 'observation.measuredValue': 0.026845638, + 'observation.disambiguatingDescription': 0.0, + 'observation.observationDate': 0.0, + 'observation.measuredProperty': 0.0, + //}, + //'8. Structural metadata': { + 'structuralMetadata.dataClassesCount': 0.026845638, + 'structuralMetadata.tableName': 0.026845638, + 'structuralMetadata.tableDescription': 0.026845638, + 'structuralMetadata.columnName': 0.026845638, + 'structuralMetadata.columnDescription': 0.026845638, + 'structuralMetadata.dataType': 0.026845638, + 'structuralMetadata.sensitive': 0.026845638, + //}, + }; + + let metadataquality = { + schema_version: '2.0.1', + pid: '', + id: '', + publisher: '', + title: '', + weighted_quality_rating: 'Not Rated', + weighted_quality_score: 0, + weighted_completeness_percent: 0, + weighted_error_percent: 0, + }; + + metadataquality.pid = pid; + metadataquality.id = dataset.identifier; + metadataquality.publisher = dataset.summary.publisher.memberOf + ' > ' + dataset.summary.publisher.name; + metadataquality.title = dataset.summary.title; + + let completeness = []; + let totalCount = 0, + totalWeight = 0; + + Object.entries(weights).forEach(([key, weight]) => { + let datasetValue = module.exports.getDatatsetValue(dataset, key); + + if (key === 'identifier') { + completeness.push({ weight, value: datasetValue }); + totalCount++; + totalWeight += weight; + } else if 
(key === 'structuralMetadata') { + completeness.push({ weight, value: datasetValue }); + totalCount++; + totalWeight += weight; + } + if (datasetValue) { + completeness.push({ value: datasetValue, weight, score: weight }); + totalCount++; + totalWeight += weight; + } else { + completeness.push({ value: datasetValue, weight, score: 0 }); + } + + //special rules around provenance.temporal.accrualPeriodicity = CONTINUOUS + }); + + let schema = {}; + + let rawdata = fs.readFileSync(__dirname + '/schema.json'); + schema = JSON.parse(rawdata); + + const ajv = new Ajv({ strict: false, allErrors: true }); + const validate = ajv.compile(schema); + const valid = validate(dataset); + + let errors = []; + let errorCount = 0, + errorWeight = 0; + + Object.entries(weights).forEach(([key, weight]) => { + let updatedKey = '/' + key.replace(/\./g, '/'); + let errorIndex = Object.keys(validate.errors).find(key => validate.errors[key].dataPath === updatedKey); + if (errorIndex) { + errors.push({ value: key, scor: weight }); + errorCount += 1; + errorWeight += weight; + } else { + errors.push({ value: key, scor: 0 }); + } + }); + + metadataquality.weighted_completeness_percent = Number(100 * totalWeight).toFixed(2); + metadataquality.weighted_error_percent = Number(100 * errorWeight).toFixed(2); + metadataquality.weighted_quality_score = Number(50 * (totalWeight + (1 - errorWeight))).toFixed(2); + + let rating = 'Not Rated'; + if (metadataquality.weighted_quality_score > 60 && metadataquality.weighted_quality_score <= 70) rating = 'Bronze'; + else if (metadataquality.weighted_quality_score > 70 && metadataquality.weighted_quality_score <= 80) rating = 'Silver'; + else if (metadataquality.weighted_quality_score > 80 && metadataquality.weighted_quality_score <= 90) rating = 'Gold'; + else if (metadataquality.weighted_quality_score > 90) rating = 'Platinum'; + metadataquality.weighted_quality_rating = rating; + + return metadataquality; + }, + + getDatatsetValue(dataset, field) { + 
return field.split('.').reduce(function (o, k) { + return o && o[k]; + }, dataset); + }, + + createNotifications: async (type, context, team) => { + const teamName = getTeamName(team); + let options = {}; + let html = ''; + + switch (type) { + case constants.notificationTypes.MEMBERREMOVED: + // 1. Get user removed + const { removedUser } = context; + // 2. Create user notifications + notificationBuilder.triggerNotificationMessage( + [removedUser.id], + `You have been removed from the team ${teamName}`, + 'team unlinked', + teamName + ); + // 3. Create email + options = { + teamName, + }; + html = emailGenerator.generateRemovedFromTeam(options); + emailGenerator.sendEmail([removedUser], constants.hdrukEmail, `You have been removed from the team ${teamName}`, html, false); + break; + case constants.notificationTypes.MEMBERADDED: + // 1. Get users added + const { newUsers } = context; + const newUserIds = newUsers.map(user => user.id); + // 2. Create user notifications + notificationBuilder.triggerNotificationMessage( + newUserIds, + `You have been added to the team ${teamName} on the HDR UK Innovation Gateway`, + 'team', + teamName + ); + // 3. Create email for reviewers + options = { + teamName, + role: constants.roleTypes.REVIEWER, + }; + html = emailGenerator.generateAddedToTeam(options); + emailGenerator.sendEmail( + newUsers, + constants.hdrukEmail, + `You have been added as a reviewer to the team ${teamName} on the HDR UK Innovation Gateway`, + html, + false + ); + // 4. 
Create email for managers + options = { + teamName, + role: constants.roleTypes.MANAGER, + }; + html = emailGenerator.generateAddedToTeam(options); + emailGenerator.sendEmail( + newUsers, + constants.hdrukEmail, + `You have been added as a manager to the team ${teamName} on the HDR UK Innovation Gateway`, + html, + false + ); + break; + case constants.notificationTypes.MEMBERROLECHANGED: + break; + } + }, + + createNotifications: async (type, context, accessRecord, user) => { + // Project details from about application if 5 Safes + let { aboutApplication = {} } = accessRecord; + if (typeof aboutApplication === 'string') { + aboutApplication = JSON.parse(accessRecord.aboutApplication); + } + let { projectName = 'No project name set' } = aboutApplication; + let { projectId, _id, workflow = {}, dateSubmitted = '', jsonSchema, questionAnswers } = accessRecord; + if (_.isEmpty(projectId)) { + projectId = _id; + } + // Parse the schema + if (typeof jsonSchema === 'string') { + jsonSchema = JSON.parse(accessRecord.jsonSchema); + } + if (typeof questionAnswers === 'string') { + questionAnswers = JSON.parse(accessRecord.questionAnswers); + } + let { pages, questionPanels, questionSets: questions } = jsonSchema; + // Publisher details from single dataset + let { + datasetfields: { contactPoint, publisher }, + } = accessRecord.datasets[0]; + let datasetTitles = accessRecord.datasets.map(dataset => dataset.name).join(', '); + // Main applicant (user obj) + let { firstname: appFirstName, lastname: appLastName, email: appEmail } = accessRecord.mainApplicant; + // Requesting user + let { firstname, lastname } = user; + // Instantiate default params + let custodianManagers = [], + custodianUserIds = [], + managerUserIds = [], + emailRecipients = [], + options = {}, + html = '', + attachmentContent = '', + filename = '', + jsonContent = {}, + authors = [], + attachments = []; + let applicants = datarequestUtil.extractApplicantNames(questionAnswers).join(', '); + // Fall back for 
single applicant on short application form + if (_.isEmpty(applicants)) { + applicants = `${appFirstName} ${appLastName}`; + } + // Get authors/contributors (user obj) + if (!_.isEmpty(accessRecord.authors)) { + authors = accessRecord.authors.map(author => { + let { firstname, lastname, email, id } = author; + return { firstname, lastname, email, id }; + }); + } + // Deconstruct workflow context if passed + let { + workflowName = '', + stepName = '', + reviewerNames = '', + reviewSections = '', + nextStepName = '', + stepReviewers = [], + stepReviewerUserIds = [], + currentDeadline = '', + remainingReviewers = [], + remainingReviewerUserIds = [], + dateDeadline, + } = context; + + switch (type) { + case constants.notificationTypes.STATUSCHANGE: + // 1. Create notifications + // Custodian manager and current step reviewer notifications + if (_.has(accessRecord.datasets[0].toObject(), 'publisher.team.users')) { + // Retrieve all custodian manager user Ids and active step reviewers + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + let activeStep = workflowController.getActiveWorkflowStep(workflow); + stepReviewers = workflowController.getStepReviewers(activeStep); + // Create custodian notification + let statusChangeUserIds = [...custodianManagers, ...stepReviewers].map(user => user.id); + await notificationBuilder.triggerNotificationMessage( + statusChangeUserIds, + `${appFirstName} ${appLastName}'s Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${firstname} ${lastname}`, + 'data access request', + accessRecord._id + ); + } + // Create applicant notification + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Your Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + 'data access request', + accessRecord._id + ); + + // Create authors notification + if (!_.isEmpty(authors)) { + await 
notificationBuilder.triggerNotificationMessage( + authors.map(author => author.id), + `A Data Access Request you are contributing to for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + 'data access request', + accessRecord._id + ); + } + + // 2. Send emails to relevant users + // Aggregate objects for custodian and applicant + emailRecipients = [accessRecord.mainApplicant, ...custodianManagers, ...stepReviewers, ...accessRecord.authors]; + if (!dateSubmitted) ({ updatedAt: dateSubmitted } = accessRecord); + // Create object to pass through email data + options = { + id: accessRecord._id, + applicationStatus: context.applicationStatus, + applicationStatusDesc: context.applicationStatusDesc, + publisher, + projectId, + projectName, + datasetTitles, + dateSubmitted, + applicants, + }; + // Create email body content + html = emailGenerator.generateDARStatusChangedEmail(options); + // Send email + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + `Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, + html, + false + ); + break; + case constants.notificationTypes.SUBMITTED: + // 1. 
Create notifications + // Custodian notification + if (_.has(accessRecord.datasets[0].toObject(), 'publisher.team.users')) { + // Retrieve all custodian user Ids to generate notifications + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + custodianUserIds = custodianManagers.map(user => user.id); + await notificationBuilder.triggerNotificationMessage( + custodianUserIds, + `A Data Access Request has been submitted to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, + 'data access request', + accessRecord._id + ); + } else { + const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || contactPoint; + custodianManagers = [{ email: dataCustodianEmail }]; + } + // Applicant notification + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Your Data Access Request for ${datasetTitles} was successfully submitted to ${publisher}`, + 'data access request', + accessRecord._id + ); + // Contributors/authors notification + if (!_.isEmpty(authors)) { + await notificationBuilder.triggerNotificationMessage( + accessRecord.authors.map(author => author.id), + `A Data Access Request you are contributing to for ${datasetTitles} was successfully submitted to ${publisher} by ${firstname} ${lastname}`, + 'data access request', + accessRecord._id + ); + } + // 2. 
Send emails to custodian and applicant + // Create object to pass to email generator + options = { + userType: '', + userEmail: appEmail, + publisher, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + }; + // Iterate through the recipient types + for (let emailRecipientType of constants.submissionEmailRecipientTypes) { + // Establish email context object + options = { + ...options, + userType: emailRecipientType, + submissionType: constants.submissionTypes.INITIAL, + }; + // Build email template + ({ html, jsonContent } = await emailGenerator.generateEmail(questions, pages, questionPanels, questionAnswers, options)); + // Send emails to custodian team members who have opted in to email notifications + if (emailRecipientType === 'dataCustodian') { + emailRecipients = [...custodianManagers]; + // Generate json attachment for external system integration + attachmentContent = Buffer.from(JSON.stringify({ id: accessRecord._id, ...jsonContent })).toString('base64'); + filename = `${helper.generateFriendlyId(accessRecord._id)} ${moment().format().toString()}.json`; + attachments = [await emailGenerator.generateAttachment(filename, attachmentContent, 'application/json')]; + } else { + // Send email to main applicant and contributors if they have opted in to email notifications + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; + } + // Send email + if (!_.isEmpty(emailRecipients)) { + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + `Data Access Request has been submitted to ${publisher} for ${datasetTitles}`, + html, + false, + attachments + ); + } + } + break; + case constants.notificationTypes.RESUBMITTED: + // 1. 
Create notifications + // Custodian notification + if (_.has(accessRecord.datasets[0], 'publisher.team.users')) { + // Retrieve all custodian user Ids to generate notifications + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + custodianUserIds = custodianManagers.map(user => user.id); + await notificationBuilder.triggerNotificationMessage( + custodianUserIds, + `A Data Access Request has been resubmitted with updates to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, + 'data access request', + accessRecord._id + ); + } else { + const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || contactPoint; + custodianManagers = [{ email: dataCustodianEmail }]; + } + // Applicant notification + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Your Data Access Request for ${datasetTitles} was successfully resubmitted with updates to ${publisher}`, + 'data access request', + accessRecord._id + ); + // Contributors/authors notification + if (!_.isEmpty(authors)) { + await notificationBuilder.triggerNotificationMessage( + accessRecord.authors.map(author => author.id), + `A Data Access Request you are contributing to for ${datasetTitles} was successfully resubmitted with updates to ${publisher} by ${firstname} ${lastname}`, + 'data access request', + accessRecord._id + ); + } + // 2. 
Send emails to custodian and applicant + // Create object to pass to email generator + options = { + userType: '', + userEmail: appEmail, + publisher, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + }; + // Iterate through the recipient types + for (let emailRecipientType of constants.submissionEmailRecipientTypes) { + // Establish email context object + options = { + ...options, + userType: emailRecipientType, + submissionType: constants.submissionTypes.RESUBMISSION, + }; + // Build email template + ({ html, jsonContent } = await emailGenerator.generateEmail(questions, pages, questionPanels, questionAnswers, options)); + // Send emails to custodian team members who have opted in to email notifications + if (emailRecipientType === 'dataCustodian') { + emailRecipients = [...custodianManagers]; + // Generate json attachment for external system integration + attachmentContent = Buffer.from(JSON.stringify({ id: accessRecord._id, ...jsonContent })).toString('base64'); + filename = `${helper.generateFriendlyId(accessRecord._id)} ${moment().format().toString()}.json`; + attachments = [await emailGenerator.generateAttachment(filename, attachmentContent, 'application/json')]; + } else { + // Send email to main applicant and contributors if they have opted in to email notifications + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; + } + // Send email + if (!_.isEmpty(emailRecipients)) { + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + `Data Access Request to ${publisher} for ${datasetTitles} has been updated with updates`, + html, + false, + attachments + ); + } + } + break; + case constants.notificationTypes.CONTRIBUTORCHANGE: + // 1. Deconstruct authors array from context to compare with existing Mongo authors + const { newAuthors, currentAuthors } = context; + // 2. Determine authors who have been removed + let addedAuthors = [...newAuthors].filter(author => !currentAuthors.includes(author)); + // 3. 
Determine authors who have been added + let removedAuthors = [...currentAuthors].filter(author => !newAuthors.includes(author)); + // 4. Create emails and notifications for added/removed contributors + // Set required data for email generation + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + }; + // Notifications for added contributors + if (!_.isEmpty(addedAuthors)) { + options.change = 'added'; + html = emailGenerator.generateContributorEmail(options); + // Find related user objects and filter out users who have not opted in to email communications + let addedUsers = await UserModel.find({ + id: { $in: addedAuthors }, + }).populate('additionalInfo'); + + await notificationBuilder.triggerNotificationMessage( + addedUsers.map(user => user.id), + `You have been added as a contributor for a Data Access Request to ${publisher} by ${firstname} ${lastname}`, + 'data access request', + accessRecord._id + ); + await emailGenerator.sendEmail( + addedUsers, + constants.hdrukEmail, + `You have been added as a contributor for a Data Access Request to ${publisher} by ${firstname} ${lastname}`, + html, + false + ); + } + // Notifications for removed contributors + if (!_.isEmpty(removedAuthors)) { + options.change = 'removed'; + html = await emailGenerator.generateContributorEmail(options); + // Find related user objects and filter out users who have not opted in to email communications + let removedUsers = await UserModel.find({ + id: { $in: removedAuthors }, + }).populate('additionalInfo'); + + await notificationBuilder.triggerNotificationMessage( + removedUsers.map(user => user.id), + `You have been removed as a contributor from a Data Access Request to ${publisher} by ${firstname} ${lastname}`, + 'data access request unlinked', + accessRecord._id + ); + await emailGenerator.sendEmail( + removedUsers, + constants.hdrukEmail, + `You have been 
removed as a contributor from a Data Access Request to ${publisher} by ${firstname} ${lastname}`, + html, + false + ); + } + break; + case constants.notificationTypes.STEPOVERRIDE: + // 1. Create reviewer notifications + notificationBuilder.triggerNotificationMessage( + stepReviewerUserIds, + `${firstname} ${lastname} has approved a Data Access Request application phase that you were assigned to review`, + 'data access request', + accessRecord._id + ); + // 2. Create reviewer emails + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + dateSubmitted, + ...context, + }; + html = emailGenerator.generateStepOverrideEmail(options); + emailGenerator.sendEmail( + stepReviewers, + constants.hdrukEmail, + `${firstname} ${lastname} has approved a Data Access Request application phase that you were assigned to review`, + html, + false + ); + break; + case constants.notificationTypes.REVIEWSTEPSTART: + // 1. Create reviewer notifications + notificationBuilder.triggerNotificationMessage( + stepReviewerUserIds, + `You are required to review a new Data Access Request application for ${publisher} by ${moment(currentDeadline).format( + 'D MMM YYYY HH:mm' + )}`, + 'data access request', + accessRecord._id + ); + // 2. Create reviewer emails + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + dateSubmitted, + ...context, + }; + html = emailGenerator.generateNewReviewPhaseEmail(options); + emailGenerator.sendEmail( + stepReviewers, + constants.hdrukEmail, + `You are required to review a new Data Access Request application for ${publisher} by ${moment(currentDeadline).format( + 'D MMM YYYY HH:mm' + )}`, + html, + false + ); + break; + case constants.notificationTypes.FINALDECISIONREQUIRED: + // 1. 
Get managers for publisher + custodianManagers = teamController.getTeamMembersByRole(accessRecord.publisherObj.team, constants.roleTypes.MANAGER); + managerUserIds = custodianManagers.map(user => user.id); + + // 2. Create manager notifications + notificationBuilder.triggerNotificationMessage( + managerUserIds, + `Action is required as a Data Access Request application for ${publisher} is now awaiting a final decision`, + 'data access request', + accessRecord._id + ); + // 3. Create manager emails + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + dateSubmitted, + ...context, + }; + html = emailGenerator.generateFinalDecisionRequiredEmail(options); + emailGenerator.sendEmail( + custodianManagers, + constants.hdrukEmail, + `Action is required as a Data Access Request application for ${publisher} is now awaiting a final decision`, + html, + false + ); + break; + case constants.notificationTypes.DEADLINEWARNING: + // 1. Create reviewer notifications + await notificationBuilder.triggerNotificationMessage( + remainingReviewerUserIds, + `The deadline is approaching for a Data Access Request application you are reviewing`, + 'data access request', + accessRecord._id + ); + // 2. Create reviewer emails + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + workflowName, + stepName, + reviewSections, + reviewerNames, + nextStepName, + dateDeadline, + }; + html = await emailGenerator.generateReviewDeadlineWarning(options); + await emailGenerator.sendEmail( + remainingReviewers, + constants.hdrukEmail, + `The deadline is approaching for a Data Access Request application you are reviewing`, + html, + false + ); + break; + case constants.notificationTypes.DEADLINEPASSED: + // 1. 
Get all managers + custodianManagers = teamController.getTeamMembersByRole(accessRecord.publisherObj.team, constants.roleTypes.MANAGER); + managerUserIds = custodianManagers.map(user => user.id); + // 2. Combine managers and reviewers remaining + let deadlinePassedUserIds = [...remainingReviewerUserIds, ...managerUserIds]; + let deadlinePassedUsers = [...remainingReviewers, ...custodianManagers]; + + // 3. Create notifications + await notificationBuilder.triggerNotificationMessage( + deadlinePassedUserIds, + `The deadline for a Data Access Request review phase has now elapsed`, + 'data access request', + accessRecord._id + ); + // 4. Create emails + options = { + id: accessRecord._id, + projectName, + projectId, + datasetTitles, + userName: `${appFirstName} ${appLastName}`, + actioner: `${firstname} ${lastname}`, + applicants, + workflowName, + stepName, + reviewSections, + reviewerNames, + nextStepName, + dateDeadline, + }; + html = await emailGenerator.generateReviewDeadlinePassed(options); + await emailGenerator.sendEmail( + deadlinePassedUsers, + constants.hdrukEmail, + `The deadline for a Data Access Request review phase has now elapsed`, + html, + false + ); + break; + } + }, +}; diff --git a/src/resources/dataset/datasetonboarding.route.js b/src/resources/dataset/datasetonboarding.route.js new file mode 100644 index 00000000..3e95ce75 --- /dev/null +++ b/src/resources/dataset/datasetonboarding.route.js @@ -0,0 +1,41 @@ +import express from 'express'; +import passport from 'passport'; +const router = express.Router(); +const datasetOnboardingController = require('./datasetonboarding.controller'); + +// @route PUT api/v1/dataset-onboarding/checkUniqueTitle +// @desc PUT Update the status of a dataset +// @access Private - Custodian Manager/Reviewer ? 
// All dataset-onboarding routes require a valid JWT; share one authenticate middleware
// instance rather than constructing it per route.
const requireAuth = passport.authenticate('jwt');

router.get('/checkUniqueTitle', requireAuth, datasetOnboardingController.checkUniqueTitle);

// @route   GET api/v1/dataset-onboarding/:id
// @desc    Fetch a single dataset version by its _id
// @access  Private - Custodian Manager/Reviewer
router.get('/:id', requireAuth, datasetOnboardingController.getDatasetVersion);

// @route   GET api/v1/dataset-onboarding/publisher/:publisherID
// @desc    List datasets belonging to a publisher
// @access  Private - Custodian Manager/Reviewer
router.get('/publisher/:publisherID', requireAuth, datasetOnboardingController.getDatasetsByPublisher);

// @route   POST api/v1/dataset-onboarding
// @desc    Create a new dataset version
// @access  Private - Custodian Manager/Reviewer
router.post('/', requireAuth, datasetOnboardingController.createNewDatasetVersion);

// @route   PATCH api/v1/dataset-onboarding/:id
// @desc    Update a single field in a dataset version
// @access  Private - Custodian Manager/Reviewer
router.patch('/:id', requireAuth, datasetOnboardingController.updateDatasetVersionDataElement);

// @route   POST api/v1/dataset-onboarding/:id
// @desc    Submit a dataset version for review
// @access  Private - Custodian Manager/Reviewer
router.post('/:id', requireAuth, datasetOnboardingController.submitDatasetVersion);

// @route   PUT api/v1/dataset-onboarding/:id
// @desc    Update the status of a dataset version
// @access  Private - Custodian Manager/Reviewer
+router.put('/:id', passport.authenticate('jwt'), datasetOnboardingController.changeDatasetVersionStatus); + +module.exports = router; diff --git a/src/resources/dataset/schema.json b/src/resources/dataset/schema.json new file mode 100644 index 00000000..638fc4f4 --- /dev/null +++ b/src/resources/dataset/schema.json @@ -0,0 +1,1670 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://hdruk.github.io/schemata/schema/dataset/latest/dataset.schema.json", + "type": "object", + "title": "HDR UK Dataset Schema", + "description": "HDR UK Dataset Metadata JSONSchema", + "version": "2.0.1", + "required": ["identifier", "version", "revisions", "issued", "modified", "summary", "accessibility"], + "additionalProperties": false, + "properties": { + "identifier": { + "$id": "#/properties/identifier", + "title": "Dataset identifier", + "$comment": "http://purl.org/dc/terms/identifier", + "examples": [ + ["226fb3f1-4471-400a-8c39-2b66d46a39b6", "https://web.www.healthdatagateway.org/dataset/226fb3f1-4471-400a-8c39-2b66d46a39b6"] + ], + "description": "System dataset identifier", + "anyOf": [ + { + "$ref": "#/definitions/uuidv4" + }, + { + "$ref": "#/definitions/url" + } + ] + }, + "version": { + "$id": "#/properties/version", + "title": "Dataset Version", + "description": "Dataset metadata version", + "$ref": "#/definitions/semver", + "examples": ["1.1.0"] + }, + "revisions": { + "$id": "#/properties/revisions", + "title": "Dataset Revisions", + "description": "Revisions of Dataset metadata", + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/revision" + } + ] + } + }, + "issued": { + "$id": "#/properties/issued", + "title": "Creation Date", + "$comment": "dcat:issued", + "description": "Dataset Metadata Creation Date", + "type": "string", + "format": "date-time" + }, + "modified": { + "$id": "#/properties/modified", + "title": "Modification Date", + "$comment": "dcat:modified", + "description": "Dataset Metadata Creation Date", + 
"type": "string", + "format": "date-time" + }, + "summary": { + "$id": "#/properties/summary", + "title": "Summary", + "description": "Summary metadata must be completed by Data Custodians onboarding metadata into the Innovation Gateway MVP.", + "$ref": "#/definitions/summary" + }, + "documentation": { + "$id": "#/properties/documentation", + "title": "Documentation", + "description": "Documentation can include a rich text description of the dataset or links to media such as documents, images, presentations, videos or links to data dictionaries, profiles or dashboards. Organisations are required to confirm that they have permission to distribute any additional media.", + "$ref": "#/definitions/documentation" + }, + "coverage": { + "$id": "#/properties/coverage", + "title": "Coverage", + "description": "This information includes attributes for geographical and temporal coverage, cohort details etc. to enable a deeper understanding of the dataset content so that researchers can make decisions about the relevance of the underlying data.", + "$ref": "#/definitions/coverage" + }, + "provenance": { + "$id": "#/properties/provenance", + "title": "Provenance", + "description": "Provenance information allows researchers to understand data within the context of its origins and can be an indicator of quality, authenticity and timeliness.", + "$ref": "#/definitions/provenance" + }, + "accessibility": { + "$id": "#/properties/accessibility", + "title": "Accessibility", + "description": "Accessibility information allows researchers to understand access, usage, limitations, formats, standards and linkage or interoperability with toolsets.", + "$ref": "#/definitions/accessibility" + }, + "enrichmentAndLinkage": { + "$id": "#/properties/enrichmentAndLinkage", + "title": "Enrichment and Linkage", + "description": "This section includes information about related datasets that may have previously been linked, as well as indicating if there is the opportunity to link to other datasets 
in the future. If a dataset has been enriched and/or derivations, scores and existing tools are available this section allows providers to indicate this to researchers.", + "$ref": "#/definitions/enrichmentAndLinkage" + }, + "observations": { + "$id": "#/properties/observations", + "title": "Observations", + "$comment": "https://schema.org/observation", + "description": "Multiple observations about the dataset may be provided and users are expected to provide at least one observation (1..*). We will be supporting the schema.org observation model (https://schema.org/Observation) with default values. Users will be encouraged to provide their own statistical populations as the project progresses. Example: <b> Statistical Population 1 </b> type: StatisticalPopulation populationType: Persons numConstraints: 0 <b> Statistical Population 2 </b> type: StatisticalPopulation populationType: Events numConstraints: 0 <b> Statistical Population 3 </b> type: StatisticalPopulation populationType: Findings numConstraints: 0 typeOf: Observation observedNode: <b> Statistical Population 1 </b> measuredProperty: count measuredValue: 32937 observationDate: “2017”", + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/observation" + } + ] + } + } + }, + "definitions": { + "revision": { + "$id": "#/definitions/revision", + "required": ["version", "url"], + "additionalProperties": false, + "properties": { + "version": { + "$id": "#/definitions/revision/version", + "description": "Semantic Version", + "$ref": "#/definitions/semver" + }, + "url": { + "$id": "#/definitions/revision/url", + "description": "URL endpoint to obtain the version", + "$ref": "#/definitions/url" + } + } + }, + "summary": { + "$id": "#/definitions/summary", + "required": ["title", "abstract", "publisher", "contactPoint", "keywords"], + "additionalProperties": false, + "properties": { + "title": { + "$id": "#/summary/title", + "$comment": "dct:title. 
title is reserved word in json schema", + "title": "Title", + "description": "Title of the dataset limited to 80 characters. It should provide a short description of the dataset and be unique across the gateway. If your title is not unique, please add a prefix with your organisation name or identifier to differentiate it from other datasets within the Gateway. Please avoid acronyms wherever possible. Good titles should summarise the content of the dataset and if relevant, the region the dataset covers.", + "examples": [["North West London COVID-19 Patient Level Situation Report"]], + "allOf": [ + { + "$ref": "#/definitions/eightyCharacters" + } + ] + }, + "abstract": { + "$id": "#/summary/abstract", + "title": "Dataset Abstract", + "description": "Provide a clear and brief descriptive signpost for researchers who are searching for data that may be relevant to their research. The abstract should allow the reader to determine the scope of the data collection and accurately summarise its content. The optimal length is one paragraph (limited to 255 characters) and effective abstracts should avoid long sentences and abbreviations where possible", + "$comment": "dct:abstract", + "examples": [ + "CPRD Aurum contains primary care data contributed by General Practitioner (GP) practices using EMIS Web® including patient registration information and all care events that GPs have chosen to record as part of their usual medical practice." + ], + "allOf": [ + { + "$ref": "#/definitions/abstractText" + } + ] + }, + "publisher": { + "$id": "#/summary/publisher", + "$comment": "Conforms to spec, but this MAY be an object of organisation. https://schema.org/publisher", + "title": "Dataset publisher", + "description": "This is the organisation responsible for running or supporting the data access request process, as well as publishing and maintaining the metadata. In most cases this will be the same as the HDR UK Organisation (Hub or Alliance Member). 
However, in some cases this will be different i.e. Tissue Directory are an HDR UK Gateway organisation but coordinate activities across a number of data publishers i.e. Cambridge Blood and Stem Cell Biobank.", + "allOf": [ + { + "$ref": "#/definitions/organisation" + } + ] + }, + "contactPoint": { + "$id": "#/summary/contactPoint", + "title": "Contact Point", + "$comment": "dcat:contactPoint", + "default": "Defaulted to the contact point of the primary organisation of the user however, can be overridden for specific datasets", + "examples": ["SAILDatabank@swansea.ac.uk"], + "description": "Please provide a valid email address that can be used to coordinate data access requests with the publisher. Organisations are expected to provide a dedicated email address associated with the data access request process. Notes: An employee’s email address can only be provided on a temporary basis and if one is provided an explicit consent must be obtained for this purpose.", + "allOf": [ + { + "$ref": "#/definitions/emailAddress" + } + ] + }, + "keywords": { + "$id": "#/summary/keywords", + "$comment": "dcat:keyword. May be an array of strings or comma separated list.", + "title": "Keywords", + "description": "Please provide relevant and specific keywords that can improve the SEO of your dataset as a comma separated list. Notes: Onboarding portal will suggest keywords based on title, abstract and description. We are compiling a standardised list of keywords and synonyms across datasets to make filtering easier for users.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/eightyCharacters" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "alternateIdentifiers": { + "$id": "#/summary/alternateIdentifiers", + "$comment": "DATA-CITE alternate-identifiers used. 
Note, will support comma separated list for backwards compatibility with other systems", + "title": "Alternate dataset identifiers", + "description": "Alternate dataset identifiers or local identifiers", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/shortDescription" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "doiName": { + "$id": "#/summary/doiName", + "title": "Digital Object Identifier", + "description": "All HDR UK registered datasets should either have a Digital Object Identifier (DOI) or be working towards obtaining one. If a DOI is available, please provide the DOI.", + "allOf": [ + { + "$ref": "#/definitions/doi" + } + ], + "$comment": "Vocabulary: DOI Data Dictionary ", + "examples": ["10.3399/bjgp17X692645"] + } + } + }, + "organisation": { + "$id": "#/definitions/organisation", + "type": "object", + "required": ["name", "contactPoint"], + "title": "Organisation Metadata", + "description": "Describes an organisation for purposes of discovery and identification.", + "properties": { + "identifier": { + "$id": "#/organisation/identifier", + "title": "Organisation Identifier", + "description": "Please provide a Grid.ac identifier (see https://www.grid.ac/institutes) for your organisation. 
If your organisation does not have a Grid.ac identifier please use the “suggest an institute” function here: https://www.grid.ac/institutes#", + "$comment": "https://schema.org/identifier", + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + }, + "name": { + "$id": "#/organisation/name", + "title": "Organisation Name", + "description": "Name of the organisation", + "$comment": "https://schema.org/name", + "allOf": [ + { + "$ref": "#/definitions/eightyCharacters" + } + ] + }, + "logo": { + "$id": "#/organisation/logo", + "title": "Organisation Logo", + "description": "Please provide a logo associated with the Gateway Organisation using a valid URL. The following formats will be accepted .jpg, .png or .svg.", + "$comment": "https://schema.org/logo", + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + }, + "description": { + "$id": "#/organisation/description", + "title": "Organisation Description", + "description": "Please provide a URL that describes the organisation.", + "$comment": "https://schema.org/description", + "allOf": [ + { + "$ref": "#/definitions/description" + } + ] + }, + "contactPoint": { + "$id": "#/organisation/contactPoint", + "title": "Organisation Contact Point", + "description": "Organisation contact point(s)", + "$comment": "https://schema.org/contactPoint", + "anyOf": [ + { + "$ref": "#/definitions/emailAddress" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/emailAddress" + } + ] + } + } + ] + }, + "memberOf": { + "$id": "#/organisation/memberOf", + "title": "Organisation Membership", + "description": "Please indicate if the organisation is an Alliance Member or a Hub.", + "$comment": "https://schema.org/memberOf", + "allOf": [ + { + "$ref": "#/definitions/memberOf" + } + ] + }, + "accessRights": { + "$id": "#/organisation/accessRights", + "title": "Organisation Default Access Rights", + "$comment": "dct:access_rights", + "description": "The URL of a webpage where the data access request process 
and/or guidance is provided. If there is more than one access process i.e. industry vs academic please provide both.", + "anyOf": [ + { + "$ref": "#/definitions/url" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/url" + } + } + ] + }, + "deliveryLeadTime": { + "$id": "#/member/deliveryLeadTime", + "title": "Access Request Duration", + "$comment": "https://schema.org/deliveryLeadTime", + "description": "Please provide an indication of the typical processing times based on the types of requests typically received. Note: This value will be used as default access request duration for all datasets submitted by the organisation. However, there will be the opportunity to overwrite this value for each dataset.", + "allOf": [ + { + "$ref": "#/definitions/deliveryLeadTime" + } + ] + }, + "accessService": { + "$id": "#/member/accessService", + "title": "Organisation Access Service", + "$comment": "dcat:accessService", + "examples": ["https://cnfl.extge.co.uk/display/GERE/Research+Environment+User+Guide"], + "description": "Please provide a brief description of the data access services that are available including: environment that is currently available to researchers;additional consultancy and services;any indication of costs associated. If no environment is currently available, please indicate the current plans and timelines when and how data will be made available to researchers Note: This value will be used as default access environment for all datasets submitted by the organisation. 
However, there will be the opportunity to overwrite this value for each dataset.", + "allOf": [ + { + "$ref": "#/definitions/longDescription" + } + ] + }, + "accessRequestCost": { + "$id": "#/member/accessRequestCost", + "title": "Organisation Access Request Cost", + "$comment": "No standard identified", + "description": "Please provide link(s) to a webpage or a short description detailing the commercial model for processing data access requests for the organisation (if available) Definition: Indication of commercial model or cost (in GBP) for processing each data access request by the data custodian.", + "allOf": [ + { + "$ref": "#/definitions/shortDescription" + } + ] + }, + "dataUseLimitation": { + "$id": "#/member/dataUseLimitation", + "title": "Data Use Limitation", + "$comment": "https://www.ebi.ac.uk/ols/ontologies/duo/terms?iri=http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000001", + "description": "Please provide an indication of consent permissions for datasets and/or materials, and relates to the purposes for which datasets and/or material might be removed, stored or used. Notes: where there are existing data-sharing arrangements such as the HDR UK HUB data sharing agreement or the NIHR HIC data sharing agreement this should be indicated within access rights. This value will be used as terms for all datasets submitted by the organisation. However, there will be the opportunity to overwrite this value for each dataset.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/dataUseLimitation" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "dataUseRequirements": { + "$id": "#/member/dataUseRequirements", + "title": "Data Use Requirements", + "description": "Please indicate if there are any additional conditions set for use if any, multiple requirements may be provided. 
Please ensure that these restrictions are documented in access rights information.", + "$comment": "https://www.ebi.ac.uk/ols/ontologies/duo/terms?iri=http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000001", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/dataUseRequirements" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + } + } + }, + "documentation": { + "$id": "#/definitions/member", + "additionalProperties": false, + "properties": { + "description": { + "$id": "#/properties/documentation/description", + "title": "Description", + "$comment": "dc:description, https://schema.org/description", + "description": "A free-text description of the record.", + "allOf": [ + { + "$ref": "#/definitions/description" + } + ] + }, + "associatedMedia": { + "$id": "#/properties/documentation/associatedMedia", + "title": "Associated Media", + "$comment": "https://schema.org/associatedMedia", + "examples": ["PDF Document that describes study protocol"], + "description": "Please provide any media associated with the Gateway Organisation using a valid URI for the content. This is an opportunity to provide additional context that could be useful for researchers wanting to understand more about the dataset and its relevance to their research question. The following formats will be accepted .jpg, .png or .svg, .pdf, .xslx or .docx. 
Note: media asset can be hosted by the organisation or uploaded using the onboarding portal.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "isPartOf": { + "$id": "#/properties/documentation/isPartOf", + "title": "Group", + "$comment": "https://schema.org/isPartOf NOTE: we may make Groups first class citizens so the are navigable", + "default": "NOT APPLICABLE", + "examples": ["Hospital Episodes Statistics datasets (A&E, APC, OP, AC MSDS)."], + "description": "Please complete only if the dataset is part of a group or family", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/url" + }, + { + "$ref": "#/definitions/eightyCharacters" + }, + { + "type": "string", + "enum": ["NOT APPLICABLE"] + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + } + } + }, + "coverage": { + "$id": "#/definitions/coverage", + "additionalProperties": false, + "properties": { + "spatial": { + "$id": "#/properties/coverage/spatial", + "title": "Geographic Coverage", + "$comment": "dct:spatial", + "description": "The geographical area covered by the dataset. It is recommended that links are to entries in a well-maintained gazetteer such as https://www.geonames.org/ or https://what3words.com/daring.lion.race.", + "examples": ["https://www.geonames.org/2635167/united-kingdom-of-great-britain-and-northern-ireland.html"], + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + }, + "typicalAgeRange": { + "$id": "#/properties/coverage/typicalAgeRange", + "title": "Age Range", + "$comment": "https://schema.org/typicalAgeRange", + "description": "Please indicate the age range in whole years of participants in the dataset. 
Please provide range in the following format ‘[min age] – [max age]’ where both the minimum and maximum are whole numbers (integers).", + "allOf": [ + { + "$ref": "#/definitions/ageRange" + } + ] + }, + "physicalSampleAvailability": { + "$id": "#/properties/coverage/physicalSampleAvailability", + "title": "Physical Sample Availability", + "$comment": "No standard identified. Used enumeration from the UK Tissue Directory.", + "examples": ["BONE MARROW"], + "description": "Availability of physical samples associated with the dataset. If samples are available, please indicate the types of samples that are available. More than one type may be provided. If sample are not yet available, please provide “AVAILABILITY TO BE CONFIRMED”. If samples are not available, then please provide “NOT AVAILABLE”.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "contains": { + "$ref": "#/definitions/physicalSampleAvailability" + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "followup": { + "$id": "#/properties/coverage/followup", + "title": "Followup", + "$comment": "No standard identified", + "default": "UNKNOWN", + "description": "If known, what is the typical time span that a patient appears in the dataset (follow up period)", + "allOf": [ + { + "$ref": "#/definitions/followup" + } + ] + }, + "pathway": { + "$id": "#/properties/coverage/pathway", + "title": "Pathway", + "$comment": "No standard identified", + "description": "Please indicate if the dataset is representative of the patient pathway and any limitations the dataset may have with respect to pathway coverage. This could include if the dataset is from a single speciality or area, a single tier of care, linked across two tiers (e.g. 
primary and secondary care), or an integrated care record covering the whole patient pathway.", + "allOf": [ + { + "$ref": "#/definitions/description" + } + ] + } + } + }, + "provenance": { + "$id": "#/definitions/provenance", + "required": ["temporal"], + "additionalProperties": false, + "properties": { + "origin": { + "$id": "#/definitions/provenance/origin", + "allOf": [ + { + "$ref": "#/definitions/origin" + } + ] + }, + "temporal": { + "$id": "#/definitions/provenance/temporal", + "allOf": [ + { + "$ref": "#/definitions/temporal" + } + ] + } + } + }, + "origin": { + "$id": "#/definitions/origin", + "additionalProperties": false, + "properties": { + "purpose": { + "$id": "#/properties/provenance/origin/purpose", + "title": "Purpose", + "$comment": "https://ddialliance.org/Specification/DDI-Lifecycle/3.3/XMLSchema/FieldLevelDocumentation/", + "description": "Please indicate the purpose(s) for which the dataset was collected.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": [ + { + "$ref": "#/definitions/purpose" + } + ], + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "source": { + "$id": "#/properties/provenance/origin/source", + "title": "Source", + "$comment": "https://dublincore.org/specifications/dublin-core/dcmi-terms/#source", + "description": "Please indicate the source of the data extraction", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": [ + { + "$ref": "#/definitions/source" + } + ], + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "collectionSituation": { + "$id": "#/properties/provenance/origin/collectionSituation", + "title": "Setting", + "$comment": "https://ddialliance.org/Specification/DDI-Lifecycle/3.2/XMLSchema/FieldLevelDocumentation/", + "description": "Please indicate the setting(s) where data was collected. 
Multiple settings may be provided", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": [ + { + "$ref": "#/definitions/setting" + } + ], + "uniqueItems": true, + "minItems": 1 + } + ] + } + } + }, + "temporal": { + "$id": "#/definitions/temporal", + "required": ["accrualPeriodicity", "timeLag", "startDate"], + "additionalProperties": false, + "properties": { + "accrualPeriodicity": { + "$id": "#/properties/provenance/temporal/accrualPeriodicity", + "title": "Periodicity", + "$comment": "dct:accrualPeriodicity", + "default": "", + "description": "Please indicate the frequency of distribution release. If a dataset is distributed regularly please choose a distribution release periodicity from the constrained list and indicate the next release date. When the release date becomes historical, a new release date will be calculated based on the publishing periodicity. If a dataset has been published and will remain static please indicate that it is static and indicated when it was released. If a dataset is released on an irregular basis or “on-demand” please indicate that it is Irregular and leave release date as null. If a dataset can be published in real-time or near-real-time please indicate that it is continuous and leave release date as null. Notes: see https://www.dublincore.org/specifications/dublin-core/collection-description/frequency/", + "allOf": [ + { + "$ref": "#/definitions/periodicity" + } + ] + }, + "distributionReleaseDate": { + "$id": "#/properties/provenance/temporal/distributionReleaseDate", + "title": "Release Date", + "$comment": "dcat:distribution_release_date", + "description": "Date of the latest release of the dataset. If this is a regular release i.e. quarterly, or this is a static dataset please complete this alongside Periodicity. If this is Irregular or Continuously released please leave this blank. 
Notes: Periodicity and release date will be used to determine when the next release is expected. E.g. if the release date is documented as 01/01/2020 and it is now 20/04/2020 and there is a quarterly release schedule, the latest release will be calculated as 01/04/2020.", + "anyOf": [ + { + "type": "string", + "format": "date" + }, + { + "type": "string", + "format": "date-time" + } + ] + }, + "startDate": { + "$id": "#/properties/provenance/temporal/startDate", + "title": "Start Date", + "$comment": "dcat:startDate", + "description": "The start of the time period that the dataset provides coverage for. If there are multiple cohorts in the dataset with varying start dates, please provide the earliest date and use the description or the media attribute to provide more information.", + "anyOf": [ + { + "type": "string", + "format": "date" + }, + { + "type": "string", + "format": "date-time" + } + ] + }, + "endDate": { + "$id": "#/properties/provenance/temporal/endDate", + "title": "End Date", + "$comment": "dcat:endDate", + "description": "The end of the time period that the dataset provides coverage for. If the dataset is “Continuous” and has no known end date, please state continuous. 
If there are multiple cohorts in the dataset with varying end dates, please provide the latest date and use the description or the media attribute to provide more information.", + "anyOf": [ + { + "type": "string", + "format": "date" + }, + { + "type": "string", + "format": "date-time" + }, + { + "type": "string", + "enum": ["CONTINUOUS"] + } + ] + }, + "timeLag": { + "$id": "#/properties/provenance/temporal/timeLag", + "title": "Time Lag", + "$comment": "No standard identified", + "description": "Please indicate the typical time-lag between an event and the data for that event appearing in the dataset", + "allOf": [ + { + "$ref": "#/definitions/timeLag" + } + ] + } + } + }, + "accessibility": { + "$id": "#/definitions/accessibility", + "required": ["access"], + "additionalProperties": false, + "properties": { + "usage": { + "$id": "#/definitions/accessibility/usage", + "title": "Usage", + "description": "This section includes information about how the data can be used and how it is currently being used", + "$ref": "#/definitions/usage" + }, + "access": { + "$id": "#/definitions/accessibility/access", + "title": "Access", + "description": "This section includes information about data access", + "$ref": "#/definitions/access" + }, + "formatAndStandards": { + "$id": "#/definitions/accessibility/formatAndStandards", + "title": "Format and Standards", + "description": "Section includes technical attributes for language vocabularies, sizes etc. 
and gives researchers facts about and processing the underlying data in the dataset.", + "$ref": "#/definitions/formatAndStandards" + } + } + }, + "usage": { + "$id": "#/definitions/usage", + "additionalProperties": false, + "properties": { + "dataUseLimitation": { + "$id": "#/properties/accessibility/usage/dataUseLimitation", + "title": "Data Use Limitation", + "$comment": "https://www.ebi.ac.uk/ols/ontologies/duo/terms?iri=http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000001", + "description": "Please provide an indication of consent permissions for datasets and/or materials, and relates to the purposes for which datasets and/or material might be removed, stored or used. NOTE: we have extended the DUO to include a value for NO LINKAGE", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/dataUseLimitation" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "dataUseRequirements": { + "$id": "#/properties/accessibility/usage/dataUseRequirements", + "title": "Data Use Requirements", + "description": "Please indicate if there are any additional conditions set for use if any, multiple requirements may be provided. Please ensure that these restrictions are documented in access rights information.", + "$comment": "https://www.ebi.ac.uk/ols/ontologies/duo/terms?iri=http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000001", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/dataUseRequirements" + } + ] + }, + "uniqueItems": true, + "minItems": 1 + } + ] + }, + "resourceCreator": { + "$id": "#/properties/accessibility/usage/resourceCreator", + "title": "Citation Requirements", + "$comment": "dct:creator", + "description": "Please provide the text that you would like included as part of any citation that credits this dataset. This is typically just the name of the publisher. 
No employee details should be provided.", + "allOf": [ + { + "$ref": "#/definitions/shortDescription" + } + ] + }, + "investigations": { + "title": "Investigations", + "$comment": "No standard identified", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + } + } + ] + }, + "isReferencedBy": { + "title": "Citations", + "$comment": "dct:isReferencedBy ", + "description": "Please provide the keystone paper associated with the dataset. Also include a list of known citations, if available and should be links to existing resources where the dataset has been used or referenced. Please provide multiple entries, or if you are using a csv upload please provide them as a tab separated list.", + "anyOf": [ + { + "$ref": "#/definitions/doi" + }, + { + "type": "array", + "items": [ + { + "$ref": "#/definitions/doi" + } + ] + } + ] + } + } + }, + "access": { + "$id": "#/definitions/access", + "required": ["accessRights", "jurisdiction", "dataController"], + "additionalProperties": false, + "properties": { + "accessRights": { + "$id": "#/properties/accessibility/access/accessRights", + "title": "Access Rights", + "$comment": "dct:access_rights NOTE: need to ensure that this is consistent across the organisation info and the dataset info", + "anyOf": [ + { + "type": "string", + "pattern": "^In Progress$" + }, + { + "$ref": "#/definitions/url" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/url" + } + } + ] + }, + "accessService": { + "$id": "#/properties/accessibility/access/accessService", + "title": "Access Service", + "$comment": "dcat:accessService ", + "examples": ["https://cnfl.extge.co.uk/display/GERE/Research+Environment+User+Guide"], + "description": "Please provide a brief description of the data access services that are available including: environment that is currently available to researchers;additional consultancy and services;any indication 
of costs associated. If no environment is currently available, please indicate the current plans and timelines when and how data will be made available to researchers Note: This value will be used as default access environment for all datasets submitted by the organisation. However, there will be the opportunity to overwrite this value for each dataset.", + "allOf": [ + { + "$ref": "#/definitions/longDescription" + } + ] + }, + "accessRequestCost": { + "$id": "#/properties/accessibility/access/accessRequestCost", + "title": "Organisation Access Request Cost", + "$comment": "No standard identified", + "description": "Please provide link(s) to a webpage detailing the commercial model for processing data access requests for the organisation (if available) Definition: Indication of commercial model or cost (in GBP) for processing each data access request by the data custodian.", + "allOf": [ + { + "$ref": "#/definitions/longDescription" + } + ] + }, + "deliveryLeadTime": { + "$id": "#/properties/accessibility/access/deliveryLeadTime", + "title": "Access Request Duration", + "$comment": "https://schema.org/deliveryLeadTime", + "description": "Please provide an indication of the typical processing times based on the types of requests typically received.", + "allOf": [ + { + "$ref": "#/definitions/deliveryLeadTime" + } + ] + }, + "jurisdiction": { + "$id": "#/properties/accessibility/access/jurisdiction", + "title": "Jurisdiction", + "default": "GB-ENG", + "$comment": "http://purl.org/dc/terms/Jurisdiction FIXME: Add ISO 3166-2 Subdivision code pattern", + "description": "Please use country code from ISO 3166-1 country codes and the associated ISO 3166-2 for regions, cities, states etc. 
for the country/state under whose laws the data subjects’ data is collected, processed and stored.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/isocountrycode" + } + ] + } + } + ] + }, + "dataController": { + "$id": "#/properties/accessibility/access/dataController", + "title": "Data Controller", + "$comment": " dpv:DataController", + "description": "Data Controller means a person/entity who (either alone or jointly or in common with other persons/entities) determines the purposes for which and the way any Data Subject data, specifically personal data or are to be processed.", + "allOf": [ + { + "$ref": "#/definitions/longDescription" + } + ] + }, + "dataProcessor": { + "$id": "#/properties/accessibility/access/dataProcessor", + "title": "Data Processor", + "$comment": "dpv:DataProcessor", + "description": "A Data Processor, in relation to any Data Subject data, specifically personal data, means any person/entity (other than an employee of the data controller) who processes the data on behalf of the data controller.", + "allOf": [ + { + "$ref": "#/definitions/longDescription" + } + ] + } + } + }, + "formatAndStandards": { + "$id": "#/definitions/formatAndStandards", + "additionalProperties": false, + "required": ["vocabularyEncodingScheme", "conformsTo", "language", "format"], + "properties": { + "vocabularyEncodingScheme": { + "$id": "#/properties/accessibility/formatAndStandards/vocabularyEncodingScheme", + "title": "Controlled Vocabulary", + "$comment": "https://www.dublincore.org/specifications/dublin-core/dcmi-terms/#http://purl.org/dc/dcam/VocabularyEncodingScheme", + "default": "LOCAL", + "description": "List any relevant terminologies / ontologies / controlled vocabularies, such as ICD 10 Codes, NHS Data Dictionary National Codes or SNOMED CT International, that are being used by the dataset. 
If the controlled vocabularies are local standards, please make that explicit. If you are using a standard that has not been included in the list, please use “other” and contact support desk to ask for an addition. Notes: More than one vocabulary may be provided.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/controlledVocabulary" + } + ] + }, + "minItems": 0 + } + ] + }, + "conformsTo": { + "$id": "#/properties/accessibility/formatAndStandards/conformsTo", + "title": "Conforms To", + "$comment": "dct:conformsTo", + "default": "LOCAL", + "description": "List standardised data models that the dataset has been stored in or transformed to, such as OMOP or FHIR. If the data is only available in a local format, please make that explicit. If you are using a standard that has not been included in the list, please use “other” and contact support desk to ask for an addition.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": [ + { + "$ref": "#/definitions/standardisedDataModels" + } + ] + } + ] + }, + "language": { + "$id": "#/properties/accessibility/formatAndStandards/language", + "title": "Language", + "description": "This should list all the languages in which the dataset metadata and underlying data is made available.", + "default": "en", + "$comment": "dct:language. FIXME: Conforms to spec, but may be a list of strings given cardinality 1:*. Validate against external list of languages. 
Resources defined by the Library of Congress (ISO 639-1, ISO 639-2) SHOULD be used.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/language" + } + ] + }, + "minItems": 1 + } + ] + }, + "format": { + "$id": "#/properties/accessibility/formatAndStandards/format", + "title": "Format", + "description": "If multiple formats are available please specify. See application, audio, image, message, model, multipart, text, video, https://www.iana.org/assignments/media-types/media-types.xhtml Note: If your file format is not included in the current list of formats, please indicate other. If you are using the HOP you will be directed to a service desk page where you can request your additional format. If not please go to: https://metadata.atlassian.net/servicedesk/customer/portal/4 to request your format.", + "$comment": "http://purl.org/dc/terms/format", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/format" + } + ] + }, + "minItems": 1 + } + ] + } + } + }, + "enrichmentAndLinkage": { + "$id": "#/definitions/enrichmentAndLinkage", + "additionalProperties": false, + "properties": { + "qualifiedRelation": { + "$id": "#/properties/enrichmentAndLinkage/qualifiedRelation", + "title": "Linked Datasets", + "$comment": "dcat:qualifiedRelation", + "description": "If applicable, please provide the DOI of other datasets that have previously been linked to this dataset and their availability. If no DOI is available, please provide the title of the datasets that can be linked, where possible using the same title of a dataset previously onboarded to the HOP. 
Note: If all the datasets from Gateway organisation can be linked please indicate “ALL” and the onboarding portal will automate linkage across the datasets submitted.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + } + } + ] + }, + "derivation": { + "title": "Derivations", + "$comment": "prov:Derivation", + "description": "Indicate if derived datasets or predefined extracts are available and the type of derivation available. Notes. Single or multiple dimensions can be provided as a derived extract alongside the dataset.", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/abstractText" + } + ] + } + } + ] + }, + "tools": { + "$id": "#/properties/enrichmentAndLinkage/tools", + "title": "Tools", + "$comment": "No standard identified. We encourage users to adopt a model along the lines of https://www.ga4gh.org/news/tool-registry-service-api-enabling-an-interoperable-library-of-genomics-analysis-tools/", + "description": "Please provide the URL of any analysis tools or models that have been created for this dataset and are available for further use. Multiple tools may be provided. 
Note: We encourage users to adopt a model along the lines of https://www.ga4gh.org/news/tool-registry-service-api-enabling-an-interoperable-library-of-genomics-analysis-tools/", + "anyOf": [ + { + "$ref": "#/definitions/commaSeparatedValues" + }, + { + "type": "array", + "items": { + "allOf": [ + { + "$ref": "#/definitions/url" + } + ] + }, + "minItems": 0 + } + ] + } + } + }, + "observation": { + "$id": "#/definitions/observation", + "required": ["observedNode", "measuredValue", "observationDate", "measuredProperty"], + "additionalProperties": false, + "properties": { + "observedNode": { + "$id": "#/properties/observation/observedNode", + "title": "Statistical Population", + "$comment": "https://schema.org/observedNode", + "examples": ["PERSONS"], + "description": "Please select one of the following statistical populations for your observation", + "allOf": [ + { + "$ref": "#/definitions/statisticalPopulationConstrained" + } + ] + }, + "measuredValue": { + "$id": "#/properties/observation/measuredValue", + "title": "Measured Value", + "$comment": "https://schema.org/measuredValue", + "description": "Please provide the population size associated with the population type of the dataset, i.e.
1000 people in a study, or 87 images (MRI) of Knee Usage Note: Used with Statistical Population, which specifies the type of the population in the dataset.", + "type": "integer" + }, + "disambiguatingDescription": { + "$id": "#/properties/observation/disambiguatingDescription", + "title": "Disambiguating Description", + "$comment": "https://schema.org/disambiguatingDescription ", + "description": "If SNOMED CT term does not provide sufficient detail, please provide a description that disambiguates the population type.", + "allOf": [ + { + "$ref": "#/definitions/abstractText" + } + ] + }, + "observationDate": { + "$id": "#/properties/observation/observationDate", + "title": "Observation Date", + "$comment": "https://schema.org/observationDate", + "default": "release date", + "description": "Please provide the date that the observation was made. Some datasets may be continuously updated and the number of records will change regularly, so the observation date provides users with the date that the analysis or query was run to generate the particular observation. Multiple observations can be made i.e. an observation of cumulative COVID positive cases by specimen on the 1/1/2021 could be 2M. On the 8/1/2021 a new observation could be 2.1M. 
Users can add multiple observations.", + "anyOf": [ + { + "type": "string", + "format": "date" + }, + { + "type": "string", + "format": "date-time" + } + ] + }, + "measuredProperty": { + "$id": "#/properties/observation/measuredProperty", + "title": "Measured Property", + "$comment": "https://schema.org/measuredProperty", + "default": "COUNT", + "description": "Initially this will be defaulted to \"COUNT\"", + "allOf": [ + { + "type": "string", + "enum": ["COUNT", "Count", "count"] + } + ] + } + } + }, + "uuidv4": { + "type": "string", + "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", + "minLength": 36, + "maxLength": 36 + }, + "semver": { + "type": "string", + "pattern": "^([0-9]+)\\.([0-9]+)\\.([0-9]+)$" + }, + "url": { + "type": "string", + "format": "uri" + }, + "eightyCharacters": { + "type": "string", + "minLength": 2, + "maxLength": 80 + }, + "abstractText": { + "type": "string", + "minLength": 5, + "maxLength": 255 + }, + "emailAddress": { + "type": "string", + "format": "email" + }, + "shortDescription": { + "type": "string", + "minLength": 2, + "maxLength": 1000 + }, + "description": { + "type": "string", + "minLength": 2, + "maxLength": 3000 + }, + "longDescription": { + "type": "string", + "minLength": 2, + "maxLength": 5000 + }, + "commaSeparatedValues": { + "type": "string", + "pattern": "([^,]+)" + }, + "doi": { + "type": "string", + "pattern": "^10.\\d{4,9}/[-._;()/:a-zA-Z0-9]+$" + }, + "ageRange": { + "type": "string", + "pattern": "(150|1[0-4][0-9]|[0-9]|[1-8][0-9]|9[0-9])-(150|1[0-4][0-9]|[0-9]|[1-8][0-9]|9[0-9])" + }, + "format": { + "$comment": "FIXME: Conforms to spec, but may be a list of strings given cardinality 1:*. Validate against external list of formats, e.g. 
https://www.iana.org/assignments/media-types/media-types.xhtml", + "type": "string", + "minLength": 1 + }, + "isocountrycode": { + "$comment": "FIXME: Add ISO 3166-2 Subdivision code pattern", + "type": "string", + "pattern": "^[A-Z]{2}(-[A-Z]{2,3})?$" + }, + "memberOf": { + "type": "string", + "enum": ["HUB", "ALLIANCE", "OTHER", "NCS"] + }, + "physicalSampleAvailability": { + "type": "string", + "enum": [ + "NOT AVAILABLE", + "BONE MARROW", + "CANCER CELL LINES", + "CORE BIOPSY", + "CDNA OR MRNA", + "DNA", + "FAECES", + "IMMORTALIZED CELL LINES", + "MICRORNA", + "PERIPHERAL BLOOD CELLS", + "PLASMA", + "PM TISSUE", + "PRIMARY CELLS", + "RNA", + "SALIVA", + "SERUM", + "SWABS", + "TISSUE", + "URINE", + "WHOLE BLOOD", + "AVAILABILITY TO BE CONFIRMED", + "OTHER" + ] + }, + "followup": { + "type": "string", + "enum": ["0 - 6 MONTHS", "6 - 12 MONTHS", "1 - 10 YEARS", "> 10 YEARS", "UNKNOWN", "CONTINUOUS", "OTHER"] + }, + "periodicity": { + "type": "string", + "enum": [ + "STATIC", + "IRREGULAR", + "CONTINUOUS", + "BIENNIAL", + "ANNUAL", + "BIANNUAL", + "QUARTERLY", + "BIMONTHLY", + "MONTHLY", + "BIWEEKLY", + "WEEKLY", + "SEMIWEEKLY", + "DAILY", + "OTHER" + ] + }, + "purpose": { + "type": "string", + "enum": ["STUDY", "DISEASE REGISTRY", "TRIAL", "CARE", "AUDIT", "ADMINISTRATIVE", "FINANCIAL", "STATUTORY", "OTHER"] + }, + "source": { + "type": "string", + "enum": ["EPR", "ELECTRONIC SURVEY", "LIMS", "OTHER INFORMATION SYSTEM", "PAPER BASED", "FREETEXT NLP", "MACHINE GENERATED", "OTHER"] + }, + "setting": { + "type": "string", + "enum": [ + "CLINIC", + "PRIMARY CARE", + "ACCIDENT AND EMERGENCY", + "OUTPATIENTS", + "IN-PATIENTS", + "SERVICES", + "COMMUNITY", + "HOME", + "PRIVATE", + "PHARMACY", + "SOCIAL CARE", + "LOCAL AUTHORITY", + "NATIONAL GOVERNMENT", + "OTHER" + ] + }, + "timeLag": { + "type": "string", + "enum": [ + "LESS 1 WEEK", + "1-2 WEEKS", + "2-4 WEEKS", + "1-2 MONTHS", + "2-6 MONTHS", + "MORE 6 MONTHS", + "VARIABLE", + "NO TIMELAG", + "NOT APPLICABLE", + 
"OTHER" + ] + }, + "dataUseLimitation": { + "type": "string", + "enum": [ + "GENERAL RESEARCH USE", + "COMMERCIAL RESEARCH USE", + "GENETIC STUDIES ONLY", + "NO GENERAL METHODS RESEARCH", + "NO RESTRICTION", + "GEOGRAPHICAL RESTRICTIONS", + "INSTITUTION SPECIFIC RESTRICTIONS", + "NOT FOR PROFIT USE", + "PROJECT SPECIFIC RESTRICTIONS", + "RESEARCH SPECIFIC RESTRICTIONS", + "USER SPECIFIC RESTRICTION", + "RESEARCH USE ONLY", + "NO LINKAGE" + ] + }, + "dataUseRequirements": { + "type": "string", + "enum": [ + "COLLABORATION REQUIRED", + "PROJECT SPECIFIC RESTRICTIONS", + "ETHICS APPROVAL REQUIRED", + "INSTITUTION SPECIFIC RESTRICTIONS", + "GEOGRAPHICAL RESTRICTIONS", + "PUBLICATION MORATORIUM", + "PUBLICATION REQUIRED", + "RETURN TO DATABASE OR RESOURCE", + "TIME LIMIT ON USE", + "DISCLOSURE CONTROL", + "NOT FOR PROFIT USE", + "USER SPECIFIC RESTRICTION" + ] + }, + "deliveryLeadTime": { + "type": "string", + "enum": ["LESS 1 WEEK", "1-2 WEEKS", "2-4 WEEKS", "1-2 MONTHS", "2-6 MONTHS", "MORE 6 MONTHS", "VARIABLE", "NOT APPLICABLE", "OTHER"] + }, + "standardisedDataModels": { + "type": "string", + "enum": [ + "HL7 FHIR", + "HL7 V2", + "HL7 CDA", + "HL7 CCOW", + "LOINC", + "DICOM", + "I2B2", + "IHE", + "OMOP", + "OPENEHR", + "SENTINEL", + "PCORNET", + "CDISC", + "NHS DATA DICTIONARY", + "NHS SCOTLAND DATA DICTIONARY", + "NHS WALES DATA DICTIONARY", + "LOCAL", + "OTHER" + ] + }, + "controlledVocabulary": { + "type": "string", + "enum": [ + "LOCAL", + "OPCS4", + "READ", + "SNOMED CT", + "SNOMED RT", + "DM PLUS D", + "DM+D", + "NHS NATIONAL CODES", + "NHS SCOTLAND NATIONAL CODES", + "NHS WALES NATIONAL CODES", + "ODS", + "LOINC", + "ICD10", + "ICD10CM", + "ICD10PCS", + "ICD9CM", + "ICD9", + "ICDO3", + "AMT", + "APC", + "ATC", + "CIEL", + "HPO", + "CPT4", + "DPD", + "DRG", + "HEMONC", + "JMDC", + "KCD7", + "MULTUM", + "NAACCR", + "NDC", + "NDFRT", + "OXMIS", + "RXNORM", + "RXNORM EXTENSION", + "SPL", + "OTHER" + ] + }, + "language": { + "type": "string", + "enum": [ + "aa", 
+ "ab", + "ae", + "af", + "ak", + "am", + "an", + "ar", + "as", + "av", + "ay", + "az", + "ba", + "be", + "bg", + "bh", + "bi", + "bm", + "bn", + "bo", + "br", + "bs", + "ca", + "ce", + "ch", + "co", + "cr", + "cs", + "cu", + "cv", + "cy", + "da", + "de", + "dv", + "dz", + "ee", + "el", + "en", + "eo", + "es", + "et", + "eu", + "fa", + "ff", + "fi", + "fj", + "fo", + "fr", + "fy", + "ga", + "gd", + "gl", + "gn", + "gu", + "gv", + "ha", + "he", + "hi", + "ho", + "hr", + "ht", + "hu", + "hy", + "hz", + "ia", + "id", + "ie", + "ig", + "ii", + "ik", + "io", + "is", + "it", + "iu", + "ja", + "jv", + "ka", + "kg", + "ki", + "kj", + "kk", + "kl", + "km", + "kn", + "ko", + "kr", + "ks", + "ku", + "kv", + "kw", + "ky", + "la", + "lb", + "lg", + "li", + "ln", + "lo", + "lt", + "lu", + "lv", + "mg", + "mh", + "mi", + "mk", + "ml", + "mn", + "mr", + "ms", + "mt", + "my", + "na", + "nb", + "nd", + "ne", + "ng", + "nl", + "nn", + "no", + "nr", + "nv", + "ny", + "oc", + "oj", + "om", + "or", + "os", + "pa", + "pi", + "pl", + "ps", + "pt", + "qu", + "rm", + "rn", + "ro", + "ru", + "rw", + "sa", + "sc", + "sd", + "se", + "sg", + "si", + "sk", + "sl", + "sm", + "sn", + "so", + "sq", + "sr", + "ss", + "st", + "su", + "sv", + "sw", + "ta", + "te", + "tg", + "th", + "ti", + "tk", + "tl", + "tn", + "to", + "tr", + "ts", + "tt", + "tw", + "ty", + "ug", + "uk", + "ur", + "uz", + "ve", + "vi", + "vo", + "wa", + "wo", + "xh", + "yi", + "yo", + "za", + "zh", + "zu" + ] + }, + "statisticalPopulationConstrained": { + "type": "string", + "enum": ["PERSONS", "EVENTS", "FINDINGS"] + } + } +} diff --git a/src/resources/dataset/utils/datasetonboarding.util.js b/src/resources/dataset/utils/datasetonboarding.util.js new file mode 100644 index 00000000..d8834a88 --- /dev/null +++ b/src/resources/dataset/utils/datasetonboarding.util.js @@ -0,0 +1,218 @@ +import _ from 'lodash'; +import constants from '../../utilities/constants.util'; +import teamController from '../../team/team.controller'; +import 
moment from 'moment'; + +const injectQuestionActions = (jsonSchema, userType, applicationStatus, role = '') => { + let formattedSchema = {}; + if (userType === constants.userTypes.CUSTODIAN) { + formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][role][applicationStatus] }; + } else { + //let test = JSON.stringify(constants.userQuestionActions[userType][applicationStatus]); + //questionActions: [{"key":"guidance","icon":"far fa-question-circle","color":"#475da7","toolTip":"Guidance","order":1}] + formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][applicationStatus] }; + } + return formattedSchema; +}; + +const getUserPermissionsForApplication = (application, userId, _id) => { + try { + let authorised = false, + isTeamMember = false, + userType = ''; + // Return default unauthorised with no user type if incorrect params passed + if (!application || !userId || !_id) { + return { authorised, userType }; + } + // Check if the user is a custodian team member and assign permissions if so + if (_.has(application.datasets[0], 'publisher.team')) { + isTeamMember = teamController.checkTeamPermissions('', application.datasets[0].publisher.team, _id); + } else if (_.has(application, 'publisherObj.team')) { + isTeamMember = teamController.checkTeamPermissions('', application.publisherObj.team, _id); + } + if (isTeamMember) { + userType = constants.userTypes.CUSTODIAN; + authorised = true; + } + // If user is not authenticated as a custodian, check if they are an author or the main applicant + if (application.applicationStatus === constants.applicationStatuses.INPROGRESS || _.isEmpty(userType)) { + if (application.authorIds.includes(userId) || application.userId === userId) { + userType = constants.userTypes.APPLICANT; + authorised = true; + } + } + return { authorised, userType }; + } catch (error) { + console.error(error); + return { authorised: false, userType: '' }; + } +}; + +const 
extractApplicantNames = questionAnswers => { + let fullnames = [], + autoCompleteLookups = { fullname: ['email'] }; + // spread questionAnswers to new var + let qa = { ...questionAnswers }; + // get object keys of questionAnswers + let keys = Object.keys(qa); + // loop questionAnswer keys + for (const key of keys) { + // get value of key + let value = qa[key]; + // split the key up for unique purposes + let [qId] = key.split('_'); + // check if key in lookup + let lookup = autoCompleteLookups[`${qId}`]; + // if key exists and it has an object do relevant data setting + if (typeof lookup !== 'undefined' && typeof value === 'object') { + switch (qId) { + case 'fullname': + fullnames.push(value.name); + break; + } + } + } + return fullnames; +}; + +const findQuestion = (questionsArr, questionId) => { + // 1. Define child object to allow recursive calls + let child; + // 2. Exit from function if no children are present + if (!questionsArr) return {}; + // 3. Iterate through questions in the current level to locate question by Id + for (const questionObj of questionsArr) { + // 4. Return the question if it is located + if (questionObj.questionId === questionId) return questionObj; + // 5. Recursively call the find question function on child elements to find question Id + if (typeof questionObj.input === 'object' && typeof questionObj.input.options !== 'undefined') { + questionObj.input.options + .filter(option => { + return typeof option.conditionalQuestions !== 'undefined' && option.conditionalQuestions.length > 0; + }) + .forEach(option => { + if(!child) { + child = findQuestion(option.conditionalQuestions, questionId); + } + }); + } + // 6. Return the child question + if (child) return child; + } +}; + +const updateQuestion = (questionsArr, question) => { + // 1. Extract question Id + let { questionId } = question; + let found = false; + // 2. 
Recursive function to iterate through each level of questions + questionsArr.forEach(function iter(currentQuestion, index, currentArray) { + // 3. Prevent unnecessary computation by exiting loop if question was found + if (found) { + return; + } + // 4. If the current question matches the target question, replace with updated question + if (currentQuestion.questionId === questionId) { + currentArray[index] = { ...question }; + found = true; + return; + } + // 5. If target question has not been identified, recall function with child questions + if (_.has(currentQuestion, 'input.options')) { + currentQuestion.input.options.forEach(option => { + if (_.has(option, 'conditionalQuestions')) { + Array.isArray(option.conditionalQuestions) && option.conditionalQuestions.forEach(iter); + } + }); + } + }); + // 6. Return the updated question array + return questionsArr; +}; + +const setQuestionState = (question, questionAlert, readOnly) => { + // 1. Find input object for question + const { input = {} } = question; + // 2. Assemble question in readOnly true/false mode + question = { + ...question, + input: { + ...input, + questionAlert, + readOnly, + }, + }; + // 3. Recursively set readOnly mode for children + if (_.has(question, 'input.options')) { + question.input.options.forEach(function iter(currentQuestion) { + // 4. If current question contains an input, set readOnly mode + if (_.has(currentQuestion, 'input')) { + currentQuestion.input.readOnly = readOnly; + } + // 5. Recall the iteration with each child question + if (_.has(currentQuestion, 'conditionalQuestions')) { + currentQuestion.conditionalQuestions.forEach(option => { + if (_.has(option, 'input.options')) { + Array.isArray(option.input.options) && option.input.options.forEach(iter); + } else { + option.input.readOnly = readOnly; + } + }); + } + }); + } + return question; +}; + +const buildQuestionAlert = (userType, iterationStatus, completed, amendment, user, publisher) => { + // 1. 
Use a try catch to prevent conditions where the combination of params lead to no question alert required + try { + // 2. Static mapping allows us to determine correct flag to show based on scenario (params) + const questionAlert = { + ...constants.navigationFlags[userType][iterationStatus][completed], + }; + // 3. Extract data from amendment + let { requestedBy, updatedBy, dateRequested, dateUpdated } = amendment; + // 4. Update audit fields to 'you' if the action was performed by the current user + requestedBy = matchCurrentUser(user, requestedBy); + updatedBy = matchCurrentUser(user, updatedBy); + // 5. Update the generic question alerts to match the scenario + let relevantActioner = !_.isNil(updatedBy) ? updatedBy : userType === constants.userTypes.CUSTODIAN ? requestedBy : publisher; + questionAlert.text = questionAlert.text.replace( + '#NAME#', + relevantActioner + ); + questionAlert.text = questionAlert.text.replace( + '#DATE#', + !_.isNil(dateUpdated) + ? moment(dateUpdated).format('Do MMM YYYY') + : moment(dateRequested).format('Do MMM YYYY') + ); + // 6. Return the built question alert + return questionAlert; + } catch (err) { + return {}; + } +}; + +const matchCurrentUser = (user, auditField) => { + // 1. Extract the name of the current user + const { firstname, lastname } = user; + // 2. Compare current user to audit field supplied e.g. 'updated by' + if (auditField === `${firstname} ${lastname}`) { + // 3. Update audit field value to 'you' if name matches current user + return 'You'; + } + // 4.
Return updated audit field + return auditField; +}; + +export default { + injectQuestionActions: injectQuestionActions, + getUserPermissionsForApplication: getUserPermissionsForApplication, + extractApplicantNames: extractApplicantNames, + findQuestion: findQuestion, + updateQuestion: updateQuestion, + buildQuestionAlert: buildQuestionAlert, + setQuestionState: setQuestionState, +}; diff --git a/src/resources/dataset/v1/dataset.route.js b/src/resources/dataset/v1/dataset.route.js index 0f04cf63..5a6b639e 100644 --- a/src/resources/dataset/v1/dataset.route.js +++ b/src/resources/dataset/v1/dataset.route.js @@ -2,9 +2,10 @@ import express from 'express'; import { Data } from '../../tool/data.model'; import { loadDataset, loadDatasets } from './dataset.service'; import { getAllTools } from '../../tool/data.repository'; -import _ from 'lodash'; +import { isEmpty, isNil } from 'lodash'; import escape from 'escape-html'; import { Course } from '../../course/course.model'; +import { filtersService } from '../../filters/dependency'; import * as Sentry from '@sentry/node'; const router = express.Router(); const rateLimit = require('express-rate-limit'); @@ -33,7 +34,10 @@ router.post('/', async (req, res) => { throw new Error('cache error test'); } - loadDatasets(parsedBody.override || false); + loadDatasets(parsedBody.override || false).then(() => { + filtersService.optimiseFilters('dataset'); + }); + return res.status(200).json({ success: true, message: 'Caching started' }); } catch (err) { Sentry.captureException(err); @@ -69,7 +73,7 @@ router.get('/pidList/', datasetLimiter, async (req, res) => { // @access Public router.get('/:datasetID', async (req, res) => { let { datasetID = '' } = req.params; - if (_.isEmpty(datasetID)) { + if (isEmpty(datasetID)) { return res.status(400).json({ success: false }); } @@ -80,17 +84,17 @@ router.get('/:datasetID', async (req, res) => { let dataVersion = await Data.findOne({ datasetid: datasetID }); // if found then set the datasetID 
to the pid of the found dataset - if (!_.isNil(dataVersion)) { + if (!isNil(dataVersion)) { datasetID = dataVersion.pid; } // find the active dataset using the pid let dataset = await Data.findOne({ pid: datasetID, activeflag: 'active' }); - if (_.isNil(dataset)) { + if (isNil(dataset)) { // if no active version found look for the next latest version using the pid and set the isDatasetArchived flag to true dataset = await Data.findOne({ pid: datasetID, activeflag: 'archive' }).sort({ createdAt: -1 }); - if (_.isNil(dataset)) { + if (isNil(dataset)) { try { // if still not found then look up the MDC for the dataset dataset = await loadDataset(datasetID); @@ -160,6 +164,31 @@ router.get('/:datasetID', async (req, res) => { }); }); + //Check for datasetv2.enrichmentAndLinkage.qualifiedRelation + if (!isEmpty(dataset.datasetv2)) { + let qualifiedRelation = dataset.datasetv2.enrichmentAndLinkage.qualifiedRelation; + let newListofQualifiedRelation = []; + for (const relation of qualifiedRelation) { + if (relation.toLowerCase() === 'all') { + let relatedDatasets = await Data.find( + { + 'datasetfields.publisher': dataset.datasetfields.publisher, + activeflag: 'active', + }, + { name: 1 } + ).lean(); + + for (const datasets of relatedDatasets) { + newListofQualifiedRelation.push(datasets.name); + } + //Paul - Future, will need to update to use publisherID if ever moving dataset to its own collection + } + } + + const qualifiedRelationFiltered = qualifiedRelation.filter(relation => relation.toLowerCase() !== 'all'); + dataset.datasetv2.enrichmentAndLinkage.qualifiedRelation = [...qualifiedRelationFiltered, ...newListofQualifiedRelation]; + } + return res.json({ success: true, isLatestVersion, isDatasetArchived, data: dataset }); }); diff --git a/src/resources/dataset/v1/dataset.service.js b/src/resources/dataset/v1/dataset.service.js index 76358d45..199dff05 100644 --- a/src/resources/dataset/v1/dataset.service.js +++ b/src/resources/dataset/v1/dataset.service.js @@ -3,6 
+3,7 @@ import { MetricsData } from '../../stats/metrics.model'; import axios from 'axios'; import * as Sentry from '@sentry/node'; import { v4 as uuidv4 } from 'uuid'; +import { PublisherModel } from '../../publisher/publisher.model'; export async function loadDataset(datasetID) { var metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; @@ -197,7 +198,7 @@ export async function loadDataset(datasetID) { } export async function loadDatasets(override) { - console.error('Starting run at ' + Date()); + console.log('Starting run at ' + Date()); let metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; let datasetsMDCCount = await new Promise(function (resolve, reject) { @@ -224,9 +225,13 @@ export async function loadDatasets(override) { if (datasetsMDCCount === 'Update failed') return; - //Compare counts from HDR and MDC, if greater drop of 10%+ then stop process and email support queue + // Compare counts from HDR and MDC, if greater drop of 10%+ then stop process and email support queue var datasetsHDRCount = await Data.countDocuments({ type: 'dataset', activeflag: 'active' }); + // Get active custodians on HDR Gateway + const publishers = await PublisherModel.find().select('name').lean(); + const onboardedCustodians = publishers.map(publisher => publisher.name); + if ((datasetsMDCCount / datasetsHDRCount) * 100 < 90 && !override) { Sentry.addBreadcrumb({ category: 'Caching', @@ -239,7 +244,7 @@ export async function loadDatasets(override) { return; } - //datasetsMDCCount = 10; //For testing to limit the number brought down + //datasetsMDCCount = 1; //For testing to limit the number brought down var datasetsMDCList = await new Promise(function (resolve, reject) { axios @@ -275,7 +280,7 @@ export async function loadDatasets(override) { level: Sentry.Severity.Error, }); Sentry.captureException(err); - console.error("Unable to get metadata quality value " + err.message); + console.error('Unable to 
get metadata quality value ' + err.message); }); const phenotypesList = await axios @@ -287,7 +292,7 @@ export async function loadDatasets(override) { level: Sentry.Severity.Error, }); Sentry.captureException(err); - console.error("Unable to get metadata quality value " + err.message); + console.error('Unable to get metadata quality value ' + err.message); }); const dataUtilityList = await axios @@ -299,7 +304,7 @@ export async function loadDatasets(override) { level: Sentry.Severity.Error, }); Sentry.captureException(err); - console.error("Unable to get data utility " + err.message); + console.error('Unable to get data utility ' + err.message); }); var datasetsMDCIDs = []; @@ -422,6 +427,10 @@ export async function loadDatasets(override) { let datasetv2Object = populateV2datasetObject(datasetV2.data.items); + // Detect if dataset uses 5 Safes form for access + const is5Safes = onboardedCustodians.includes(datasetMDC.publisher); + const hasTechnicalDetails = technicaldetails.length > 0; + if (datasetHDR) { //Edit if (!datasetHDR.pid) { @@ -454,7 +463,7 @@ export async function loadDatasets(override) { let keywordArray = splitString(datasetMDC.keywords); let physicalSampleAvailabilityArray = splitString(datasetMDC.physicalSampleAvailability); let geographicCoverageArray = splitString(datasetMDC.geographicCoverage); - + // Update dataset await Data.findOneAndUpdate( { datasetid: datasetMDC.id }, { @@ -463,6 +472,8 @@ export async function loadDatasets(override) { name: datasetMDC.title, description: datasetMDC.description, source: 'HDRUK MDC', + is5Safes, + hasTechnicalDetails, activeflag: 'active', license: datasetMDC.license, tags: { @@ -545,6 +556,8 @@ export async function loadDatasets(override) { data.type = 'dataset'; data.activeflag = 'active'; data.source = 'HDRUK MDC'; + data.is5Safes = is5Safes; + data.hasTechnicalDetails = hasTechnicalDetails; data.name = datasetMDC.title; data.description = datasetMDC.description; @@ -610,7 +623,6 @@ export async function 
loadDatasets(override) { ); saveUptime(); - console.log('Update Completed at ' + Date()); return; } diff --git a/src/resources/filters/dependency.js b/src/resources/filters/dependency.js new file mode 100644 index 00000000..76db019a --- /dev/null +++ b/src/resources/filters/dependency.js @@ -0,0 +1,8 @@ +import FiltersRepository from './filters.repository'; +import FiltersService from './filters.service'; +import DatasetRepository from '../dataset/dataset.repository'; + +const datasetRepository = new DatasetRepository(); + +export const filtersRepository = new FiltersRepository(); +export const filtersService = new FiltersService(filtersRepository, datasetRepository); diff --git a/src/resources/filters/filters.controller.js b/src/resources/filters/filters.controller.js new file mode 100644 index 00000000..59c1e888 --- /dev/null +++ b/src/resources/filters/filters.controller.js @@ -0,0 +1,43 @@ +import Controller from '../base/controller'; + +export default class FiltersController extends Controller { + constructor(filtersService) { + super(filtersService); + this.filtersService = filtersService; + } + + async getFilters(req, res) { + try { + // Extract id parameter from query string + const { id } = req.params; + // If no id provided, it is a bad request + if (!id) { + return res.status(400).json({ + success: false, + message: 'You must provide a filters identifier', + }); + } + // Find the filters + let filters = await this.filtersService.getFilters(id, req.query); + // Return if no filters found + if (!filters) { + return res.status(404).json({ + success: false, + message: 'A filter could not be found with the provided id', + }); + } + // Return the filters + return res.status(200).json({ + success: true, + data: filters, + }); + } catch (err) { + // Return error response if something goes wrong + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'A server error occurred, please try again', + }); + } + } +} diff --git 
a/src/resources/filters/filters.entity.js b/src/resources/filters/filters.entity.js new file mode 100644 index 00000000..37cc3ec8 --- /dev/null +++ b/src/resources/filters/filters.entity.js @@ -0,0 +1,43 @@ +import Entity from '../base/entity'; +import * as mapper from './filters.mapper'; +import { isEmpty, isNil } from 'lodash'; +import { findNodeInTree, formatFilterOptions, updateTree } from './utils/filters.util'; + +export default class FiltersClass extends Entity { + constructor(obj) { + super(); + Object.assign(this, obj); + } + + mapDto() { + if (!this.id) { + console.error('Failed to load filters'); + return; + } + // 1. the data tree we want to update + let filters = mapper[`${this.id}Filters`]; + // 2. this.keys represents the filters data in db for the id + const filterKeys = Object.keys(this.keys); + // 3. avoid expensive call if no data present + if(!isEmpty(this.keys)) { + // 4. loop over filterKeys + for (const filterKey of filterKeys) { + let newFilterOptions = []; + // 5. track new variable for filter values from our db + let filterValues = this.keys[filterKey]; + // 6. check if filterKey exists in our tree, return {} or undefined + let nodeItem = findNodeInTree(filters, filterKey); + // 7. if exists find and update tree + if (!isNil(nodeItem) && filterValues.length) { + // 8. build the new options for the filters within tree + newFilterOptions = formatFilterOptions(filterValues); + // 9.
insert new options into tree + filters = updateTree(filters, filterKey, newFilterOptions); + } + } + } + return filters; + } +} + + diff --git a/src/resources/filters/filters.mapper.js b/src/resources/filters/filters.mapper.js new file mode 100644 index 00000000..b1ffb1ad --- /dev/null +++ b/src/resources/filters/filters.mapper.js @@ -0,0 +1,502 @@ +export const datasetFilters = [ + { + id: 1, + label: 'Publisher', + key: 'publisher', + dataPath: 'datasetfields.publisher', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 2, + label: 'Keywords', + key: 'features', + alias: 'datasetfeatures', + dataPath: 'tags.features', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 3, + label: 'Phenotype', + key: 'phenotypes', + dataPath: 'datasetfields.phenotypes', + type: 'elementMatch', + matchField: 'name', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 4, + label: 'Coverage', + key: 'coverage', + dataPath: 'datasetv2.coverage', + tooltip: 'The geographical area covered by the dataset.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 5, + label: 'Spatial', + key: 'spatial', + dataPath: 'datasetv2.coverage.spatial', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: ['England'], + }, + { + id: 6, + label: 'Physical sample availability', + key: 'physicalSampleAvailability', + dataPath: 'datasetv2.coverage.physicalSampleAvailability', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 7, + label: 'Follow up', + key: 'followup', + dataPath: 'datasetv2.coverage.followup', + type: 'contains', + tooltip: 'The typical 
time span that a patient appears in the dataset', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 8, + label: 'Provenance', + key: 'provenancev2', + dataPath: 'datasetv2.provenance', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 9, + label: 'Purpose', + key: 'purpose', + dataPath: 'datasetv2.provenance.origin.purpose', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 10, + label: 'Source', + key: 'source', + dataPath: 'datasetv2.provenance.origin.source', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 11, + label: 'Collection situation', + key: 'collectionSituation', + dataPath: 'datasetv2.provenance.origin.collectionSituation', + type: 'contains', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 12, + label: 'Accrual periodicity', + key: 'accrualPeriodicity', + dataPath: 'datasetv2.provenance.temporal.accrualPeriodicity', + type: 'contains', + tooltip: 'The frequency of publishing.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 13, + label: 'Time lag ', + key: 'timeLag', + dataPath: 'datasetv2.provenance.temporal.timeLag', + type: 'contains', + tooltip: 'The typical time-lag between an event and the data for that event appearing in the dataset.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 14, + label: 'Access', + key: 'accessibility', + dataPath: 'datasetv2.accessibility', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 15, + label: 'Delivery lead time', + key: 'deliveryLeadTime', + dataPath: 
'datasetv2.accessibility.access.deliveryLeadTime', + type: 'contains', + tooltip: 'Please provide an indication of the typical processing times based on the types of requests typically received.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 16, + label: 'Jurisdiction', + key: 'jurisdiction', + dataPath: 'datasetv2.accessibility.access.jurisdiction', + type: 'contains', + tooltip: + 'Select the country/state under whose laws the data subjects’ data is collected, processed and stored. Select all that apply.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 17, + label: 'Format and standards', + key: 'formatAndStandards', + dataPath: 'datasetv2.accessibility.formatAndStandards', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 18, + label: 'Vocabulary encoding scheme', + key: 'vocabularyEncodingScheme', + dataPath: 'datasetv2.accessibility.formatAndStandards.vocabularyEncodingScheme', + type: 'contains', + tooltip: 'Terminologies, ontologies and controlled vocabularies being used by the dataset.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 19, + label: 'Conforms to', + key: 'conformsTo', + dataPath: 'datasetv2.accessibility.formatAndStandards.conformsTo', + type: 'contains', + tooltip: 'Standardised data models that the dataset has been stored in or transformed to.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 20, + label: 'Language', + key: 'language', + dataPath: 'datasetv2.accessibility.formatAndStandards.language', + type: 'contains', + tooltip: 'Standardised data models that the dataset has been stored in or transformed to.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 21, + label: 
'Data utility', + key: 'datautility', + dataPath: 'datasetfields.datautility', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 22, + label: 'Documentation', + key: 'documentation', + dataPath: 'datasetfields.datautility.documentation', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 23, + label: 'Additional documentation and support', + key: 'availability_of_additional_documentation_and_support', + dataPath: 'datasetfields.datautility.availability_of_additional_documentation_and_support', + type: 'contains', + tooltip: 'Available dataset documentation in addition to the data dictionary.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 24, + label: 'Data model', + key: 'data_model', + dataPath: 'datasetfields.datautility.data_model', + type: 'contains', + tooltip: 'Availability of clear, documented data model.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 25, + label: 'Data dictionary', + key: 'data_dictionary', + dataPath: 'datasetfields.datautility.data_dictionary', + type: 'contains', + tooltip: 'Provided documented data dictionary and terminologies.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 26, + label: 'Provenance', + key: 'provenance', + dataPath: 'datasetfields.datautility.provenance', + type: 'contains', + tooltip: 'Clear descriptions of source and history of the dataset, providing a ‘transparent data pipeline’.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 27, + label: 'Technical quality', + key: 'technicalquality', + dataPath: 'datasetfields.datautility.technicalquality', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 28, + label: 'Data 
Quality Management Process', + key: 'data_quality_management_process', + dataPath: 'datasetfields.datautility.data_quality_management_process', + type: 'contains', + tooltip: 'Available dataset documentation in addition to the data dictionary.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 29, + label: 'Access and provision', + key: 'accessandprovision', + dataPath: 'datasetfields.datautility.accessandprovision', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 30, + label: 'Allowable uses', + key: 'allowable_uses', + dataPath: 'datasetfields.datautility.allowable_uses', + type: 'contains', + tooltip: 'Allowable dataset usages as per the licencing agreement.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 31, + label: 'Time lag', + key: 'time_lag', + dataPath: 'datasetfields.datautility.time_lag', + type: 'contains', + tooltip: 'Lag between the data being collected and added to the dataset.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 32, + label: 'Timeliness', + key: 'timeliness', + dataPath: 'datasetfields.datautility.timeliness', + type: 'contains', + tooltip: 'Average data access request timeframe.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 33, + label: 'Value and interest', + key: 'valueandinterest', + dataPath: 'datasetfields.datautility.valueandinterest', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 34, + label: 'Linkages', + key: 'linkages', + dataPath: 'datasetfields.datautility.linkages', + type: 'contains', + tooltip: 'Ability to link with other datasets.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 35, + label: 
'Data Enrichments', + key: 'data_enrichments', + dataPath: 'datasetfields.datautility.data_enrichments', + type: 'contains', + tooltip: 'Data sources enriched with annotations, image labels, phenomes, derivations, NLP derived data labels.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + { + id: 36, + label: 'Coverage', + key: 'dataUtility.coverage', + dataPath: 'datasetfields.datautility.coverage', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [ + { + id: 37, + label: 'Pathway coverage', + key: 'pathway_coverage', + dataPath: 'datasetfields.datautility.pathway_coverage', + type: 'contains', + tooltip: 'Representation of multi-disciplinary healthcare data.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + { + id: 38, + label: 'Length of follow up', + key: 'length_of_follow_up', + dataPath: 'datasetfields.datautility.length_of_follow_up', + type: 'contains', + tooltip: 'Data sources enriched with annotations, image labels, phenomes, derivations, NLP derived data labels.', + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [], + highlighted: [], + }, + ], + }, + ], + }, + { + id: 39, + label: 'Technical Metadata', + key: 'technicaldetails', + dataPath: 'hasTechnicalDetails', + type: 'boolean', + tooltip: null, + closed: true, + isSearchable: false, + selectedCount: 0, + filters: [{ id: 999, label: 'Contains Technical Metadata', value: 'Contains Technical Metadata', checked: false }], + highlighted: ['contains technical metadata'], + }, +]; diff --git a/src/resources/filters/filters.model.js b/src/resources/filters/filters.model.js new file mode 100644 index 00000000..488ca9c1 --- /dev/null +++ b/src/resources/filters/filters.model.js @@ -0,0 +1,19 @@ +import { model, Schema } from 'mongoose'; + +import FiltersClass from './filters.entity'; + +const filtersSchema = new Schema( + { + id: String, + 
keys: { type: Schema.Types.Mixed, default: {} }, + }, + { + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); + +// Load entity class +filtersSchema.loadClass(FiltersClass); + +export const Filters = model('Filters', filtersSchema); diff --git a/src/resources/filters/filters.repository.js b/src/resources/filters/filters.repository.js new file mode 100644 index 00000000..c241258e --- /dev/null +++ b/src/resources/filters/filters.repository.js @@ -0,0 +1,22 @@ +import Repository from '../base/repository'; +import { Filters } from './filters.model'; + +export default class FiltersRepository extends Repository { + constructor() { + super(Filters); + this.filters = Filters; + } + + async getFilters(id, query = {}, options) { + query = { ...query, id }; + return this.findOne(query, options); + } + + async updateFilterSet(filters, type) { + await Filters.findOneAndUpdate({ id: type }, { keys: filters }, { upsert: true }, (err) => { + if(err) { + console.error(err.message); + } + }); + } +} diff --git a/src/resources/filters/filters.route.js b/src/resources/filters/filters.route.js new file mode 100644 index 00000000..3560bf40 --- /dev/null +++ b/src/resources/filters/filters.route.js @@ -0,0 +1,13 @@ +import express from 'express'; +import FiltersController from './filters.controller'; +import { filtersService } from './dependency'; + +const router = express.Router(); +const filtersController = new FiltersController(filtersService); + +// @route GET /api/v2/filters/id +// @desc Returns a filter selection based on filter ID provided +// @access Public +router.get('/:id', (req, res) => filtersController.getFilters(req, res)); + +module.exports = router; \ No newline at end of file diff --git a/src/resources/filters/filters.service.js b/src/resources/filters/filters.service.js new file mode 100644 index 00000000..26cdc10a --- /dev/null +++ b/src/resources/filters/filters.service.js @@ -0,0 +1,126 @@ +import { isArray, isEmpty, isNil, uniq } from 'lodash'; 
+import helper from '../utilities/helper.util'; + +export default class FiltersService { + constructor(filtersRepository, datasetRepository) { + this.filtersRepository = filtersRepository; + this.datasetRepository = datasetRepository; + } + + async getFilters(id, query = {}) { + // 1. Get filters from repository for the entity type and query provided + const options = { lean: false }; + let filters = await this.filtersRepository.getFilters(id, query, options); + if(filters) { + filters = filters.mapDto(); + } + return filters; + } + + async optimiseFilters(type) { + // 1. Build filters from type using entire Db collection + const filters = await this.buildFilters(type, { activeflag: 'active' }); + // 2. Save updated filter values to filter cache + //await this.saveFilters(filters, type); + await this.filtersRepository.updateFilterSet(filters, type); + } + + async buildFilters(type, query = {}, useCache = false) { + // 1. Use cached filters if instructed, need to remove type when all v2 filters come on + if (useCache && type === 'dataset') { + const options = { lean: true }; + const { keys: filters = {} } = await this.filtersRepository.getFilters(type, {}, options) || {}; + return filters; + } + + let filters = {}, + sortedFilters = {}, + entities = [], + fields = ''; + + // 2. Query Db for required entity if array of entities has not been passed + switch (type) { + case 'dataset': + // Get minimal payload to build filters + fields = `hasTechnicalDetails, + tags.features, + datasetfields.datautility,datasetfields.publisher,datasetfields.phenotypes, + datasetv2.coverage,datasetv2.provenance.origin,datasetv2.provenance.temporal,datasetv2.accessibility.access,datasetv2.accessibility.formatAndStandards`; + entities = await this.datasetRepository.getDatasets({ ...query, fields }, { lean: true }); + break; + } + // 3. Loop over each entity + entities.forEach(entity => { + // 4. 
Get the filter values provided by each entity + const filterValues = this.getFilterValues(entity, type); + // 5. Iterate through each filter value/property + for (const key in filterValues) { + let values = []; + // 6. Normalise string and array data by maintaining only arrays in 'values' + if (isArray(filterValues[key])) { + if (!isEmpty(filterValues[key]) && !isNil(filterValues[key])) { + values = filterValues[key].filter(value => !isEmpty(value.toString().trim())); + } + } else { + if (!isEmpty(filterValues[key]) && !isNil(filterValues[key])) { + values = [filterValues[key]]; + } + } + // 7. Populate running filters with all values + if (!filters[key]) { + filters[key] = [...values]; + } else { + filters[key] = [...filters[key], ...values]; + } + } + }); + // 8. Iterate through each filter + Object.keys(filters).forEach(filterKey => { + // 9. Set filter values to title case and remove white space + filters[filterKey] = filters[filterKey].map(value => helper.toTitleCase(value.toString().trim())); + // 10. Distinct filter values + const distinctFilter = uniq(filters[filterKey]); + // 11. Sort filter values and update final object + sortedFilters[filterKey] = distinctFilter.sort(function (a, b) { + return a.toString().toLowerCase().localeCompare(b.toString().toLowerCase()); + }); + }); + return sortedFilters; + } + + getFilterValues(entity, type) { + let filterValues = {}; + // 1. Switch between entity type for varying filters + switch (type) { + case 'dataset': + // 2. Extract all properties used for filtering + if (isEmpty(entity.datasetv2)) { + delete entity.datasetv2; + } + const { + tags: { features = [] } = {}, + datasetfields: { datautility = {}, publisher = '', phenotypes = [] } = {}, + datasetv2: { + coverage = {}, + provenance: { origin = {}, temporal = {} } = {}, + accessibility: { access = {}, formatAndStandards = {} }, + } = { coverage: {}, provenance: {}, accessibility: {} }, + } = entity; + // 3. 
Create flattened filter props object + filterValues = { + publisher, + phenotypes: [...phenotypes.map(phenotype => phenotype.name)], + features, + ...datautility, + ...coverage, + ...origin, + ...temporal, + ...access, + ...formatAndStandards, + }; + break; + } + // 4. Return filter values + return filterValues; + } +} diff --git a/src/resources/filters/utils/filters.util.js b/src/resources/filters/utils/filters.util.js new file mode 100644 index 00000000..5991b120 --- /dev/null +++ b/src/resources/filters/utils/filters.util.js @@ -0,0 +1,51 @@ +import { v4 as uuidv4 } from 'uuid'; + +export const findNodeInTree = (tree, key) => { + // 1. find if key matches //datasetFeatures + let found = tree.find(node => node.alias === key || node.key === key); + // 2. if not found do while + if (!found) { + let i = 0; + // 3. make sure current tree loop has a length + while(!found && i < tree.length) { + // 4. check current iteration has filters to avoid expense recursive call + if (tree[i].filters && tree[i].filters.length) { + // 5. assign recursive call to found + found = findNodeInTree(tree[i].filters, key); + } + // 6. increment count of i + i++; + } + } + // 7. return node || recursive call + return found; +}; + +export const updateTree = (tree, key, values) => { + // 1. declare iter + let iter = () => {}; + // 2. loop tree with callback + tree.forEach(iter = (node) => { + // 3. if found update filters + if (node.key === key) { + // 5. set filter values + node.filters = values; + } + // 6. if has filters recall iter with new filters + Array.isArray(node.filters) && node.filters.forEach(iter); + }); + + return tree; +} + +export const formatFilterOptions = (filters) => { + // 1. 
map over the filters and build new options to return + return [...filters].map((value) => { + return { + id: uuidv4(), + label: value, + value: value, + checked: false + } + }); +} diff --git a/src/resources/message/message.model.js b/src/resources/message/message.model.js index d34eddfd..61502ec2 100644 --- a/src/resources/message/message.model.js +++ b/src/resources/message/message.model.js @@ -22,6 +22,7 @@ const MessageSchema = new Schema( 'added collection', 'review', 'data access request', + 'data access request received', 'data access request unlinked', 'team', 'team unlinked', @@ -29,6 +30,11 @@ const MessageSchema = new Schema( 'workflow' ], }, + publisherName: { + type: String, + default: '', + trim: true + }, createdBy: { type: Schema.Types.ObjectId, ref: 'User', diff --git a/src/resources/publisher/publisher.controller.js b/src/resources/publisher/publisher.controller.js index 1d858d21..52529268 100644 --- a/src/resources/publisher/publisher.controller.js +++ b/src/resources/publisher/publisher.controller.js @@ -38,14 +38,12 @@ module.exports = { 'datasetfields.publisher': req.params.id, }) .populate('publisher') - .select( - 'datasetid name description datasetfields.abstract _id datasetfields.publisher datasetfields.contactPoint publisher' - ); + .select('datasetid name description datasetfields.abstract _id datasetfields.publisher datasetfields.contactPoint publisher'); if (!datasets) { return res.status(404).json({ success: false }); } // 2. Map datasets to flatten datasetfields nested object - datasets = datasets.map((dataset) => { + datasets = datasets.map(dataset => { let { _id, datasetid: datasetId, @@ -81,31 +79,23 @@ module.exports = { try { // 1. Deconstruct the request let { _id } = req.user; + // 2. 
Lookup publisher team - const publisher = await PublisherModel.findOne({ - name: req.params.id, - }).populate('team', 'members'); + const publisher = await PublisherModel.findOne({ name: req.params.id }).populate('team', 'members').lean(); if (!publisher) { return res.status(404).json({ success: false }); } // 3. Check the requesting user is a member of the custodian team let found = false; - if (_.has(publisher.toObject(), 'team.members')) { - let { members } = publisher.team.toObject(); - found = members.some((el) => el.memberid.toString() === _id.toString()); + if (_.has(publisher, 'team.members')) { + let { members } = publisher.team; + found = members.some(el => el.memberid.toString() === _id.toString()); } - if (!found) - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + if (!found) return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); //Check if current use is a manager - let isManager = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - publisher.team.toObject(), - _id - ); + let isManager = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, publisher.team, _id); let applicationStatus = ['inProgress']; //If the current user is not a manager then push 'Submitted' into the applicationStatus array @@ -121,14 +111,12 @@ module.exports = { let applications = await DataRequestModel.find({ $and: [ { - $or: [ - { dataSetId: { $in: datasetIds } }, - { datasetIds: { $elemMatch: { $in: datasetIds } } }, - ], + $or: [{ dataSetId: { $in: datasetIds } }, { datasetIds: { $elemMatch: { $in: datasetIds } } }], }, { applicationStatus: { $nin: applicationStatus } }, ], }) + .select('-jsonSchema -questionAnswers -files') .sort({ updatedAt: -1 }) .populate([ { @@ -144,15 +132,16 @@ module.exports = { }, }, }, - { - path: 'workflow.steps.reviewers', - select: 'firstname lastname' - } - ]); + { + path: 'workflow.steps.reviewers', + select: 'firstname lastname', + }, + ]) + .lean(); if 
(!isManager) { - applications = applications.filter((app) => { - let { workflow = {} } = app.toObject(); + applications = applications.filter(app => { + let { workflow = {} } = app; if (_.isEmpty(workflow)) { return app; } @@ -167,9 +156,7 @@ module.exports = { }); let elapsedSteps = [...steps].slice(0, activeStepIndex + 1); - let found = elapsedSteps.some((step) => - step.reviewers.some((reviewer) => reviewer._id.equals(_id)) - ); + let found = elapsedSteps.some(step => step.reviewers.some(reviewer => reviewer._id.equals(_id))); if (found) { return app; @@ -179,22 +166,14 @@ module.exports = { // 6. Append projectName and applicants let modifiedApplications = [...applications] - .map((app) => { - return datarequestController.createApplicationDTO( - app.toObject(), - constants.userTypes.CUSTODIAN, - _id.toString() - ); + .map(app => { + return datarequestController.createApplicationDTO(app, constants.userTypes.CUSTODIAN, _id.toString()); }) .sort((a, b) => b.updatedAt - a.updatedAt); - let avgDecisionTime = datarequestController.calculateAvgDecisionTime( - applications - ); + let avgDecisionTime = datarequestController.calculateAvgDecisionTime(applications); // 7. Return all applications - return res - .status(200) - .json({ success: true, data: modifiedApplications, avgDecisionTime, canViewSubmitted: isManager }); + return res.status(200).json({ success: true, data: modifiedApplications, avgDecisionTime, canViewSubmitted: isManager }); } catch (err) { console.error(err.message); return res.status(500).json({ @@ -235,41 +214,26 @@ module.exports = { } // 2. Check the requesting user is a member of the team let { _id: userId } = req.user; - let authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - workflows[0].publisher.team.toObject(), - userId - ); + let authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, workflows[0].publisher.team.toObject(), userId); // 3. 
If not return unauthorised if (!authorised) { return res.status(401).json({ success: false }); } // 4. Build workflows - workflows = workflows.map((workflow) => { - let { - active, - _id, - id, - workflowName, - version, - steps, - applications = [], - } = workflow.toObject(); + workflows = workflows.map(workflow => { + let { active, _id, id, workflowName, version, steps, applications = [] } = workflow.toObject(); let formattedSteps = [...steps].reduce((arr, item) => { let step = { ...item, - displaySections: [...item.sections].map(section => constants.darPanelMapper[section]) - } + displaySections: [...item.sections].map(section => constants.darPanelMapper[section]), + }; arr.push(step); return arr; }, []); - applications = applications.map((app) => { + applications = applications.map(app => { let { aboutApplication = {}, _id } = app; - if(typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(aboutApplication) || {}; - } let { projectName = 'No project name' } = aboutApplication; return { projectName, _id }; }); @@ -298,4 +262,4 @@ module.exports = { }); } }, -}; \ No newline at end of file +}; diff --git a/src/resources/publisher/publisher.model.js b/src/resources/publisher/publisher.model.js index bfc2dc2e..5dff8732 100644 --- a/src/resources/publisher/publisher.model.js +++ b/src/resources/publisher/publisher.model.js @@ -25,6 +25,23 @@ const PublisherSchema = new Schema( type: Boolean, default: false, }, + publisherDetails: { + name: String, + logo: String, + description: String, + contactPoint: String, + memberOf: String, + accessRights: [String], + deliveryLeadTime: String, + accessService: String, + accessRequestCost: String, + dataUseLimitation: [String], + dataUseRequirements: [String], + }, + mdcFolderId: String, + rorOrgId: String, + gridAcId: String, + allowAccessRequestManagement: { type: Boolean, default: false }, }, { toJSON: { virtuals: true }, diff --git a/src/resources/search/filter.route.js 
b/src/resources/search/filter.route.js index a0d18626..9b7cdccd 100644 --- a/src/resources/search/filter.route.js +++ b/src/resources/search/filter.route.js @@ -1,5 +1,7 @@ import express from 'express'; import { getObjectFilters, getFilter } from './search.repository'; +import { filtersService } from '../filters/dependency'; +import { isEqual } from 'lodash'; const router = express.Router(); @@ -7,21 +9,13 @@ const router = express.Router(); // @desc GET Get filters // @access Public router.get('/', async (req, res) => { - var searchString = req.query.search || ''; //If blank then return all - var tab = req.query.tab || ''; //If blank then return all + let searchString = req.query.search || ''; //If blank then return all + let tab = req.query.tab || ''; //If blank then return all if (tab === '') { let searchQuery = { $and: [{ activeflag: 'active' }] }; if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); await Promise.all([ - getFilter(searchString, 'dataset', 'license', false, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'tags.features', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.publisher', false, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'tool', 'tags.topic', true, getObjectFilters(searchQuery, req, 'tool')), getFilter(searchString, 'tool', 'tags.features', true, getObjectFilters(searchQuery, req, 
'tool')), getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, getObjectFilters(searchQuery, req, 'tool')), @@ -37,85 +31,87 @@ router.get('/', async (req, res) => { return res.json({ success: true, allFilters: { - licenseFilter: values[0][0], - sampleFilter: values[1][0], - datasetFeatureFilter: values[2][0], - publisherFilter: values[3][0], - ageBandFilter: values[4][0], - geographicCoverageFilter: values[5][0], - phenotypesFilter: values[6][0], - - toolTopicFilter: values[6][0], - toolFeatureFilter: values[7][0], - toolLanguageFilter: values[8][0], - toolCategoryFilter: values[9][0], + toolTopicFilter: values[0][0], + toolFeatureFilter: values[1][0], + toolLanguageFilter: values[2][0], + toolCategoryFilter: values[3][0], - projectTopicFilter: values[10][0], - projectFeatureFilter: values[11][0], - projectCategoryFilter: values[12][0], + projectTopicFilter: values[4][0], + projectFeatureFilter: values[5][0], + projectCategoryFilter: values[6][0], - paperTopicFilter: values[13][0], - paperFeatureFilter: values[14][0], + paperTopicFilter: values[7][0], + paperFeatureFilter: values[8][0], }, filterOptions: { - licenseFilterOptions: values[0][1], - sampleFilterOptions: values[1][1], - datasetFeaturesFilterOptions: values[2][1], - publisherFilterOptions: values[3][1], - ageBandFilterOptions: values[4][1], - geographicCoverageFilterOptions: values[5][1], - phenotypesOptions: values[6][1], - - toolTopicsFilterOptions: values[7][1], - featuresFilterOptions: values[8][1], - programmingLanguageFilterOptions: values[9][1], - toolCategoriesFilterOptions: values[10][1], + toolTopicsFilterOptions: values[0][1], + featuresFilterOptions: values[1][1], + programmingLanguageFilterOptions: values[2][1], + toolCategoriesFilterOptions: values[3][1], - projectTopicsFilterOptions: values[11][1], - projectFeaturesFilterOptions: values[12][1], - projectCategoriesFilterOptions: values[13][1], + projectTopicsFilterOptions: values[4][1], + 
projectFeaturesFilterOptions: values[5][1], + projectCategoriesFilterOptions: values[6][1], - paperTopicsFilterOptions: values[14][1], - paperFeaturesFilterOptions: values[15][1], + paperTopicsFilterOptions: values[7][1], + paperFeaturesFilterOptions: values[8][1], }, }); }); } else if (tab === 'Datasets') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'dataset'); + const type = 'dataset'; - await Promise.all([ - getFilter(searchString, 'dataset', 'license', false, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'tags.features', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.publisher', false, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, activeFiltersQuery), - ]).then(values => { - return res.json({ - success: true, - allFilters: { - licenseFilter: values[0][0], - sampleFilter: values[1][0], - datasetFeatureFilter: values[2][0], - publisherFilter: values[3][0], - ageBandFilter: values[4][0], - geographicCoverageFilter: values[5][0], - phenotypesFilter: values[6][0], - }, - filterOptions: { - licenseFilterOptions: values[0][1], - sampleFilterOptions: values[1][1], - datasetFeaturesFilterOptions: values[2][1], - publisherFilterOptions: values[3][1], - ageBandFilterOptions: values[4][1], - geographicCoverageFilterOptions: values[5][1], - phenotypesOptions: values[6][1], - }, - }); + let defaultQuery = { $and: [{ activeflag: 'active', type }] }; + if (searchString.length > 0) defaultQuery['$and'].push({ $text: { $search: 
searchString } }); + const filterQuery = getObjectFilters(defaultQuery, req, type); + const useCachedFilters = isEqual(defaultQuery, filterQuery) && searchString.length === 0; + + const filters = await filtersService.buildFilters(type, filterQuery, useCachedFilters); + return res.json({ + success: true, + filters }); + //const matchQuery = queryObject[0][`$match`]; + //const useCachedFilters = matchQuery[`$and`] && matchQuery[`$and`].length === 2; + + // Get paged results based on query params + // const [searchResults, filters] = await Promise.all( + // collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)), + // filtersService.buildFilters(type, matchQuery, useCachedFilters) + // ); + + + // await Promise.all([ + // // getFilter(searchString, 'dataset', 'license', false, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'tags.features', true, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'datasetfields.publisher', false, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, activeFiltersQuery), + // // getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, activeFiltersQuery), + // ]).then(values => { + // return res.json({ + // success: true, + // allFilters: { + // // licenseFilter: values[0][0], + // // sampleFilter: values[1][0], + // // datasetFeatureFilter: values[2][0], + // // publisherFilter: values[3][0], + // // ageBandFilter: values[4][0], + // // geographicCoverageFilter: values[5][0], + // // phenotypesFilter: values[6][0], + // }, + // filterOptions: { + // // licenseFilterOptions: values[0][1], + // // sampleFilterOptions: values[1][1], + // // datasetFeaturesFilterOptions: values[2][1], + // // 
publisherFilterOptions: values[3][1], + // // ageBandFilterOptions: values[4][1], + // // geographicCoverageFilterOptions: values[5][1], + // // phenotypesOptions: values[6][1], + // }, + // }); + // }); } else if (tab === 'Tools') { let searchQuery = { $and: [{ activeflag: 'active' }] }; if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); diff --git a/src/resources/search/search.repository.js b/src/resources/search/search.repository.js index 9084b2c8..60765598 100644 --- a/src/resources/search/search.repository.js +++ b/src/resources/search/search.repository.js @@ -1,18 +1,22 @@ import { Data } from '../tool/data.model'; import { Course } from '../course/course.model'; import { Collections } from '../collections/collections.model'; +import { findNodeInTree } from '../filters/utils/filters.util'; +import { datasetFilters } from '../filters/filters.mapper'; import _ from 'lodash'; import moment from 'moment'; +import helperUtil from '../utilities/helper.util'; + +export async function getObjectResult(type, searchAll, searchQuery, startIndex, maxResults, sort) { -export function getObjectResult(type, searchAll, searchQuery, startIndex, maxResults, sort) { let collection = Data; if (type === 'course') { collection = Course; } else if (type === 'collection') { collection = Collections; } - - let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + // ie copy deep object + let newSearchQuery = _.cloneDeep(searchQuery); if (type !== 'collection') { newSearchQuery['$and'].push({ type: type }); } else { @@ -90,7 +94,9 @@ export function getObjectResult(type, searchAll, searchQuery, startIndex, maxRes 'datasetfields.abstract': 1, 'datasetfields.ageBand': 1, 'datasetfields.phenotypes': 1, - datasetv2: 1, + 'datasetv2.summary.publisher.name': 1, + 'datasetv2.summary.publisher.logo': 1, + 'datasetv2.summary.publisher.memberOf': 1, 'persons.id': 1, 'persons.firstname': 1, @@ -127,14 +133,10 @@ export function getObjectResult(type, 
searchAll, searchQuery, startIndex, maxRes if (searchAll) queryObject.push({ $sort: { 'courseOptions.startDate': 1 } }); else queryObject.push({ $sort: { 'courseOptions.startDate': 1, score: { $meta: 'textScore' } } }); } - - var q = collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)); - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === 'undefined') resolve([]); - else resolve(data); - }); - }); + // Get paged results based on query params + const searchResults = await collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)); + // Return data + return { data: searchResults }; } export function getObjectCount(type, searchAll, searchQuery) { @@ -297,19 +299,12 @@ export function getObjectCount(type, searchAll, searchQuery) { } export function getObjectFilters(searchQueryStart, req, type) { - var searchQuery = JSON.parse(JSON.stringify(searchQueryStart)); + let searchQuery = JSON.parse(JSON.stringify(searchQueryStart)); let { - license = '', - sampleavailability = '', - keywords = '', - publisher = '', - ageband = '', - geographiccover = '', - phenotypes = '', - programmingLanguage = '', + toolprogrammingLanguage = '', toolcategories = '', - features = '', + toolfeatures = '', tooltopics = '', projectcategories = '', projectfeatures = '', @@ -331,67 +326,53 @@ export function getObjectFilters(searchQueryStart, req, type) { } = req.query; if (type === 'dataset') { - if (license.length > 0) { - let filterTermArray = []; - license.split('::').forEach(filterTerm => { - filterTermArray.push({ license: filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if (sampleavailability.length > 0) { - let filterTermArray = []; - sampleavailability.split('::').forEach(filterTerm => { - filterTermArray.push({ 'datasetfields.physicalSampleAvailability': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if 
(keywords.length > 0) { - let filterTermArray = []; - keywords.split('::').forEach(filterTerm => { - filterTermArray.push({ 'tags.features': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if (publisher.length > 0) { - let filterTermArray = []; - publisher.split('::').forEach(filterTerm => { - filterTermArray.push({ 'datasetfields.publisher': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if (ageband.length > 0) { - let filterTermArray = []; - ageband.split('::').forEach(filterTerm => { - filterTermArray.push({ 'datasetfields.ageBand': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if (geographiccover.length > 0) { - let filterTermArray = []; - geographiccover.split('::').forEach(filterTerm => { - filterTermArray.push({ 'datasetfields.geographicCoverage': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); - } - - if (phenotypes.length > 0) { - let filterTermArray = []; - phenotypes.split('::').forEach(filterTerm => { - filterTermArray.push({ 'datasetfields.phenotypes.name': filterTerm }); - }); - searchQuery['$and'].push({ $or: filterTermArray }); + // iterate over query string keys + for (const key of Object.keys(req.query)) { + try { + const filterValues = req.query[key].split('::'); + // check mapper for query type + const filterNode = findNodeInTree(datasetFilters, key); + if (filterNode) { + // switch on query type and build up query object + const { type = '', dataPath = '', matchField = '' } = filterNode; + switch (type) { + case 'contains': + // use regex to match without case sensitivity + searchQuery['$and'].push({ + $or: filterValues.map(value => { + return { [`${dataPath}`]: { $regex: helperUtil.escapeRegexChars(value), $options: 'i' } }; + }), + }); + break; + case 'elementMatch': + // use regex to match objects within an array without case sensitivity + searchQuery['$and'].push({ + [`${dataPath}`]: { + $elemMatch: { + $or: 
filterValues.map(value => { + return { [`${matchField}`]: { $regex: value, $options: 'i' } }; + }), + }, + }, + }); + break; + case 'boolean': + searchQuery['$and'].push({ [`${dataPath}`]: true }); + break; + default: + break; + } + } + } catch (err) { + console.error(err.message); + } } } if (type === 'tool') { - if (programmingLanguage.length > 0) { + if (toolprogrammingLanguage.length > 0) { let filterTermArray = []; - programmingLanguage.split('::').forEach(filterTerm => { + toolprogrammingLanguage.split('::').forEach(filterTerm => { filterTermArray.push({ 'programmingLanguage.programmingLanguage': filterTerm }); }); searchQuery['$and'].push({ $or: filterTermArray }); @@ -405,9 +386,9 @@ export function getObjectFilters(searchQueryStart, req, type) { searchQuery['$and'].push({ $or: filterTermArray }); } - if (features.length > 0) { + if (toolfeatures.length > 0) { let filterTermArray = []; - features.split('::').forEach(filterTerm => { + toolfeatures.split('::').forEach(filterTerm => { filterTermArray.push({ 'tags.features': filterTerm }); }); searchQuery['$and'].push({ $or: filterTermArray }); diff --git a/src/resources/search/search.router.js b/src/resources/search/search.router.js index f32219e6..a2fbfe05 100644 --- a/src/resources/search/search.router.js +++ b/src/resources/search/search.router.js @@ -12,8 +12,8 @@ const router = express.Router(); * The free word search criteria can be improved on with node modules that specialize with searching i.e. 
js-search */ router.get('/', async (req, res) => { - var authorID = parseInt(req.query.userID); - var searchString = req.query.search || ''; //If blank then return all + let authorID = parseInt(req.query.userID); + let searchString = req.query.search || ''; //If blank then return all //If searchString is applied, format any hyphenated words to enclose them as a phrase if (searchString.includes('-') && !searchString.includes('"')) { // Matches on any whole word containing a hyphen @@ -21,15 +21,14 @@ router.get('/', async (req, res) => { // Surround matching words in quotation marks searchString = searchString.replace(regex, '"$1"'); } - var tab = req.query.tab || ''; + let tab = req.query.tab || ''; let searchQuery = { $and: [{ activeflag: 'active' }] }; if (req.query.form) { searchQuery = { $and: [{ $or: [{ $and: [{ activeflag: 'review' }, { authors: authorID }] }, { activeflag: 'active' }] }] }; } - var searchAll = false; - + let searchAll = false; if (searchString.length > 0) { searchQuery['$and'].push({ $text: { $search: searchString } }); @@ -46,15 +45,28 @@ router.get('/', async (req, res) => { searchAll = true; } - let allResults = [], - datasetResults = [], - toolResults = [], - projectResults = [], - paperResults = [], - personResults = [], - courseResults = [], - collectionResults = []; + let results = []; + + let allResults = []; + const typeMapper = { + Datasets: 'dataset', + Tools: 'tool', + Projects: 'project', + Papers: 'paper', + People: 'person', + Courses: 'course', + Collections: 'collection', + }; + + const entityType = typeMapper[`${tab}`]; + + // if (!entityType) { + // return res.status(400, { + // success: false, + // message: 'You must pass a entity type', + // }); + // } if (tab === '') { allResults = await Promise.all([ getObjectResult( @@ -107,79 +119,19 @@ router.get('/', async (req, res) => { req.query.collectionSort || '' ), ]); - } else if (tab === 'Datasets') { - datasetResults = await Promise.all([ - getObjectResult( - 'dataset', - 
searchAll, - getObjectFilters(searchQuery, req, 'dataset'), - req.query.datasetIndex || 0, - req.query.maxResults || 40, - req.query.datasetSort || '' - ), - ]); - } else if (tab === 'Tools') { - toolResults = await Promise.all([ - getObjectResult( - 'tool', - searchAll, - getObjectFilters(searchQuery, req, 'tool'), - req.query.toolIndex || 0, - req.query.maxResults || 40, - req.query.toolSort || '' - ), - ]); - } else if (tab === 'Projects') { - projectResults = await Promise.all([ - getObjectResult( - 'project', - searchAll, - getObjectFilters(searchQuery, req, 'project'), - req.query.projectIndex || 0, - req.query.maxResults || 40, - req.query.projectSort || '' - ), - ]); - } else if (tab === 'Papers') { - paperResults = await Promise.all([ - getObjectResult( - 'paper', - searchAll, - getObjectFilters(searchQuery, req, 'paper'), - req.query.paperIndex || 0, - req.query.maxResults || 40, - req.query.paperSort || '' - ), - ]); - } else if (tab === 'People') { - personResults = await Promise.all([ - getObjectResult('person', searchAll, searchQuery, req.query.personIndex || 0, req.query.maxResults || 40, req.query.personSort || ''), - ]); - } else if (tab === 'Courses') { - courseResults = await Promise.all([ - getObjectResult( - 'course', - searchAll, - getObjectFilters(searchQuery, req, 'course'), - req.query.courseIndex || 0, - req.query.maxResults || 40, - 'startdate' - ), - ]); - } else if (tab === 'Collections') { - collectionResults = await Promise.all([ - getObjectResult( - 'collection', - searchAll, - getObjectFilters(searchQuery, req, 'collection'), - req.query.collectionIndex || 0, - req.query.maxResults || 40, - req.query.collectionSort || '' - ), - ]); + } else { + const sort = entityType === 'course' ? 
'startdate' : req.query[`${entityType}Sort`] || ''; + results = await getObjectResult( + entityType, + searchAll, + getObjectFilters(searchQuery, req, entityType), + req.query[`${entityType}Index`] || 0, + req.query.maxResults || 40, + sort + ); } - var summaryCounts = await Promise.all([ + const summaryCounts = await Promise.all([ getObjectCount('dataset', searchAll, getObjectFilters(searchQuery, req, 'dataset')), getObjectCount('tool', searchAll, getObjectFilters(searchQuery, req, 'tool')), getObjectCount('project', searchAll, getObjectFilters(searchQuery, req, 'project')), @@ -189,17 +141,17 @@ router.get('/', async (req, res) => { getObjectCount('collection', searchAll, getObjectFilters(searchQuery, req, 'collection')), ]); - var summary = { - datasets: summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0, - tools: summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0, - projects: summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0, - papers: summaryCounts[3][0] !== undefined ? summaryCounts[3][0].count : 0, - persons: summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0, - courses: summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0, - collections: summaryCounts[6][0] !== undefined ? summaryCounts[6][0].count : 0, + const summary = { + datasetCount: summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0, + toolCount: summaryCounts[1][0]!== undefined ? summaryCounts[1][0].count : 0, + projectCount: summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0, + paperCount: summaryCounts[3][0]!== undefined ? summaryCounts[3][0].count : 0, + personCount: summaryCounts[4][0]!== undefined ? summaryCounts[4][0].count : 0, + courseCount: summaryCounts[5][0]!== undefined ? summaryCounts[5][0].count : 0, + collectionCount: summaryCounts[6][0] !== undefined ? 
summaryCounts[6][0].count : 0, }; - let recordSearchData = new RecordSearchData(); + const recordSearchData = new RecordSearchData(); recordSearchData.searched = searchString; recordSearchData.returned.dataset = summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0; recordSearchData.returned.tool = summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0; @@ -214,27 +166,22 @@ router.get('/', async (req, res) => { if (tab === '') { return res.json({ success: true, - datasetResults: allResults[0], - toolResults: allResults[1], - projectResults: allResults[2], - paperResults: allResults[3], - personResults: allResults[4], - courseResults: allResults[5], - collectionResults: allResults[6], + datasetResults: allResults[0].data, + toolResults: allResults[1].data, + projectResults: allResults[2].data, + paperResults: allResults[3].data, + personResults: allResults[4].data, + courseResults: allResults[5].data, + collectionResults: allResults[6].data, + summary: summary, + }); + } else { + return res.json({ + success: true, + [`${entityType}Results`]: results, summary: summary, }); } - return res.json({ - success: true, - datasetResults: datasetResults[0], - toolResults: toolResults[0], - projectResults: projectResults[0], - paperResults: paperResults[0], - personResults: personResults[0], - courseResults: courseResults[0], - collectionResults: collectionResults[0], - summary: summary, - }); }); module.exports = router; diff --git a/src/resources/stats/kpis.router.js b/src/resources/stats/kpis.router.js index 02112233..5078a8d2 100644 --- a/src/resources/stats/kpis.router.js +++ b/src/resources/stats/kpis.router.js @@ -6,324 +6,280 @@ import { DataRequestModel } from '../datarequests/datarequests.model'; const router = express.Router(); router.get('', async (req, res) => { - var selectedMonthStart = new Date(req.query.selectedDate); - selectedMonthStart.setMonth(selectedMonthStart.getMonth()); - selectedMonthStart.setDate(1); - 
selectedMonthStart.setHours(0, 0, 0, 0); - - var selectedMonthEnd = new Date(req.query.selectedDate); - selectedMonthEnd.setMonth(selectedMonthEnd.getMonth() + 1); - selectedMonthEnd.setDate(0); - selectedMonthEnd.setHours(23, 59, 59, 999); - - switch (req.query.kpi) { - case 'technicalmetadata': - var totalDatasetsQuery = [ - { - $facet: { - TotalDataSets: [ - { - $match: { - $and: [ - { activeflag: 'active' }, - { type: 'dataset' }, - { 'datasetfields.publisher': { $ne: 'OTHER > HEALTH DATA RESEARCH UK' } }, - { 'datasetfields.publisher': { $ne: 'HDR UK' } }, - ], + try { + var selectedMonthStart = new Date(req.query.selectedDate); + selectedMonthStart.setMonth(selectedMonthStart.getMonth()); + selectedMonthStart.setDate(1); + selectedMonthStart.setHours(0, 0, 0, 0); + + var selectedMonthEnd = new Date(req.query.selectedDate); + selectedMonthEnd.setMonth(selectedMonthEnd.getMonth() + 1); + selectedMonthEnd.setDate(0); + selectedMonthEnd.setHours(23, 59, 59, 999); + + switch (req.query.kpi) { + case 'technicalmetadata': + const technicalMetadataResults = await Data.aggregate([ + { + $facet: { + TotalDataSets: [ + { + $match: { + activeflag: 'active', + type: 'dataset', + 'datasetfields.publisher': { $nin: ['OTHER > HEALTH DATA RESEARCH UK', 'HDR UK'] }, + }, }, - }, - { $count: 'TotalDataSets' }, - ], - TotalMetaData: [ - { - $match: { - activeflag: 'active', - type: 'dataset', - 'datasetfields.technicaldetails': { - $exists: true, - $not: { - $size: 0, + { $count: 'TotalDataSets' }, + ], + TotalMetaData: [ + { + $match: { + activeflag: 'active', + type: 'dataset', + 'datasetfields.technicaldetails': { + $exists: true, + $not: { + $size: 0, + }, }, }, }, - }, - { - $count: 'TotalMetaData', - }, - ], + { + $count: 'TotalMetaData', + }, + ], + }, }, - }, - ]; - - var q = Data.aggregate(totalDatasetsQuery); - - var result; - q.exec((err, dataSets) => { - if (err) return res.json({ success: false, error: err }); - - if (typeof dataSets[0].TotalDataSets[0] === 
'undefined') { - dataSets[0].TotalDataSets[0].TotalDataSets = 0; - } - if (typeof dataSets[0].TotalMetaData[0] === 'undefined') { - dataSets[0].TotalMetaData[0].TotalMetaData = 0; - } + ]); - result = res.json({ + return res.json({ success: true, data: { - totalDatasets: dataSets[0].TotalDataSets[0].TotalDataSets, - datasetsMetadata: dataSets[0].TotalMetaData[0].TotalMetaData, + totalDatasets: technicalMetadataResults[0].TotalDataSets[0].TotalDataSets || 0, + datasetsMetadata: technicalMetadataResults[0].TotalMetaData[0].TotalMetaData || 0, }, }); - }); - return result; - break; + case 'searchanddar': + var result; - case 'searchanddar': - var result; - - var aggregateQuerySearches = [ - { - $facet: { - totalMonth: [ - { $match: { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } } }, - - { - $group: { - _id: 'totalMonth', - count: { $sum: 1 }, + var aggregateQuerySearches = [ + { + $facet: { + totalMonth: [ + { $match: { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } } }, + + { + $group: { + _id: 'totalMonth', + count: { $sum: 1 }, + }, }, - }, - ], - noResultsMonth: [ - { - $match: { - $and: [ - { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, - { 'returned.dataset': 0 }, - { 'returned.tool': 0 }, - { 'returned.project': 0 }, - { 'returned.paper': 0 }, - { 'returned.person': 0 }, - ], + ], + noResultsMonth: [ + { + $match: { + $and: [ + { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, + { 'returned.dataset': 0 }, + { 'returned.tool': 0 }, + { 'returned.project': 0 }, + { 'returned.paper': 0 }, + { 'returned.person': 0 }, + ], + }, }, - }, - { - $group: { - _id: 'noResultsMonth', - count: { $sum: 1 }, + { + $group: { + _id: 'noResultsMonth', + count: { $sum: 1 }, + }, }, - }, - ], - accessRequestsMonth: [ - //used only createdAt first { "$match": { "createdAt": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} } }, - // some older fields only have timeStamp --> only timeStamp in the 
production db - //checking for both currently - { - $match: { - $and: [ - { - $or: [ - { createdAt: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, - { timeStamp: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, - ], - }, - { - $or: [ - { applicationStatus: 'submitted' }, - { applicationStatus: 'approved' }, - { applicationStatus: 'rejected' }, - { applicationStatus: 'inReview' }, - { applicationStatus: 'approved with conditions' }, - ], - }, - ], + ], + accessRequestsMonth: [ + { + $match: { + dateSubmitted: { $gte: selectedMonthStart, $lt: selectedMonthEnd }, + applicationStatus: { $in: ['submitted', 'approved', 'rejected', 'inReview', 'approved with conditions'] }, + }, }, - }, - ], + ], + }, }, - }, - ]; - - var q = RecordSearchData.aggregate(aggregateQuerySearches); - - var y = DataRequestModel.aggregate(aggregateQuerySearches); + ]; - q.exec((err, dataSearches) => { - if (err) return res.json({ success: false, error: err }); + var q = RecordSearchData.aggregate(aggregateQuerySearches); - if (typeof dataSearches[0].totalMonth[0] === 'undefined') { - dataSearches[0].totalMonth[0] = { count: 0 }; - } - if (typeof dataSearches[0].noResultsMonth[0] === 'undefined') { - dataSearches[0].noResultsMonth[0] = { count: 0 }; - } - - y.exec(async (err, accessRequests) => { - let hdrDatasetID = await getHdrDatasetId(); - let hdrDatasetIds = []; - hdrDatasetID.map(hdrDatasetid => { - hdrDatasetIds.push(hdrDatasetid.datasetid); - }); - let accessRequestsMonthCount = 0; + var y = DataRequestModel.aggregate(aggregateQuerySearches); + q.exec((err, dataSearches) => { if (err) return res.json({ success: false, error: err }); - accessRequests[0].accessRequestsMonth.map(accessRequest => { - if (accessRequest.dataSetId && accessRequest.dataSetId.length > 0 && !hdrDatasetIds.includes(accessRequest.dataSetId)) { - accessRequestsMonthCount++; - } - - if (accessRequest.datasetIds && accessRequest.datasetIds.length > 0) { - accessRequest.datasetIds.map(datasetid => { - if 
(!hdrDatasetIds.includes(datasetid)) { - accessRequestsMonthCount++; - } - }); - } - }); + if (typeof dataSearches[0].totalMonth[0] === 'undefined') { + dataSearches[0].totalMonth[0] = { count: 0 }; + } + if (typeof dataSearches[0].noResultsMonth[0] === 'undefined') { + dataSearches[0].noResultsMonth[0] = { count: 0 }; + } - result = res.json({ - success: true, - data: { - totalMonth: dataSearches[0].totalMonth[0].count, - noResultsMonth: dataSearches[0].noResultsMonth[0].count, - accessRequestsMonth: accessRequestsMonthCount, - }, + y.exec(async (err, accessRequests) => { + let hdrDatasetID = await getHdrDatasetId(); + let hdrDatasetIds = []; + hdrDatasetID.map(hdrDatasetid => { + hdrDatasetIds.push(hdrDatasetid.datasetid); + }); + let accessRequestsMonthCount = 0; + + if (err) return res.json({ success: false, error: err }); + + accessRequests[0].accessRequestsMonth.map(accessRequest => { + if (accessRequest.datasetIds && accessRequest.datasetIds.length > 0) { + accessRequest.datasetIds.map(datasetid => { + if (!hdrDatasetIds.includes(datasetid)) { + accessRequestsMonthCount++; + } + }); + } + }); + + result = res.json({ + success: true, + data: { + totalMonth: dataSearches[0].totalMonth[0].count, + noResultsMonth: dataSearches[0].noResultsMonth[0].count, + accessRequestsMonth: accessRequestsMonthCount, + }, + }); }); }); - }); - return result; - break; + return result; - case 'uptime': - const monitoring = require('@google-cloud/monitoring'); - const projectId = 'hdruk-gateway'; - const client = new monitoring.MetricServiceClient(); + case 'uptime': + const monitoring = require('@google-cloud/monitoring'); + const projectId = 'hdruk-gateway'; + const client = new monitoring.MetricServiceClient(); - var result; + var result; - const request = { - name: client.projectPath(projectId), - filter: - 'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND 
metric.label."checker_location"="eur-belgium"', + const request = { + name: client.projectPath(projectId), + filter: + 'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND metric.label."checker_location"="eur-belgium"', - interval: { - startTime: { - seconds: selectedMonthStart.getTime() / 1000, - }, - endTime: { - seconds: selectedMonthEnd.getTime() / 1000, + interval: { + startTime: { + seconds: selectedMonthStart.getTime() / 1000, + }, + endTime: { + seconds: selectedMonthEnd.getTime() / 1000, + }, }, - }, - aggregation: { - alignmentPeriod: { - seconds: '86400s', + aggregation: { + alignmentPeriod: { + seconds: '86400s', + }, + crossSeriesReducer: 'REDUCE_NONE', + groupByFields: ['metric.label."checker_location"', 'resource.label."instance_id"'], + perSeriesAligner: 'ALIGN_FRACTION_TRUE', }, - crossSeriesReducer: 'REDUCE_NONE', - groupByFields: ['metric.label."checker_location"', 'resource.label."instance_id"'], - perSeriesAligner: 'ALIGN_FRACTION_TRUE', - }, - }; + }; - // Writes time series data - const [timeSeries] = await client.listTimeSeries(request); - var dailyUptime = []; - var averageUptime; + // Writes time series data + const [timeSeries] = await client.listTimeSeries(request); + var dailyUptime = []; + var averageUptime; - timeSeries.forEach(data => { - data.points.forEach(data => { - dailyUptime.push(data.value.doubleValue); - }); + timeSeries.forEach(data => { + data.points.forEach(data => { + dailyUptime.push(data.value.doubleValue); + }); - averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; + averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; - result = res.json({ - success: true, - data: averageUptime, + result = res.json({ + success: true, + data: averageUptime, + }); }); - }); - - return result; - break; - case 'topdatasets': - let DarInfoMap = new Map(); + 
return result; - let hdrDatasetID = await getHdrDatasetId(); - let hdrDatasetIds = []; - hdrDatasetID.map(hdrDatasetid => { - hdrDatasetIds.push(hdrDatasetid.datasetid); - }); - - await getDarIds(req, selectedMonthStart, selectedMonthEnd) - .then(async data => { - for (let datasetIdObject in data) { - if (data[datasetIdObject].datasetIds && data[datasetIdObject].datasetIds.length > 0) { - for (let datasetId in data[datasetIdObject].datasetIds) { - if (!hdrDatasetIds.includes(data[datasetIdObject].datasetIds[datasetId])) { - let result = await getDarInfo(data[datasetIdObject].datasetIds[datasetId]); + case 'topdatasets': + const topDatasetResults = await DataRequestModel.aggregate([ + { + $match: { + dateSubmitted: { + $gte: selectedMonthStart, + $lt: selectedMonthEnd, + }, + applicationStatus: { + $in: ['submitted', 'approved', 'rejected', 'inReview', 'approved with conditions'], + }, + publisher: { + $nin: ['HDR UK', 'OTHER > HEALTH DATA RESEARCH UK'], + }, + }, + }, + { + $lookup: { + from: 'tools', + localField: 'datasetIds', + foreignField: 'datasetid', + as: 'datasets', + }, + }, + { + $project: { + 'datasets.name': 1, + 'datasets.datasetfields.publisher': 1, + 'datasets.pid': 1, + _id: 0, + }, + }, + { + $unwind: { + path: '$datasets', + preserveNullAndEmptyArrays: false, + }, + }, + { + $group: { + _id: '$datasets.name', + name: { + $first: '$datasets.name', + }, + publisher: { + $first: '$datasets.datasetfields.publisher', + }, + pid: { + $first: '$datasets.pid', + }, + requests: { + $sum: 1, + }, + }, + }, + { + $sort: { + requests: -1, + publisher: 1, + name: 1, + }, + }, + { + $limit: 5, + }, + ]); - if (result.length > 0) { - if (DarInfoMap.has(data[datasetIdObject].datasetIds[datasetId])) { - let count = DarInfoMap.get(data[datasetIdObject].datasetIds[datasetId]); - count.requests++; - DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], { - requests: count.requests, - name: result[0].name, - publisher: result[0].datasetfields.publisher, - }); - 
} else { - DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], { - requests: 1, - name: result[0].name, - publisher: result[0].datasetfields.publisher, - }); - } - } - } - } - } else if ( - data[datasetIdObject].dataSetId && - data[datasetIdObject].dataSetId.length > 0 && - !hdrDatasetIds.includes(data[datasetIdObject].dataSetId) - ) { - let result = await getDarInfo(data[datasetIdObject].dataSetId); - if (result.length > 0) { - if (DarInfoMap.has(data[datasetIdObject].dataSetId)) { - let count = DarInfoMap.get(data[datasetIdObject].dataSetId); - count.requests++; - DarInfoMap.set(data[datasetIdObject].dataSetId, { - requests: count.requests, - name: result[0].name, - publisher: result[0].datasetfields.publisher, - }); - } else { - DarInfoMap.set(data[datasetIdObject].dataSetId, { - requests: 1, - name: result[0].name, - publisher: result[0].datasetfields.publisher, - }); - } - } - } - } - }) - .catch(err => { - return res.json({ success: false, error: err }); + return res.json({ + success: true, + data: topDatasetResults, }); - - let sortedResults = Array.from(DarInfoMap).sort((a, b) => { - return b[1].requests - a[1].requests; - }); - - sortedResults = sortedResults.slice(0, 5); - - return res.json({ success: true, data: sortedResults }); - - break; + } + } catch (err) { + return res.json({ success: false, error: err.message }); } }); @@ -347,73 +303,3 @@ export const getHdrDatasetId = async () => { }); }); }; - -const getDarIds = async (req, selectedMonthStart, selectedMonthEnd) => { - return new Promise(async (resolve, reject) => { - let DarDatasetIds = DataRequestModel.find( - { - // VALUES YOU ARE CHECKING MATCH SPECIFIED CRITERIA IE. 
WHERE - $and: [ - { - $or: [ - { - createdAt: { - $gte: selectedMonthStart, - $lt: selectedMonthEnd, - }, - }, - { - timeStamp: { - $gte: selectedMonthStart, - $lt: selectedMonthEnd, - }, - }, - ], - }, - { - $or: [ - { applicationStatus: 'submitted' }, - { applicationStatus: 'approved' }, - { applicationStatus: 'rejected' }, - { applicationStatus: 'inReview' }, - { applicationStatus: 'approved with conditions' }, - ], - }, - ], - }, - { - // THE FIELDS YOU WANT TO RETURN - _id: 0, - dataSetId: 1, - datasetIds: 1, - } - ); - - DarDatasetIds.exec((err, data) => { - if (err) reject(err); - return resolve(data); - }); - }); -}; - -const getDarInfo = async id => { - return new Promise(async (resolve, reject) => { - let DarDatasetInfo = Data.find( - { - datasetid: id, - }, - { - _id: 0, - datasetid: 1, - name: 1, - //RETURN EMBEDDED FIELD - 'datasetfields.publisher': 1, - } - ); - - DarDatasetInfo.exec((err, data) => { - if (err) reject(err); - else resolve(data); - }); - }); -}; diff --git a/src/resources/stats/stats.router.js b/src/resources/stats/stats.router.js index 69cb8b95..75d4161c 100644 --- a/src/resources/stats/stats.router.js +++ b/src/resources/stats/stats.router.js @@ -3,7 +3,7 @@ import { RecordSearchData } from '../search/record.search.model'; import { Data } from '../tool/data.model'; import { DataRequestModel } from '../datarequests/datarequests.model'; import { getHdrDatasetId } from './kpis.router'; - +import { Course } from '../course/course.model'; const router = express.Router(); /** @@ -15,7 +15,7 @@ router.get('', async (req, res) => { try { const { query = {} } = req; - switch (query.rank) { + switch (req.query.rank) { case undefined: var result; @@ -89,6 +89,16 @@ router.get('', async (req, res) => { { $group: { _id: '$type', count: { $sum: 1 } } }, ]; + //set the aggregate queries + const courseQuery = [ + { + $match: { + $and: [{ activeflag: 'active' }], + }, + }, + { $group: { _id: '$type', count: { $sum: 1 } } }, + ]; + var q = 
RecordSearchData.aggregate(aggregateQuerySearches); var aggregateAccessRequests = [ @@ -106,6 +116,15 @@ router.get('', async (req, res) => { ]; var y = DataRequestModel.aggregate(aggregateAccessRequests); + let courseData = Course.aggregate(courseQuery); + + let counts = {}; //hold the type (i.e. tool, person, project, access requests) counts data + await courseData.exec((err, res) => { + if (err) return res.json({ success: false, error: err }); + + let { count = 0 } = res[0]; + counts['course'] = count; + }); q.exec((err, dataSearches) => { if (err) return res.json({ success: false, error: err }); @@ -114,7 +133,6 @@ router.get('', async (req, res) => { x.exec((errx, dataTypes) => { if (errx) return res.json({ success: false, error: errx }); - var counts = {}; //hold the type (i.e. tool, person, project, access requests) counts data for (var i = 0; i < dataTypes.length; i++) { //format the result in a clear and dynamic way counts[dataTypes[i]._id] = dataTypes[i].count; @@ -196,23 +214,194 @@ router.get('', async (req, res) => { case 'popular': let popularType = {}; if (query.type) popularType = { type: query.type }; - const popularData = await Data.aggregate([ - { - $match: { - ...popularType, - counter: { - $gt: 0, + let popularData; + + if (popularType.type !== 'course') { + popularData = await Data.aggregate([ + { + $match: { + ...popularType, + counter: { + $gt: 0, + }, + name: { + $exists: true, + }, + pid: { + $ne: 'fd8d0743-344a-4758-bb97-f8ad84a37357', //PID for HDR-UK Papers dataset + }, }, - name: { - $exists: true, + }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { + $project: { + _id: 0, + type: 1, + bio: 1, + firstname: 1, + lastname: 1, + name: 1, + categories: 1, + pid: 1, + id: 1, + counter: 1, + programmingLanguage: 1, + tags: 1, + description: 1, + activeflag: 1, + datasetv2: 1, + datasetfields: 1, + 'persons.id': 1, + 'persons.firstname': 1, + 'persons.lastname': 1, }, - pid: { - $ne: 
'fd8d0743-344a-4758-bb97-f8ad84a37357', //PID for HDR-UK Papers dataset + }, + { + $group: { + _id: '$name', + type: { $first: '$type' }, + name: { $first: '$name' }, + pid: { $first: '$pid' }, + bio: { $first: '$bio' }, + firstname: { $first: '$firstname' }, + lastname: { $first: '$lastname' }, + id: { $first: '$id' }, + categories: { $first: '$categories' }, + counter: { $sum: '$counter' }, + programmingLanguage: { $first: '$programmingLanguage' }, + tags: { $first: '$tags' }, + description: { $first: '$description' }, + activeflag: { $first: '$activeflag' }, + datasetv2: { $first: '$datasetv2' }, + datasetfields: { $first: '$datasetfields' }, + persons: { $first: '$persons' }, }, }, - }, - { - $project: { + { + $sort: { + counter: -1, + name: 1, + }, + }, + { + $limit: 10, + }, + ]); + } else if (popularType.type === 'course') { + popularData = await Course.aggregate([ + { + $match: { + counter: { + $gt: 0, + }, + title: { + $exists: true, + }, + }, + }, + { + $project: { + _id: 0, + type: 1, + title: 1, + provider: 1, + courseOptions: 1, + award: 1, + domains: 1, + description: 1, + id: 1, + counter: 1, + }, + }, + { + $group: { + _id: '$title', + type: { $first: '$type' }, + title: { $first: '$title' }, + provider: { $first: '$provider' }, + courseOptions: { $first: '$courseOptions' }, + award: { $first: '$award' }, + domains: { $first: '$domains' }, + description: { $first: '$description' }, + id: { $first: '$id' }, + counter: { $sum: '$counter' }, + }, + }, + { + $sort: { + counter: -1, + title: 1, + }, + }, + { + $limit: 10, + }, + ]); + } + + return res.json({ success: true, data: popularData }); + + case 'updates': + let recentlyUpdated = Data.find({ activeflag: 'active' }).sort({ updatedon: -1 }).limit(10); + + if (req.query.type && req.query.type === 'course') { + recentlyUpdated = Course.find( + { activeflag: 'active' }, + { + _id: 0, + type: 1, + title: 1, + provider: 1, + courseOptions: 1, + award: 1, + domains: 1, + description: 1, + id: 1, + 
counter: 1, + updatedon: 1, + } + ) + .sort({ updatedon: -1, title: 1 }) + .limit(10); + } else if (req.query.type && req.query.type === 'dataset') { + recentlyUpdated = Data.find( + { + $and: [ + { + type: req.query.type, + activeflag: 'active', + pid: { + $ne: 'fd8d0743-344a-4758-bb97-f8ad84a37357', //Production PID for HDR-UK Papers dataset + }, + }, + ], + }, + { + _id: 0, + type: 1, + name: 1, + pid: 1, + id: 1, + counter: 1, + activeflag: 1, + datasetv2: 1, + datasetfields: 1, + updatedAt: 1, + } + ) + .sort({ updatedAt: -1, name: 1 }) + .limit(10); + } else if (req.query.type && req.query.type !== 'course' && req.query.type !== 'dataset') { + recentlyUpdated = Data.find( + { + $and: [ + { + type: req.query.type, + activeflag: 'active', + }, + ], + }, + { _id: 0, type: 1, bio: 1, @@ -220,50 +409,22 @@ router.get('', async (req, res) => { lastname: 1, name: 1, categories: 1, - pid: 1, id: 1, counter: 1, - }, - }, - { - $group: { - _id: '$name', - type: { $first: '$type' }, - name: { $first: '$name' }, - pid: { $first: '$pid' }, - bio: {$first: '$bio' }, - firstname: { $first: '$firstname' }, - lastname: { $first: '$lastname' }, - id: { $first: '$id' }, - categories: { $first: '$categories' }, - counter: { $sum: '$counter' }, - }, - }, - { - $sort: { - counter: -1, - name: 1, - }, - }, - { - $limit: 10, - }, - ]); - - return res.json({ success: true, data: popularData }); - - case 'updates': - var q = Data.find({ activeflag: 'active', counter: { $gt: 0 } }) - .sort({ updatedon: -1 }) - .limit(10); - - if (req.query.type) { - q = Data.find({ $and: [{ type: req.query.type, activeflag: 'active', updatedon: { $gt: 0 } }] }) - .sort({ counter: -1 }) + programmingLanguage: 1, + tags: 1, + description: 1, + activeflag: 1, + authors: 1, + updatedon: 1, + } + ) + .populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]) + .sort({ updatedon: -1, name: 1 }) .limit(10); } - q.exec((err, data) => { + recentlyUpdated.exec((err, data) => { if 
(err) return res.json({ success: false, error: err }); return res.json({ success: true, data: data }); }); diff --git a/src/resources/team/team.model.js b/src/resources/team/team.model.js index e8571a0e..ed5df37a 100644 --- a/src/resources/team/team.model.js +++ b/src/resources/team/team.model.js @@ -9,9 +9,15 @@ const TeamSchema = new Schema( members: [ { memberid: { type: Schema.Types.ObjectId, ref: 'User', required: true }, - roles: { type: [String], enum: ['reviewer', 'manager'], required: true }, + roles: { type: [String], enum: ['reviewer', 'manager', 'metadata_editor'], required: true }, dateCreated: Date, dateUpdated: Date, + notifications: [ + { + type: String, // metadataonbarding || dataaccessrequest + optIn: { type: Boolean, default: true }, + }, + ], }, ], type: String, @@ -19,6 +25,13 @@ const TeamSchema = new Schema( type: Boolean, default: true, }, + notifications: [ + { + type: String, // metadataonbarding || dataaccessrequest + optIn: { type: Boolean, default: false }, + subscribedEmails: [String], + }, + ], }, { toJSON: { virtuals: true }, diff --git a/src/resources/tool/data.model.js b/src/resources/tool/data.model.js index d199674b..90814971 100644 --- a/src/resources/tool/data.model.js +++ b/src/resources/tool/data.model.js @@ -75,6 +75,8 @@ const DataSchema = new Schema( //dataset related fields source: String, + is5Safes: Boolean, + hasTechnicalDetails: Boolean, datasetid: String, pid: String, datasetVersion: String, @@ -103,6 +105,19 @@ const DataSchema = new Schema( phenotypes: [], }, datasetv2: {}, + questionAnswers: {}, + structuralMetadata: [], + percentageCompleted: {}, + applicationStatusDesc: String, + timestamps: { + updated: Date, + created: Date, + submitted: Date, + published: Date, + rejected: Date, + archived: Date, + }, + datasetVersionIsV1: { type: Boolean, default: false }, //not used rating: Number, diff --git a/src/resources/tool/data.repository.js b/src/resources/tool/data.repository.js index 86ea1fef..e7ee31ae 100644 --- 
a/src/resources/tool/data.repository.js +++ b/src/resources/tool/data.repository.js @@ -137,6 +137,7 @@ const editTool = async (req, res) => { } = req.body; let id = req.params.id; let programmingLanguage = req.body.programmingLanguage; + let updatedon = Date.now(); if (!categories || typeof categories === undefined) categories = { category: '', programmingLanguage: [], programmingLanguageVersion: '' }; @@ -181,6 +182,7 @@ const editTool = async (req, res) => { relatedObjects: relatedObjects, isPreprint: isPreprint, document_links: documentLinksValidated, + updatedon: updatedon, }, err => { if (err) { diff --git a/src/resources/utilities/constants.util.js b/src/resources/utilities/constants.util.js index a1b13156..27276ac3 100644 --- a/src/resources/utilities/constants.util.js +++ b/src/resources/utilities/constants.util.js @@ -4,6 +4,13 @@ const _userTypes = { APPLICANT: 'applicant', }; +const _formTypes = Object.freeze({ + Enquiry: 'enquiry', + Extended5Safe: '5 safe', +}); + +const _enquiryFormId = '5f0c4af5d138d3e486270031'; + const _userQuestionActions = { custodian: { reviewer: { @@ -288,6 +295,8 @@ const _notificationTypes = { WORKFLOWASSIGNED: 'WorkflowAssigned', WORKFLOWCREATED: 'WorkflowCreated', INPROGRESS: 'InProgress', + APPLICATIONCLONED: 'ApplicationCloned', + APPLICATIONDELETED: 'ApplicationDeleted', }; const _applicationStatuses = { @@ -335,12 +344,27 @@ const _roleTypes = { REVIEWER: 'reviewer', }; -// +// + +// + +const _datatsetStatuses = { + DRAFT: 'draft', + INPROGRESS: 'inProgress', + INREVIEW: 'inReview', + APPROVED: 'approved', + REJECTED: 'rejected', + APPROVEDWITHCONDITIONS: 'approved with conditions', +}; + +// const _hdrukEmail = 'enquiry@healthdatagateway.org'; export default { userTypes: _userTypes, + enquiryFormId: _enquiryFormId, + formTypes: _formTypes, userQuestionActions: _userQuestionActions, navigationFlags: _navigationFlags, amendmentStatuses: _amendmentStatuses, @@ -353,4 +377,5 @@ export default { darPanelMapper: 
_darPanelMapper, submissionEmailRecipientTypes: _submissionEmailRecipientTypes, hdrukEmail: _hdrukEmail, + datatsetStatuses: _datatsetStatuses, }; diff --git a/src/resources/utilities/dynamicForms/dynamicForm.util.js b/src/resources/utilities/dynamicForms/dynamicForm.util.js index af55e5b4..31510daa 100644 --- a/src/resources/utilities/dynamicForms/dynamicForm.util.js +++ b/src/resources/utilities/dynamicForms/dynamicForm.util.js @@ -53,7 +53,7 @@ let findQuestionPanel = (panelId = '', questionPanels = []) => { return {}; }; -let duplicateQuestionSet = (questionSetId, schema) => { +let duplicateQuestionSet = (questionSetId, schema, uniqueId = randomstring.generate(5)) => { let { questionSets } = schema; // 1. find questionSet let qSet = findQuestionSet(questionSetId, schema); @@ -65,14 +65,14 @@ let duplicateQuestionSet = (questionSetId, schema) => { // 3. duplicate questionSet ensure we take a copy let qSetDuplicate = [...questionSets].find(q => q.questionSetId === question.input.panelId); // 4. modify the questions array questionIds - let qSetModified = modifyQuestionSetIds(qSetDuplicate); + let qSetModified = modifyQuestionSetIds(qSetDuplicate, uniqueId); // 5. return the modified questionSet return qSetModified; } return {}; }; -let duplicateQuestions = (questionSetId, questionIdsToDuplicate, separatorText = '', schema) => { +let duplicateQuestions = (questionSetId, questionIdsToDuplicate, separatorText = '', schema, uniqueId = randomstring.generate(5)) => { // 1. find question set containing questions to duplicate let qSet = findQuestionSet(questionSetId, schema); // 2. map array of questions to duplicate @@ -84,7 +84,7 @@ let duplicateQuestions = (questionSetId, questionIdsToDuplicate, separatorText = } }); // 4. modify question ids with unique values - let modifiedQuestions = modifyQuestionIds(questionSetId, duplicatedQuestions); + let modifiedQuestions = modifyQuestionIds(questionSetId, duplicatedQuestions, uniqueId); // 5. 
insert separator text before new duplicated questions if(!_.isEmpty(separatorText)) { modifiedQuestions = insertQuestionSeparator(modifiedQuestions, separatorText); @@ -93,9 +93,8 @@ let duplicateQuestions = (questionSetId, questionIdsToDuplicate, separatorText = return modifiedQuestions; }; -let modifyQuestionSetIds = questionSet => { +let modifyQuestionSetIds = (questionSet, uniqueId) => { let { questionSetId, questions } = { ...questionSet }; - let uniqueId = randomstring.generate(5); questionSetId = `${questionSetId}_${uniqueId}`; // 1.loop over each qObj and if questionId update let questionsModified = [...questions].reduce((arr, qValue) => { @@ -133,8 +132,7 @@ let modifyQuestionSetIds = questionSet => { }; }; -let modifyQuestionIds = (questionSetId, questions) => { - let uniqueId = randomstring.generate(5); +let modifyQuestionIds = (questionSetId, questions, uniqueId) => { // 1.loop over each qObj and if questionId update let questionsModified = [...questions].reduce((arr, qValue) => { // 2. ensure we copy the original question deep @@ -271,7 +269,7 @@ let insertQuestions = (questionSetId, targetQuestionId, duplicatedQuestions, sch }); } }); - // 7. return updated schema + // 6. 
return updated schema return schema; }; diff --git a/src/resources/utilities/emailGenerator.util.js b/src/resources/utilities/emailGenerator.util.js index 289644d0..ad7704d4 100644 --- a/src/resources/utilities/emailGenerator.util.js +++ b/src/resources/utilities/emailGenerator.util.js @@ -1,4 +1,4 @@ -import _ from 'lodash'; +import { isNil, isEmpty, capitalize, groupBy, forEach } from 'lodash'; import moment from 'moment'; import { UserModel } from '../user/user.model'; import helper from '../utilities/helper.util'; @@ -12,13 +12,13 @@ let excludedQuestionSetIds = ['addRepeatableSection', 'removeRepeatableSection'] let autoCompleteLookups = { fullname: ['email'] }; const _getStepReviewers = (reviewers = []) => { - if (!_.isEmpty(reviewers)) return [...reviewers].map(reviewer => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); + if (!isEmpty(reviewers)) return [...reviewers].map(reviewer => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); return ''; }; const _getStepSections = (sections = []) => { - if (!_.isEmpty(sections)) return [...sections].map(section => constants.darPanelMapper[section]).join(', '); + if (!isEmpty(sections)) return [...sections].map(section => constants.darPanelMapper[section]).join(', '); return ''; }; @@ -142,7 +142,7 @@ const _getAllQuestionsFlattened = allQuestions => { // set the parent page and parent section as nested wont have reference to its parent parent = { page, section, questionSetId: qsId, questionSetHeader }; } - let { questionId, question } = questionObj; + let { questionId, question, input } = questionObj; // split up questionId let [qId, uniqueId] = questionId.split('_'); // actual quesitonId @@ -158,6 +158,7 @@ const _getAllQuestionsFlattened = allQuestions => { questionSetId: qsId, page: parent.page, section: parent.section, + input, }, ]; } @@ -181,7 +182,7 @@ const _getAllQuestionsFlattened = allQuestions => { const _formatSectionTitle = value => { let [questionId] = value.split('_'); - return 
_.capitalize(questionId); + return capitalize(questionId); }; const _buildSubjectTitle = (user, title, submissionType) => { @@ -189,9 +190,9 @@ const _buildSubjectTitle = (user, title, submissionType) => { if (user.toUpperCase() === 'DATACUSTODIAN') { subject = `Someone has submitted an application to access ${title} dataset. Please let the applicant know as soon as there is progress in the review of their submission.`; } else { - if ( submissionType === constants.submissionTypes.INPROGRESS){ + if (submissionType === constants.submissionTypes.INPROGRESS) { subject = `You are in progress with a request access to ${title}. The custodian will be in contact after you submit the application.`; - } else if (submissionType === constants.submissionTypes.INITIAL) { + } else if (submissionType === constants.submissionTypes.INITIAL) { subject = `You have requested access to ${title}. The custodian will be in contact about the application.`; } else { subject = `You have made updates to your Data Access Request for ${title}. The custodian will be in contact about the application.`; @@ -216,9 +217,11 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) let { projectName = 'No project name set', isNationalCoreStudies = false, nationalCoreStudiesProjectId = '' } = aboutApplication; let linkNationalCoreStudies = nationalCoreStudiesProjectId === '' ? '' : `${process.env.homeURL}/project/${nationalCoreStudiesProjectId}`; let heading = - submissionType === constants.submissionTypes.INPROGRESS ? 'Data access request application in progress' : (constants.submissionTypes.INITIAL + submissionType === constants.submissionTypes.INPROGRESS + ? 'Data access request application in progress' + : constants.submissionTypes.INITIAL ? 
`New data access request application` - : `Existing data access request application with new updates`); + : `Existing data access request application with new updates`; let subject = _buildSubjectTitle(userType, datasetTitles, submissionType); let questionTree = { ...fullQuestions }; let answers = { ...questionAnswers }; @@ -322,12 +325,16 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) }">${sectionTitle} `; // render question - for (let question of questionsArr) { - let answer = answers[question.questionId] || `-`; - table += ` - ${question.question} + const excludedInputTypes = ['buttonInput']; + for (let currentQuestion of questionsArr) { + let { question, questionId, input: { type = '' } = {} } = currentQuestion; + if (!excludedInputTypes.includes(type)) { + let answer = answers[questionId] || `-`; + table += ` + ${question} ${answer} `; + } } } table += ``; @@ -345,13 +352,13 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) */ const _groupByPageSection = allQuestions => { // group by page [Safe People, Safe Project] - let groupedByPage = _.groupBy(allQuestions, item => { + let groupedByPage = groupBy(allQuestions, item => { return item.page; }); // within grouped [Safe People: {Applicant, Applicant1, Something}] - let grouped = _.forEach(groupedByPage, (value, key) => { - groupedByPage[key] = _.groupBy(groupedByPage[key], item => { + let grouped = forEach(groupedByPage, (value, key) => { + groupedByPage[key] = groupBy(groupedByPage[key], item => { return item.questionSetId; }); }); @@ -559,6 +566,131 @@ const _generateDARStatusChangedEmail = options => { return body; }; +const _generateDARClonedEmail = options => { + let { id, projectId, projectName, datasetTitles, dateSubmitted, applicants, firstname, lastname } = options; + dateSubmitted = isNil(dateSubmitted) ? 'Not yet submitted' : moment(dateSubmitted).format('D MMM YYYY'); + + let body = `
+ + + + + + + + + + + + + + +
+ Data access request application has been duplicated +
+ ${firstname} ${lastname} has duplicated the contents of the following application into a new form. +

+ You will have received this message if you were a contributor to the original form, + but you will not have access to the new form unless granted by the creator, + at which point you will receive an additional notification. +

+
+ + + + + + + + + + + + + + + + + + + + + +
Project${ + projectName || 'No project name set' + }
Project ID${ + projectId || id + }
Dataset(s)${datasetTitles}
Applicants${applicants}
Submitted${dateSubmitted}
+
+
`; + return body; +}; + +const _generateDARDeletedEmail = options => { + let { publisher, projectName, datasetTitles, applicants, firstname, lastname, createdAt } = options; + createdAt = moment(createdAt).format('D MMM YYYY'); + + let body = `
+ + + + + + + + + + + + + + +
+ Data Access Request Application Deleted +
+ ${firstname} ${lastname} has deleted a data access request application. +
+ + + + + + + + + + + + + + + + + + + + + +
Project${ + projectName || 'No project name set' + }
Dataset(s)${datasetTitles}
Data custodian${publisher}
Applicants${applicants}
Created${createdAt}
+
+
`; + return body; +}; + const _generateDARReturnedEmail = options => { let { id, projectName, publisher, datasetTitles, dateSubmitted, applicants } = options; let body = `
@@ -1582,6 +1714,8 @@ export default { generateEmail: _generateEmail, generateDARReturnedEmail: _generateDARReturnedEmail, generateDARStatusChangedEmail: _generateDARStatusChangedEmail, + generateDARClonedEmail: _generateDARClonedEmail, + generateDARDeletedEmail: _generateDARDeletedEmail, generateContributorEmail: _generateContributorEmail, generateStepOverrideEmail: _generateStepOverrideEmail, generateNewReviewPhaseEmail: _generateNewReviewPhaseEmail, diff --git a/src/resources/utilities/helper.util.js b/src/resources/utilities/helper.util.js index c687b006..433fb322 100644 --- a/src/resources/utilities/helper.util.js +++ b/src/resources/utilities/helper.util.js @@ -82,6 +82,16 @@ const _getEnvironment = () => { return environment; }; +const _toTitleCase = str => { + return str.replace(/\w\S*/g, function (txt) { + return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase(); + }); +}; + +const _escapeRegexChars = str => { + return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +}; + export default { censorEmail: _censorEmail, arraysEqual: _arraysEqual, @@ -90,4 +100,6 @@ export default { generateAlphaNumericString: _generateAlphaNumericString, hidePrivateProfileDetails: _hidePrivateProfileDetails, getEnvironment: _getEnvironment, + toTitleCase: _toTitleCase, + escapeRegexChars: _escapeRegexChars, }; diff --git a/src/resources/utilities/notificationBuilder.js b/src/resources/utilities/notificationBuilder.js index 6fda4be9..6c6848f6 100644 --- a/src/resources/utilities/notificationBuilder.js +++ b/src/resources/utilities/notificationBuilder.js @@ -1,6 +1,6 @@ import { MessagesModel } from '../message/message.model'; -const triggerNotificationMessage = (messageRecipients, messageDescription, messageType, messageObjectID) => { +const triggerNotificationMessage = (messageRecipients, messageDescription, messageType, messageObjectID, publisherName = '') => { messageRecipients.forEach(async (recipient) => { let messageID = parseInt(Math.random().toString().replace('0.', 
'')); let message = new MessagesModel({ @@ -11,7 +11,8 @@ const triggerNotificationMessage = (messageRecipients, messageDescription, messa messageID, messageObjectID : (typeof messageObjectID == 'number' ? messageObjectID : messageID), messageTo: recipient, - messageDataRequestID: messageType === 'data access request' ? messageObjectID : null + messageDataRequestID: messageType === 'data access request' ? messageObjectID : null, + publisherName }); await message.save(async (err) => { if (err) { diff --git a/src/resources/workflow/workflow.controller.js b/src/resources/workflow/workflow.controller.js index 2385650a..e3513c6a 100644 --- a/src/resources/workflow/workflow.controller.js +++ b/src/resources/workflow/workflow.controller.js @@ -50,10 +50,7 @@ const getWorkflowById = async (req, res) => { // 4. Build workflow response let { active, _id, id, workflowName, version, steps, applications = [] } = workflow.toObject(); applications = applications.map(app => { - let { aboutApplication, _id } = app; - if (typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(aboutApplication) || {}; - } + let { aboutApplication = {}, _id } = app; let { projectName = 'No project name' } = aboutApplication; return { projectName, _id }; });