diff --git a/package.json b/package.json index 6181a578..ff15fcf1 100644 --- a/package.json +++ b/package.json @@ -87,7 +87,7 @@ }, "scripts": { "start": "node index.js", - "server": "nodemon index.js", + "server": "nodemon --ignore 'src/**/*.json' index.js", "debug": "nodemon --inspect=0.0.0.0:3001 index.js", "build": "", "test": "jest --runInBand", diff --git a/src/controllers/datasetonboarding.controller.js b/src/controllers/datasetonboarding.controller.js index 6a8ce5a7..260ddabf 100644 --- a/src/controllers/datasetonboarding.controller.js +++ b/src/controllers/datasetonboarding.controller.js @@ -12,6 +12,7 @@ import { filtersService } from '../resources/filters/dependency'; import datasetonboardingUtil from '../utils/datasetonboarding.util'; import { PublisherModel } from '../resources/publisher/publisher.model'; import { activityLogService } from '../resources/activitylog/dependency'; +const HttpClient = require('../services/httpClient/httpClient'); const readEnv = process.env.ENV || 'prod'; @@ -217,325 +218,268 @@ export default class DatasetOnboardingController { }; changeDatasetVersionStatus = async (req, res) => { - try { - // 1. Id is the _id object in MongoDb not the generated id or dataset Id - // 2. Get the userId - const id = req.params.id || null; - let { firstname, lastname } = req.user; - let { applicationStatus, applicationStatusDesc = '' } = req.body; - - if (!id) return res.status(404).json({ status: 'error', message: 'Dataset _id could not be found.' }); + const id = req.params.id || null; + if (!id) { + return res.status(404).json({ + status: 'error', + message: 'Dataset _id could not be found.', + }); + } - // 3. 
Check user type and authentication to submit application - let { authorised, userType } = await datasetonboardingUtil.getUserPermissionsForDataset(id, req.user); - if (!authorised) { - return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); - } + let { firstname, lastname } = req.user; + let { applicationStatus, applicationStatusDesc = '' } = req.body; - if (applicationStatus === 'approved') { - if (userType !== constants.userTypes.ADMIN) { - return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); - } + let { authorised, userType } = await datasetonboardingUtil.getUserPermissionsForDataset(id, req.user); + if (!authorised) { + return res.status(401).json({ + status: 'failure', + message: 'Unauthorised', + }); + } - let dataset = await Data.findOne({ _id: id }); + try { + let metadataCatalogueLink = process.env.MDC_Config_HDRUK_metadataUrl || 'https://modelcatalogue.cs.ox.ac.uk/hdruk-preprod'; + const loginDetails = { + username: process.env.MDC_Config_HDRUK_username || '', + password: process.env.MDC_Config_HDRUK_password || '', + }; + let updatedDataset = null; + let dataset = null; + let activityLogStatus = null; + + const _httpClient = new HttpClient(); + switch (applicationStatus) { + case 'approved': + if (userType !== constants.userTypes.ADMIN) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } - if (!dataset) return res.status(404).json({ status: 'error', message: 'Dataset could not be found.' }); + dataset = await Data.findOne({ _id: id }); - dataset.questionAnswers = JSON.parse(dataset.questionAnswers); - const publisherData = await PublisherModel.find({ _id: dataset.datasetv2.summary.publisher.identifier }).lean(); + if (!dataset) return res.status(404).json({ status: 'error', message: 'Dataset could not be found.' }); - //1. 
create new version on MDC with version number and take datasetid and store - let metadataCatalogueLink = process.env.MDC_Config_HDRUK_metadataUrl || 'https://modelcatalogue.cs.ox.ac.uk/hdruk-preprod'; - const loginDetails = { - username: process.env.MDC_Config_HDRUK_username || '', - password: process.env.MDC_Config_HDRUK_password || '', - }; + dataset.questionAnswers = JSON.parse(dataset.questionAnswers); + const publisherData = await PublisherModel.find({ _id: dataset.datasetv2.summary.publisher.identifier }).lean(); - await axios - .post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); + const responseLogin = await _httpClient.post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { withCredentials: true, timeout: 5000, - }) - .then(async session => { - axios.defaults.headers.Cookie = session.headers['set-cookie'][0]; // get cookie from request - - let jsonData = JSON.stringify(await datasetonboardingUtil.buildJSONFile(dataset)); - fs.writeFileSync(__dirname + `/datasetfiles/${dataset._id}.json`, jsonData); - - var data = new FormData(); - data.append('folderId', publisherData[0].mdcFolderId); - data.append('importFile', fs.createReadStream(__dirname + `/datasetfiles/${dataset._id}.json`)); - data.append('finalised', 'false'); - data.append('importAsNewDocumentationVersion', 'true'); - - await axios - .post( - metadataCatalogueLink + '/api/dataModels/import/ox.softeng.metadatacatalogue.core.spi.json/JsonImporterService/1.1', - data, - { - withCredentials: true, - timeout: 60000, - headers: { - ...data.getHeaders(), - }, - } - ) - .then(async newDatasetVersion => { - let newDatasetVersionId = newDatasetVersion.data.items[0].id; - fs.unlinkSync(__dirname + `/datasetfiles/${dataset._id}.json`); - - const updatedDatasetDetails = { - documentationVersion: dataset.datasetVersion, - }; - - await axios - 
.put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}`, updatedDatasetDetails, { - withCredentials: true, - timeout: 20000, - }) - .catch(err => { - console.error('Error when trying to update the version number on the MDC - ' + err.message); - }); - - await axios - .put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}/finalise`, { - withCredentials: true, - timeout: 20000, - }) - .catch(err => { - console.error('Error when trying to finalise the dataset on the MDC - ' + err.message); - }); - - // Adding to DB - let datasetv2Object = await datasetonboardingUtil.buildv2Object(dataset, newDatasetVersionId); - - let previousDataset = await Data.findOneAndUpdate({ pid: dataset.pid, activeflag: 'active' }, { activeflag: 'archive' }); - let previousCounter = 0; - let previousDiscourseTopicId = 0; - if (previousDataset) previousCounter = previousDataset.counter || 0; - if (previousDataset) previousDiscourseTopicId = previousDataset.discourseTopicId || 0; - - //get technicaldetails and metadataQuality - let technicalDetails = await datasetonboardingUtil.buildTechnicalDetails(dataset.structuralMetadata); - let metadataQuality = await datasetonboardingUtil.buildMetadataQuality(dataset, datasetv2Object, dataset.pid); - - // call filterCommercialUsage to determine commericalUse field only pass in v2 a - let commercialUse = filtersService.computeCommericalUse({}, datasetv2Object); - - let updatedDataset = await Data.findOneAndUpdate( - { _id: id }, - { - datasetid: newDatasetVersionId, - datasetVersion: dataset.datasetVersion, - name: dataset.questionAnswers['properties/summary/title'] || '', - description: dataset.questionAnswers['properties/documentation/abstract'] || '', - activeflag: 'active', - tags: { - features: dataset.questionAnswers['properties/summary/keywords'] || [], - }, - commercialUse, - hasTechnicalDetails: !isEmpty(technicalDetails) ? 
true : false, - 'timestamps.updated': Date.now(), - 'timestamps.published': Date.now(), - counter: previousCounter, - datasetfields: { - publisher: `${publisherData[0].publisherDetails.memberOf} > ${publisherData[0].publisherDetails.name}`, - geographicCoverage: dataset.questionAnswers['properties/coverage/spatial'] || [], - physicalSampleAvailability: dataset.questionAnswers['properties/coverage/physicalSampleAvailability'] || [], - abstract: dataset.questionAnswers['properties/summary/abstract'] || '', - releaseDate: dataset.questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', - accessRequestDuration: dataset.questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', - //conformsTo: dataset.questionAnswers['properties/accessibility/formatAndStandards/conformsTo'] || '', - //accessRights: dataset.questionAnswers['properties/accessibility/access/accessRights'] || '', - //jurisdiction: dataset.questionAnswers['properties/accessibility/access/jurisdiction'] || '', - datasetStartDate: dataset.questionAnswers['properties/provenance/temporal/startDate'] || '', - datasetEndDate: dataset.questionAnswers['properties/provenance/temporal/endDate'] || '', - //statisticalPopulation: datasetMDC.statisticalPopulation, - ageBand: dataset.questionAnswers['properties/coverage/typicalAgeRange'] || '', - contactPoint: dataset.questionAnswers['properties/summary/contactPoint'] || '', - periodicity: dataset.questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || '', - - metadataquality: metadataQuality, - //datautility: dataUtility ? dataUtility : {}, - //metadataschema: metadataSchema && metadataSchema.data ? metadataSchema.data : {}, - technicaldetails: technicalDetails, - //versionLinks: versionLinks && versionLinks.data && versionLinks.data.items ? 
versionLinks.data.items : [], - phenotypes: [], - }, - datasetv2: datasetv2Object, - applicationStatusDesc: applicationStatusDesc, - discourseTopicId: previousDiscourseTopicId, - }, - { new: true } - ); - - filtersService.optimiseFilters('dataset'); - - let datasetv2DifferenceObject = datasetonboardingUtil.datasetv2ObjectComparison(datasetv2Object, dataset.datasetv2); - - if (!_.isEmpty(datasetv2DifferenceObject)) { - await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_UPDATES_SUBMITTED, { - type: constants.activityLogTypes.DATASET, - updatedDataset, - user: req.user, - differences: datasetv2DifferenceObject, - }); - } - - //emails / notifications - await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETAPPROVED, updatedDataset); - - await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_APPROVED, { - type: constants.activityLogTypes.DATASET, - updatedDataset, - user: req.user, - }); - }) - .catch(err => { - console.error('Error when trying to create new dataset on the MDC - ' + err.message); - }); - }) - .catch(err => { - console.error('Error when trying to login to MDC - ' + err.message); }); + const [cookie] = responseLogin.headers['set-cookie']; + _httpClient.setHttpClientCookies(cookie); + + let jsonData = JSON.stringify(await datasetonboardingUtil.buildJSONFile(dataset)); + fs.writeFileSync(__dirname + `/datasetfiles/${dataset._id}.json`, jsonData); + + var data = new FormData(); + data.append('folderId', publisherData[0].mdcFolderId); + data.append('importFile', fs.createReadStream(__dirname + `/datasetfiles/${dataset._id}.json`)); + data.append('finalised', 'false'); + data.append('importAsNewDocumentationVersion', 'true'); + + const responseImport = await _httpClient.post( + metadataCatalogueLink + '/api/dataModels/import/ox.softeng.metadatacatalogue.core.spi.json/JsonImporterService/1.1', + data, + { + withCredentials: true, + timeout: 60000, + headers: { + 
...data.getHeaders(), + }, + } + ); - await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { - console.error('Error when trying to logout of the MDC - ' + err.message); - }); + let newDatasetVersionId = responseImport.data.items[0].id; + fs.unlinkSync(__dirname + `/datasetfiles/${dataset._id}.json`); - return res.status(200).json({ status: 'success' }); - } else if (applicationStatus === 'rejected') { - if (userType !== constants.userTypes.ADMIN) { - return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); - } + const updatedDatasetDetails = { + documentationVersion: dataset.datasetVersion, + }; - let updatedDataset = await Data.findOneAndUpdate( - { _id: id }, - { - activeflag: constants.datasetStatuses.REJECTED, - applicationStatusDesc: applicationStatusDesc, - applicationStatusAuthor: `${firstname} ${lastname}`, - 'timestamps.rejected': Date.now(), - 'timestamps.updated': Date.now(), - }, - { new: true } - ); + await _httpClient.put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}`, updatedDatasetDetails, { + withCredentials: true, + timeout: 20000, + }); + await _httpClient.put(metadataCatalogueLink + `/api/dataModels/${newDatasetVersionId}/finalise`, null, { + withCredentials: true, + timeout: 20000, + }); - //emails / notifications - await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETREJECTED, updatedDataset); + // Adding to DB + let datasetv2Object = await datasetonboardingUtil.buildv2Object(dataset, newDatasetVersionId); + + let previousDataset = await Data.findOneAndUpdate({ pid: dataset.pid, activeflag: 'active' }, { activeflag: 'archive' }); + let previousCounter = 0; + let previousDiscourseTopicId = 0; + if (previousDataset) previousCounter = previousDataset.counter || 0; + if (previousDataset) previousDiscourseTopicId = previousDataset.discourseTopicId || 0; + + //get technicaldetails and metadataQuality + let 
technicalDetails = await datasetonboardingUtil.buildTechnicalDetails(dataset.structuralMetadata); + let metadataQuality = await datasetonboardingUtil.buildMetadataQuality(dataset, datasetv2Object, dataset.pid); + + // call filterCommercialUsage to determine commericalUse field only pass in v2 a + let commercialUse = filtersService.computeCommericalUse({}, datasetv2Object); + + updatedDataset = await Data.findOneAndUpdate( + { _id: id }, + { + datasetid: newDatasetVersionId, + datasetVersion: dataset.datasetVersion, + name: dataset.questionAnswers['properties/summary/title'] || '', + description: dataset.questionAnswers['properties/documentation/abstract'] || '', + activeflag: 'active', + tags: { + features: dataset.questionAnswers['properties/summary/keywords'] || [], + }, + commercialUse, + hasTechnicalDetails: !isEmpty(technicalDetails) ? true : false, + 'timestamps.updated': Date.now(), + 'timestamps.published': Date.now(), + counter: previousCounter, + datasetfields: { + publisher: `${publisherData[0].publisherDetails.memberOf} > ${publisherData[0].publisherDetails.name}`, + geographicCoverage: dataset.questionAnswers['properties/coverage/spatial'] || [], + physicalSampleAvailability: dataset.questionAnswers['properties/coverage/physicalSampleAvailability'] || [], + abstract: dataset.questionAnswers['properties/summary/abstract'] || '', + releaseDate: dataset.questionAnswers['properties/provenance/temporal/distributionReleaseDate'] || '', + accessRequestDuration: dataset.questionAnswers['properties/accessibility/access/deliveryLeadTime'] || '', + datasetStartDate: dataset.questionAnswers['properties/provenance/temporal/startDate'] || '', + datasetEndDate: dataset.questionAnswers['properties/provenance/temporal/endDate'] || '', + ageBand: dataset.questionAnswers['properties/coverage/typicalAgeRange'] || '', + contactPoint: dataset.questionAnswers['properties/summary/contactPoint'] || '', + periodicity: 
dataset.questionAnswers['properties/provenance/temporal/accrualPeriodicity'] || '', + metadataquality: metadataQuality, + technicaldetails: technicalDetails, + phenotypes: [], + }, + datasetv2: datasetv2Object, + applicationStatusDesc: applicationStatusDesc, + discourseTopicId: previousDiscourseTopicId, + }, + { new: true } + ); + + filtersService.optimiseFilters('dataset'); + + let datasetv2DifferenceObject = datasetonboardingUtil.datasetv2ObjectComparison(datasetv2Object, dataset.datasetv2); + + if (!_.isEmpty(datasetv2DifferenceObject)) { + await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_UPDATES_SUBMITTED, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + differences: datasetv2DifferenceObject, + }); + } - await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_REJECTED, { - type: constants.activityLogTypes.DATASET, - updatedDataset, - user: req.user, - }); + //emails / notifications + await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETAPPROVED, updatedDataset); - return res.status(200).json({ status: 'success' }); - } else if (applicationStatus === 'archive') { - let dataset = await Data.findOne({ _id: id }).lean(); + activityLogStatus = constants.activityLogEvents.dataset.DATASET_VERSION_APPROVED; - if (dataset.timestamps.submitted) { - //soft delete from MDC - let metadataCatalogueLink = process.env.MDC_Config_HDRUK_metadataUrl || 'https://modelcatalogue.cs.ox.ac.uk/hdruk-preprod'; + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); - await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { - console.error('Error when trying to logout of the MDC - ' + err.message); - }); - const loginDetails = { - username: process.env.MDC_Config_HDRUK_username || '', - password: 
process.env.MDC_Config_HDRUK_password || '', - }; + break; + case 'rejected': + if (userType !== constants.userTypes.ADMIN) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } - await axios - .post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { + updatedDataset = await Data.findOneAndUpdate( + { _id: id }, + { + activeflag: constants.datasetStatuses.REJECTED, + applicationStatusDesc: applicationStatusDesc, + applicationStatusAuthor: `${firstname} ${lastname}`, + 'timestamps.rejected': Date.now(), + 'timestamps.updated': Date.now(), + }, + { new: true } + ); + + //emails / notifications + await datasetonboardingUtil.createNotifications(constants.notificationTypes.DATASETREJECTED, updatedDataset); + + activityLogStatus = constants.activityLogEvents.dataset.DATASET_VERSION_REJECTED; + + break; + case 'archive': + dataset = await Data.findOne({ _id: id }).lean(); + + if (dataset.timestamps.submitted) { + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); + + const archiveLoginResponse = await _httpClient.post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { withCredentials: true, timeout: 5000, - }) - .then(async session => { - axios.defaults.headers.Cookie = session.headers['set-cookie'][0]; // get cookie from request - - await axios - .delete(metadataCatalogueLink + `/api/dataModels/${dataset.datasetid}`, { withCredentials: true, timeout: 5000 }) - .catch(err => { - console.error('Error when trying to delete(archive) a dataset - ' + err.message); - }); - }) - .catch(err => { - console.error('Error when trying to login to MDC - ' + err.message); }); + const [archiveCookie] = archiveLoginResponse.headers['set-cookie']; + _httpClient.setHttpClientCookies(archiveCookie); - await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { - console.error('Error when trying to logout of the 
MDC - ' + err.message); - }); - } - let updatedDataset = await Data.findOneAndUpdate( - { _id: id }, - { activeflag: constants.datasetStatuses.ARCHIVE, 'timestamps.updated': Date.now(), 'timestamps.archived': Date.now() } - ); - - await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_ARCHIVED, { - type: constants.activityLogTypes.DATASET, - updatedDataset, - user: req.user, - }); - - return res.status(200).json({ status: 'success' }); - } else if (applicationStatus === 'unarchive') { - let dataset = await Data.findOne({ _id: id }).lean(); - let flagIs = 'draft'; - if (dataset.timestamps.submitted) { - let metadataCatalogueLink = process.env.MDC_Config_HDRUK_metadataUrl || 'https://modelcatalogue.cs.ox.ac.uk/hdruk-preprod'; - - await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { - console.error('Error when trying to logout of the MDC - ' + err.message); - }); - const loginDetails = { - username: process.env.MDC_Config_HDRUK_username || '', - password: process.env.MDC_Config_HDRUK_password || '', - }; + await _httpClient.delete(metadataCatalogueLink + `/api/dataModels/${dataset.datasetid}`, loginDetails, { + withCredentials: true, + timeout: 5000, + }); + + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); + } + updatedDataset = await Data.findOneAndUpdate( + { _id: id }, + { activeflag: constants.datasetStatuses.ARCHIVE, 'timestamps.updated': Date.now(), 'timestamps.archived': Date.now() } + ); + + activityLogStatus = constants.activityLogEvents.dataset.DATASET_VERSION_ARCHIVED; + + break; + case 'unarchive': + dataset = await Data.findOne({ _id: id }).lean(); + let flagIs = 'draft'; + if (dataset.timestamps.submitted) { + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); - await axios - 
.post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { + const unarchiveLoginResponse = await _httpClient.post(metadataCatalogueLink + '/api/authentication/login', loginDetails, { withCredentials: true, timeout: 5000, - }) - .then(async session => { - axios.defaults.headers.Cookie = session.headers['set-cookie'][0]; // get cookie from request - - const updatedDatasetDetails = { - deleted: 'false', - }; - await axios - .put(metadataCatalogueLink + `/api/dataModels/${dataset.datasetid}`, updatedDatasetDetails, { - withCredentials: true, - timeout: 5000, - }) - .catch(err => { - console.error('Error when trying to update the version number on the MDC - ' + err.message); - }); - }) - .catch(err => { - console.error('Error when trying to login to MDC - ' + err.message); }); + const [unarchiveCookie] = unarchiveLoginResponse.headers['set-cookie']; + _httpClient.setHttpClientCookies(unarchiveCookie); - await axios.post(metadataCatalogueLink + `/api/authentication/logout`, { withCredentials: true, timeout: 5000 }).catch(err => { - console.error('Error when trying to logout of the MDC - ' + err.message); - }); + const unarchiveDatasetDetails = { + deleted: 'false', + }; - flagIs = 'active'; - } - const updatedDataset = await Data.findOneAndUpdate({ _id: id }, { activeflag: flagIs }); //active or draft + await _httpClient.put( + metadataCatalogueLink + `/api/dataModels/${dataset.datasetid}`, + unarchiveDatasetDetails, + { withCredentials: true, timeout: 5000 } + ); + + await _httpClient.post(metadataCatalogueLink + `/api/authentication/logout`, null, { withCredentials: true, timeout: 5000 }); - await activityLogService.logActivity(constants.activityLogEvents.dataset.DATASET_VERSION_UNARCHIVED, { - type: constants.activityLogTypes.DATASET, - updatedDataset, - user: req.user, - }); + flagIs = 'active'; + } + updatedDataset = await Data.findOneAndUpdate({ _id: id }, { activeflag: flagIs }); //active or draft + + activityLogStatus = 
constants.activityLogEvents.dataset.DATASET_VERSION_UNARCHIVED; - return res.status(200).json({ status: 'success' }); + break; + default: + return res.status(500).json({ + status: 'error', + message: 'An error occurred - application status is not set correctly', + }); + } + + await activityLogService.logActivity(activityLogStatus, { + type: constants.activityLogTypes.DATASET, + updatedDataset, + user: req.user, + }); + + return res.status(200).json({ status: 'success' }); } catch (err) { - process.stdout.write(`${err.message}\n`); res.status(500).json({ status: 'error', message: 'An error occurred updating the dataset status', diff --git a/src/middlewares/__tests__/dataUseRegister.middleware.test.js b/src/middlewares/__tests__/dataUseRegister.middleware.test.js new file mode 100644 index 00000000..4c60927b --- /dev/null +++ b/src/middlewares/__tests__/dataUseRegister.middleware.test.js @@ -0,0 +1,282 @@ +import sinon from 'sinon'; + +import { dataUseRegisterService } from '../../resources/dataUseRegister/dependency'; +import { validateUpdateRequest, validateUploadRequest, authorizeUpdate, authorizeUpload } from '../dataUseRegister.middleware'; + +afterEach(function () { + sinon.restore(); +}); + +describe('Testing the dataUseRegister middleware', () => { + const mockedRequest = () => { + const req = {}; + req.params = jest.fn().mockReturnValue(req); + req.body = jest.fn().mockReturnValue(req); + return req; + }; + + const mockedResponse = () => { + const res = {}; + res.status = jest.fn().mockReturnValue(res); + res.json = jest.fn().mockReturnValue(res); + return res; + }; + + describe('Testing the validateUpdateRequest middleware', () => { + it('it should invoke next() if a valid ID is passed in the request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.params.id = 'mockID'; + + validateUpdateRequest(req, res, nextFunction); + + expect(nextFunction.mock.calls.length).toBe(1); + }); + + it('it should return 
the appropriate 400 error if no ID is given in the request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + validateUpdateRequest(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You must provide a data user register identifier', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(400); + }); + }); + + describe('Testing the validateUploadRequest middleware', () => { + it('it should invoke next() if a valid teamID and dataUses array are supplied in the request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body.teamId = 'testID'; + req.body.dataUses = ['dataUse']; + + validateUploadRequest(req, res, nextFunction); + + expect(nextFunction.mock.calls.length).toBe(1); + }); + + it('it should give an appropriate error if no teamID is given in the request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body.dataUses = ['dataUse']; + + validateUploadRequest(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You must provide the custodian team identifier to associate the data uses to', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(400); + }); + + it('it should give an appropriate error if no dataUses are given in the request', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body.teamId = 'testID'; + + validateUploadRequest(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You must provide data uses to upload', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + 
expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(400); + }); + }); + + describe('Testing the authorizeUpdate middleware', () => { + it('it should return a 404 if no data use can be found for a given ID', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + sinon.stub(dataUseRegisterService, 'getDataUseRegister'); + + await authorizeUpdate(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'The requested data use register entry could not be found', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(404); + }); + + it('it should return a 401 if user not authorised to update DUR', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.user = { + _id: 'testUser', + teams: [ + { + publisher: { _id: { equals: jest.fn() } }, + type: 'NOT_ADMIN_TEAM', + members: [{ memberid: 'testUser', roles: 'admin_data_use' }], + }, + ], + }; + + sinon + .stub(dataUseRegisterService, 'getDataUseRegister') + .returns({ publisher: 'testPublisher', gatewayApplicants: ['anotherTestUser'] }); + + await authorizeUpdate(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You are not authorised to perform this action', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(401); + }); + + it('it should return a 401 if the projectID text is mismatched', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body = { + projectIdText: 'notAMatch', + }; + + req.user = { + _id: 'testUser', + teams: [{ publisher: { _id: 'testPublisher' }, type: 'admin', members: [{ memberid: 'testUser', roles: 
'admin_data_use' }] }], + }; + + sinon.stub(dataUseRegisterService, 'getDataUseRegister').returns({ projectIdText: 'testIdText', gatewayApplicants: ['testUser'] }); + + await authorizeUpdate(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You are not authorised to update the project ID of an automatic data use register', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(401); + }); + + it('it should return a 401 if the datasetTitles is mismatched', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body = { + datasetTitles: 'notAMatch', + }; + + req.user = { + _id: 'testUser', + teams: [{ publisher: { _id: 'testPublisher' }, type: 'admin', members: [{ memberid: 'testUser', roles: 'admin_data_use' }] }], + }; + + sinon.stub(dataUseRegisterService, 'getDataUseRegister').returns({ datasetTitles: 'datasetTitles', gatewayApplicants: ['testUser'] }); + + await authorizeUpdate(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You are not authorised to update the datasets of an automatic data use register', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(401); + }); + + it('it should invoke next if all conditions are satisfied', async () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.body = { + datasetTitles: 'match', + projectIdText: 'match', + }; + + req.user = { + _id: 'testUser', + teams: [{ publisher: { _id: 'testPublisher' }, type: 'admin', members: [{ memberid: 'testUser', roles: 'admin_data_use' }] }], + }; + + sinon + .stub(dataUseRegisterService, 'getDataUseRegister') + .returns({ datasetTitles: 'match', projectIdText: 'match', gatewayApplicants: 
['testUser'] }); + + await authorizeUpdate(req, res, nextFunction); + + expect(nextFunction.mock.calls.length).toBe(1); + }); + }); + + describe('Testing the authorizeUpload middleware', () => { + it('It should return 401 if user is not authorised', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.user = { + _id: 'testUser', + teams: [ + { publisher: { _id: { equals: jest.fn() } }, type: 'NotAdmin', members: [{ memberid: 'testUser', roles: 'admin_data_use' }] }, + ], + }; + + authorizeUpload(req, res, nextFunction); + + const expectedResponse = { + success: false, + message: 'You are not authorised to perform this action', + }; + + expect(nextFunction.mock.calls.length).toBe(0); + expect(res.json).toHaveBeenCalledWith(expectedResponse); + expect(res.status).toHaveBeenCalledWith(401); + }); + + it('It should invoke next() if user is authorised', () => { + let req = mockedRequest(); + let res = mockedResponse(); + const nextFunction = jest.fn(); + + req.user = { + _id: 'testUser', + teams: [{ publisher: { _id: 'testPublisher' }, type: 'admin', members: [{ memberid: 'testUser', roles: 'admin_data_use' }] }], + }; + + authorizeUpload(req, res, nextFunction); + + expect(nextFunction.mock.calls.length).toBe(1); + }); + }); +}); diff --git a/src/middlewares/__tests__/datasetonboarding.middleware.test.js b/src/middlewares/__tests__/datasetonboarding.middleware.test.js index 2d073425..bbea2d0f 100644 --- a/src/middlewares/__tests__/datasetonboarding.middleware.test.js +++ b/src/middlewares/__tests__/datasetonboarding.middleware.test.js @@ -1,7 +1,6 @@ import { authoriseUserForPublisher, validateSearchParameters } from '../datasetonboarding.middleware'; import { UserModel } from '../../resources/user/user.model'; import constants from '../../resources/utilities/constants.util'; -import { testing } from 'googleapis/build/src/apis/testing'; describe('Testing the datasetonboarding middleware', () => { const 
mockedRequest = () => { diff --git a/src/middlewares/dataUseRegister.middleware.js b/src/middlewares/dataUseRegister.middleware.js new file mode 100644 index 00000000..5bc90f36 --- /dev/null +++ b/src/middlewares/dataUseRegister.middleware.js @@ -0,0 +1,122 @@ +import { isEmpty, isNull, isEqual } from 'lodash'; + +import constants from '../resources/utilities/constants.util'; +import { dataUseRegisterService } from '../resources/dataUseRegister/dependency'; + +const _isUserMemberOfTeam = (user, teamId) => { + let { teams } = user; + return teams.filter(team => !isNull(team.publisher)).some(team => team.publisher._id.equals(teamId)); +}; + +const _isUserDataUseAdmin = user => { + let { teams } = user; + + if (teams) { + teams = teams.map(team => { + let { publisher, type, members } = team; + let member = members.find(member => { + return member.memberid.toString() === user._id.toString(); + }); + let { roles } = member; + return { ...publisher, type, roles }; + }); + } + + return teams + .filter(team => team.type === constants.teamTypes.ADMIN) + .some(team => team.roles.includes(constants.roleTypes.ADMIN_DATA_USE)); +}; + +const validateUpdateRequest = (req, res, next) => { + const { id } = req.params; + + if (!id) { + return res.status(400).json({ + success: false, + message: 'You must provide a data user register identifier', + }); + } + + next(); +}; + +const validateUploadRequest = (req, res, next) => { + const { teamId, dataUses } = req.body; + let errors = []; + + if (!teamId) { + errors.push('You must provide the custodian team identifier to associate the data uses to'); + } + + if (!dataUses || isEmpty(dataUses)) { + errors.push('You must provide data uses to upload'); + } + + if (!isEmpty(errors)) { + return res.status(400).json({ + success: false, + message: errors.join(', '), + }); + } + + next(); +}; + +const authorizeUpdate = async (req, res, next) => { + const requestingUser = req.user; + const { id } = req.params; + const { projectIdText, 
datasetTitles } = req.body; + + const dataUseRegister = await dataUseRegisterService.getDataUseRegister(id); + + if (!dataUseRegister) { + return res.status(404).json({ + success: false, + message: 'The requested data use register entry could not be found', + }); + } + + const { publisher } = dataUseRegister; + const isAuthor = dataUseRegister.gatewayApplicants.includes(requestingUser._id); + const authorised = _isUserDataUseAdmin(requestingUser) || _isUserMemberOfTeam(requestingUser, publisher._id) || isAuthor; + if (!authorised) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + + if (!dataUseRegister.manualUpload) { + if (projectIdText && !isEqual(projectIdText, dataUseRegister.projectIdText)) + return res.status(401).json({ + success: false, + message: 'You are not authorised to update the project ID of an automatic data use register', + }); + + if (datasetTitles && !isEqual(datasetTitles, dataUseRegister.datasetTitles)) + return res.status(401).json({ + success: false, + message: 'You are not authorised to update the datasets of an automatic data use register', + }); + } + + next(); +}; + +const authorizeUpload = async (req, res, next) => { + const requestingUser = req.user; + const { teamId } = req.body; + + const authorised = _isUserDataUseAdmin(requestingUser) || _isUserMemberOfTeam(requestingUser, teamId); + + if (!authorised) { + return res.status(401).json({ + success: false, + message: 'You are not authorised to perform this action', + }); + } + + next(); +}; + +export { validateUpdateRequest, validateUploadRequest, authorizeUpdate, authorizeUpload }; diff --git a/src/middlewares/index.js b/src/middlewares/index.js index a49ce158..74cdab05 100644 --- a/src/middlewares/index.js +++ b/src/middlewares/index.js @@ -11,6 +11,7 @@ import { import checkInputMiddleware from './checkInputMiddleware'; import checkMinLengthMiddleware from './checkMinLengthMiddleware'; import 
checkStringMiddleware from './checkStringMiddleware'; +import { validateUpdateRequest, validateUploadRequest, authorizeUpdate, authorizeUpload } from './dataUseRegister.middleware'; export { checkIDMiddleware, @@ -21,8 +22,12 @@ export { validateDeleteRequest, authoriseDelete, checkInputMiddleware, - checkMinLengthMiddleware, - checkStringMiddleware, + checkMinLengthMiddleware, + checkStringMiddleware, authoriseUserForPublisher, validateSearchParameters, + validateUpdateRequest, + validateUploadRequest, + authorizeUpdate, + authorizeUpload, }; diff --git a/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js b/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js index 1d65fb54..d624535d 100644 --- a/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js +++ b/src/resources/dataUseRegister/__mocks__/dataUseRegisters.js @@ -444,3 +444,98 @@ export const editedDataUseObject = { researchOutputs: 'research Outputs', relatedObjects: [], }; + +export const dataUseRegistersStub = [ + { + _id: 'test_id', + counter: 1, + keywords: ['Blood', 'Test'], + datasetTitles: ['HDR UK Papers & Preprints'], + gatewayDatasets: ['1ccb9da3-4b5e-4b4b-a142-af739037983c'], + nonGatewayDatasets: [], + gatewayApplicants: [], + nonGatewayApplicants: ['John Smith'], + fundersAndSponsors: [], + otherApprovalCommittees: [], + gatewayOutputsTools: [514564326145692], + gatewayOutputsPapers: [7485531672584456], + nonGatewayOutputs: ['www.yahoo.com'], + publisher: 'testPub', + projectIdText: '619B-73C4-BCB0-7C22-D168-4DAC', + projectId: 'testId', + applicantId: '', + accreditedResearcherStatus: 'Unknown', + projectTitle: 'This is the title for Pauls Project', + organisationName: 'PA Consulting', + laySummary: 'This is the lay summary for Pauls Project', + publicBenefitStatement: 'This is how my project benefit the public and what is the anticipated impact', + dutyOfConfidentiality: 'Informed consent', + datasetLinkageDescription: '', + requestFrequency: 'One-off', + 
legalBasisForDataArticle6: + '(e) processing is necessary for the performance of a task carried out in the public interest or in the exercise of official authority vested in the controller;', + legalBasisForDataArticle9: + '(c) processing is necessary to protect the vital interests of the data subject or of another natural person where the data subject is physically or legally incapable of giving consent;', + privacyEnhancements: 'These are the steps', + latestApprovalDate: '2021-11-22T11:04:03.288Z', + relatedObjects: [], + activeflag: 'active', + user: 'userId', + userName: 'Paul McCafferty', + updatedon: '2021-12-17T10:54:39.812Z', + lastActivity: '2021-12-17T10:54:39.812Z', + manualUpload: false, + id: 17931311209279556, + type: 'dataUseRegister', + createdAt: '2021-11-22T11:04:03.507Z', + updatedAt: '2021-12-17T10:54:39.815Z', + __v: 0, + accessType: 'Release', + }, + { + _id: 'test_id2', + counter: 1, + keywords: ['Blood', 'Test'], + datasetTitles: ['HDR UK Papers & Preprints'], + gatewayDatasets: ['1ccb9da3-4b5e-4b4b-a142-af739037983c'], + nonGatewayDatasets: [], + gatewayApplicants: [], + nonGatewayApplicants: ['John Smith'], + fundersAndSponsors: [], + otherApprovalCommittees: [], + gatewayOutputsTools: [514564326145692], + gatewayOutputsPapers: [7485531672584456], + nonGatewayOutputs: ['www.yahoo.com'], + publisher: 'testPub', + projectIdText: '619B-73C4-BCB0-7C22-D168-4DAC', + projectId: 'testId2', + applicantId: '', + accreditedResearcherStatus: 'Unknown', + projectTitle: 'This is the title for Pauls Second Project', + organisationName: 'PA Consulting', + laySummary: 'This is the lay summary for Pauls Second Project', + publicBenefitStatement: 'This is how my project benefit the public and what is the anticipated impact', + dutyOfConfidentiality: 'Informed consent', + datasetLinkageDescription: '', + requestFrequency: 'One-off', + legalBasisForDataArticle6: + '(e) processing is necessary for the performance of a task carried out in the public interest 
or in the exercise of official authority vested in the controller;', + legalBasisForDataArticle9: + '(c) processing is necessary to protect the vital interests of the data subject or of another natural person where the data subject is physically or legally incapable of giving consent;', + privacyEnhancements: 'These are the steps', + latestApprovalDate: '2021-11-22T11:04:03.288Z', + relatedObjects: [], + activeflag: 'active', + user: 'userId', + userName: 'Paul McCafferty', + updatedon: '2021-12-17T10:54:39.812Z', + lastActivity: '2021-12-17T10:54:39.812Z', + manualUpload: false, + id: 17931311209279556, + type: 'dataUseRegister', + createdAt: '2021-11-22T11:04:03.507Z', + updatedAt: '2021-12-17T10:54:39.815Z', + __v: 0, + accessType: 'Release', + }, +]; diff --git a/src/resources/dataUseRegister/__tests__/dataUseRegister.controller.test.js b/src/resources/dataUseRegister/__tests__/dataUseRegister.controller.test.js new file mode 100644 index 00000000..0302cb17 --- /dev/null +++ b/src/resources/dataUseRegister/__tests__/dataUseRegister.controller.test.js @@ -0,0 +1,65 @@ +import sinon from 'sinon'; +import mongoose from 'mongoose'; + +import dbHandler from '../../../config/in-memory-db'; +import { dataUseRegistersStub } from '../__mocks__/dataUseRegisters'; +import DataUseRegisterController from '../dataUseRegister.controller'; + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ datauseregisters: dataUseRegistersStub }); + await mongoose.connection + .collection('datauseregisters') + .createIndex({ datasetTitles: 'text', fundersAndSponsors: 'text', keywords: 'text', laySummary: 'text', projectTitle: 'text' }); +}); + +afterEach(() => { + sinon.restore(); +}); + +afterAll(async () => { + await dbHandler.closeDatabase(); +}); + +describe('CLASS: dataUseRegisterController', () => { + const dataUseRegisterController = new DataUseRegisterController(); + + const mockedRequest = () => { + const req = { + query: {}, + params: {}, + }; + 
return req; + }; + + const mockedResponse = () => { + const res = {}; + res.status = jest.fn().mockReturnValue(res); + res.json = jest.fn().mockReturnValue(res); + return res; + }; + + describe('METHOD: searchDataUseRegisters', () => { + it('TEST: it should return a 200 response and 2 DURs if no search string is given', async () => { + const req = mockedRequest(); + const res = mockedResponse(); + + await dataUseRegisterController.searchDataUseRegisters(req, res); + + expect(res.json.mock.calls[0][0].result.length).toBe(2); + expect(res.status).toHaveBeenCalledWith(200); + }); + + it('TEST: it should filter the results appropriately based on a free text search term', async () => { + const req = mockedRequest(); + const res = mockedResponse(); + + req.query.search = 'second'; + + await dataUseRegisterController.searchDataUseRegisters(req, res); + + expect(res.json.mock.calls[0][0].result.length).toBe(1); + expect(res.status).toHaveBeenCalledWith(200); + }); + }); +}); diff --git a/src/resources/dataUseRegister/dataUseRegister.controller.js b/src/resources/dataUseRegister/dataUseRegister.controller.js index 28ac7bc4..f2074714 100644 --- a/src/resources/dataUseRegister/dataUseRegister.controller.js +++ b/src/resources/dataUseRegister/dataUseRegister.controller.js @@ -267,8 +267,6 @@ export default class DataUseRegisterController extends Controller { } let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); - searchQuery = getObjectFilters(searchQuery, req, 'dataUseRegister'); const aggregateQuery = [ @@ -348,6 +346,10 @@ export default class DataUseRegisterController extends Controller { { $match: searchQuery }, ]; + if (searchString.length > 0) { + aggregateQuery.unshift({ $match: { $text: { $search: searchString } } }); + } + const result = await DataUseRegister.aggregate(aggregateQuery); return res.status(200).json({ success: true, result }); diff --git 
a/src/resources/dataUseRegister/dataUseRegister.route.js b/src/resources/dataUseRegister/dataUseRegister.route.js index 5b63ad54..a2ff75cf 100644 --- a/src/resources/dataUseRegister/dataUseRegister.route.js +++ b/src/resources/dataUseRegister/dataUseRegister.route.js @@ -1,132 +1,16 @@ import express from 'express'; -import DataUseRegisterController from './dataUseRegister.controller'; +import passport from 'passport'; +import { logger } from '../utilities/logger'; import { dataUseRegisterService } from './dependency'; import { activityLogService } from '../activitylog/dependency'; -import { logger } from '../utilities/logger'; -import passport from 'passport'; -import constants from './../utilities/constants.util'; -import { isEmpty, isNull, isEqual } from 'lodash'; +import DataUseRegisterController from './dataUseRegister.controller'; +import { validateUpdateRequest, authorizeUpdate, validateUploadRequest, authorizeUpload } from '../../middlewares'; const router = express.Router(); const dataUseRegisterController = new DataUseRegisterController(dataUseRegisterService, activityLogService); const logCategory = 'dataUseRegister'; -function isUserMemberOfTeam(user, teamId) { - let { teams } = user; - return teams.filter(team => !isNull(team.publisher)).some(team => team.publisher._id.equals(teamId)); -} - -function isUserDataUseAdmin(user) { - let { teams } = user; - - if (teams) { - teams = teams.map(team => { - let { publisher, type, members } = team; - let member = members.find(member => { - return member.memberid.toString() === user._id.toString(); - }); - let { roles } = member; - return { ...publisher, type, roles }; - }); - } - - return teams - .filter(team => team.type === constants.teamTypes.ADMIN) - .some(team => team.roles.includes(constants.roleTypes.ADMIN_DATA_USE)); -} - -const validateUpdateRequest = (req, res, next) => { - const { id } = req.params; - - if (!id) { - return res.status(400).json({ - success: false, - message: 'You must provide a data 
user register identifier', - }); - } - - next(); -}; - -const validateUploadRequest = (req, res, next) => { - const { teamId, dataUses } = req.body; - let errors = []; - - if (!teamId) { - errors.push('You must provide the custodian team identifier to associate the data uses to'); - } - - if (!dataUses || isEmpty(dataUses)) { - errors.push('You must provide data uses to upload'); - } - - if (!isEmpty(errors)) { - return res.status(400).json({ - success: false, - message: errors.join(', '), - }); - } - - next(); -}; - -const authorizeUpdate = async (req, res, next) => { - const requestingUser = req.user; - const { id } = req.params; - const { projectIdText, datasetTitles } = req.body; - - const dataUseRegister = await dataUseRegisterService.getDataUseRegister(id); - - if (!dataUseRegister) { - return res.status(404).json({ - success: false, - message: 'The requested data use register entry could not be found', - }); - } - - const { publisher } = dataUseRegister; - const authorised = isUserDataUseAdmin(requestingUser) || isUserMemberOfTeam(requestingUser, publisher._id); - if (!authorised) { - return res.status(401).json({ - success: false, - message: 'You are not authorised to perform this action', - }); - } - - if (!dataUseRegister.manualUpload) { - if (!isEqual(projectIdText, dataUseRegister.projectIdText)) - return res.status(401).json({ - success: false, - message: 'You are not authorised to update the project ID of an automatic data use register', - }); - - if (!isEqual(datasetTitles, dataUseRegister.datasetTitles)) - return res.status(401).json({ - success: false, - message: 'You are not authorised to update the datasets of an automatic data use register', - }); - } - - next(); -}; - -const authorizeUpload = async (req, res, next) => { - const requestingUser = req.user; - const { teamId } = req.body; - - const authorised = isUserDataUseAdmin(requestingUser) || isUserMemberOfTeam(requestingUser, teamId); - - if (!authorised) { - return res.status(401).json({ - 
success: false, - message: 'You are not authorised to perform this action', - }); - } - - next(); -}; - router.get('/search', logger.logRequestMiddleware({ logCategory, action: 'Search uploaded data uses' }), (req, res) => dataUseRegisterController.searchDataUseRegisters(req, res) ); diff --git a/src/resources/datarequest/datarequest.controller.js b/src/resources/datarequest/datarequest.controller.js index c123270b..e776dc3e 100644 --- a/src/resources/datarequest/datarequest.controller.js +++ b/src/resources/datarequest/datarequest.controller.js @@ -697,7 +697,6 @@ export default class DataRequestController extends Controller { this.dataRequestService.updateVersionStatus(accessRecord, accessRecord.applicationStatus); if (accessRecord.applicationStatus === constants.applicationStatuses.APPROVED) { - await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); const dataUseRegister = await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); await dataUseRegisterController.createNotifications( constants.dataUseRegisterNotifications.DATAUSEAPPROVED, @@ -710,7 +709,6 @@ export default class DataRequestController extends Controller { }); } else if (accessRecord.applicationStatus === constants.applicationStatuses.APPROVEDWITHCONDITIONS) { - await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); const dataUseRegister = await this.dataUseRegisterService.createDataUseRegister(requestingUser, accessRecord); await dataUseRegisterController.createNotifications( constants.dataUseRegisterNotifications.DATAUSEAPPROVED, diff --git a/src/resources/team/team.controller.js b/src/resources/team/team.controller.js index 175ef1c9..531a12c1 100644 --- a/src/resources/team/team.controller.js +++ b/src/resources/team/team.controller.js @@ -1,4 +1,4 @@ -import { isEmpty, has, difference, includes, isNull, filter, some } from 'lodash'; +import _, { isEmpty, has, difference, includes, isNull, filter, some } 
from 'lodash'; import { TeamModel } from './team.model'; import { UserModel } from '../user/user.model'; import { PublisherModel } from '../publisher/publisher.model'; @@ -1047,8 +1047,12 @@ const filterMembersByNoticationTypes = (members, notificationTypes) => { */ const filterMembersByNoticationTypesOptIn = (members, notificationTypes) => { return filter(members, member => { + if (!('notifications' in member) || _.isEmpty(member.notifications)) { + return true; + } + return some(member.notifications, notification => { - return includes(notificationTypes, notification.notificationType) && notification.optIn; + return includes(notificationTypes, notification.notificationType) && (notification.optIn === true); }); }); }; diff --git a/src/resources/user/__mocks__/cohorts.data.js b/src/resources/user/__mocks__/cohorts.data.js new file mode 100644 index 00000000..60851d22 --- /dev/null +++ b/src/resources/user/__mocks__/cohorts.data.js @@ -0,0 +1,91 @@ +export const mockCohorts = [ + { + _id: "610aabea83eb3f2a4d33ddd1", + pid: "9a41b63f-5ec5-4966-9d70-f718df24a395", + description: "a test 1", + uploaders: [8470291714590256,8470291714590257], + cohort: { + stuff: "stuff" + }, + version: 1, + changeLog: "", + testArr: ["test1", "test2"], + id: 1234, + name: "Cohort One", + updatedAt: "2021-10-07T14:43:55.508Z", + activeflag: "archived_version", + type: "cohort", + relatedObjects: [{ + _id: "6141fae77e4d8d8f758e9fb6", + objectId: "4050303073977839", + objectType: "project", + user: "User Name One", + updated: "21 May 2021" + }, { + _id: "6141fb4f7e4d8d8f758e9fb7", + objectId: "6061998693684476", + reason: "cohort add via db", + objectType: "tool", + user: "User Name One", + updated: "11 September 2021" + }, { + _id: "61431817508c5aa2dce95cdb", + objectId: "5d76d094-446d-4dcc-baa1-076095f30c23", + objectType: "dataset", + pid: "0bb8d80b-4d92-4bcb-84b7-5a1ff1f86a33", + user: "User Name One", + updated: "16 September 2021", + isLocked: true + }, { + _id: 
"614321de508c5aa2dce95cdc", + objectId: "c6d6bbd3-74ed-46af-841d-ac5e05f4da41", + objectType: "dataset", + pid: "f725187f-7352-482b-a43b-64ebc96e66f2", + user: "User Name One", + updated: "16 September 2021", + isLocked: true + }], + "publicflag": true, + "datasetPids": [] + }, + { + _id: "610aac0683eb3f2a4d33ddd2", + pid: "abc12a3", + description: "a test 2", + uploaders: [8470291714590256,8470291714590257], + cohort: { + stuff: "4444" + }, + version: 1, + changeLog: "", + id: 3456, + name: "Cohort Two", + updatedAt: "2021-10-20T13:23:09.093Z", + activeflag: "active", + type: "cohort", + publicflag: false, + relatedObjects: [{ + _id: "614dcb0e1b5e0aa5019aee12", + objectId: "5d76d094-446d-4dcc-baa1-076095f30c23", + objectType: "dataset", + pid: "0bb8d80b-4d92-4bcb-84b7-5a1ff1f86a33", + user: "User Name One", + updated: "6 September 2021", + isLocked: true + }, { + _id: "6155ad4116113e65c26a8a4c", + objectId: "4050303073977839", + objectType: "project", + user: "User Name One", + updated: "28 September 2021" + }, { + _id: "6155ada116113e65c26a8a4d", + reason: "cohort add via db", + objectType: "tool", + user: "User Name One", + updated: "29 September 2021", + objectId: "6061998693684476" + }], + "datasetPids": [] + } +]; \ No newline at end of file diff --git a/src/resources/user/__mocks__/collections.data.js b/src/resources/user/__mocks__/collections.data.js new file mode 100644 index 00000000..6fa0eb00 --- /dev/null +++ b/src/resources/user/__mocks__/collections.data.js @@ -0,0 +1,146 @@ +export const mockCollections = [ + { + _id: '612e0d035671f75be2461dfa', + authors: [8470291714590256,8470291714590257], + keywords: [], + relatedObjects: [ + { + _id: '612e0d035671f75be2461dfb', + objectId: '6ec3a47b-447a-4b22-9b7a-43acae5d408f', + reason: '', + objectType: 'dataset', + pid: 'fce78329-0de1-45f2-9ff1-e1b4af50528e', + user: 'John Doe', + updated: '31 Aug 2021', + }, + ], + id: 20905331408744290, + name: 'Test', + description: 
'TestTestTestTestTestTestTestTestTestTest', + imageLink: '', + activeflag: 'active', + publicflag: true, + updatedon: '2021-08-31T11:06:19.329Z', + createdAt: '2021-08-31T11:05:39.129Z', + updatedAt: '2021-10-14T14:38:21.800Z', + __v: 0, + counter: 3, + persons: [ + { + _id: '6128a6f9dd361d15499db644', + categories: { programmingLanguage: [] }, + tags: { features: [], topics: [] }, + document_links: { doi: [], pdf: [], html: [] }, + datasetfields: { geographicCoverage: [], physicalSampleAvailability: [], technicaldetails: [], versionLinks: [], phenotypes: [] }, + authors: [], + emailNotifications: true, + showOrganisation: true, + structuralMetadata: [], + datasetVersionIsV1: false, + toolids: [], + datasetids: [], + id: 8470291714590257, + type: 'person', + firstname: 'John', + lastname: 'Doe', + bio: '', + link: '', + orcid: 'https://orcid.org/', + activeflag: 'active', + terms: true, + sector: 'Academia', + organisation: '', + showSector: true, + showBio: true, + showLink: true, + showOrcid: true, + showDomain: true, + profileComplete: true, + relatedObjects: [], + programmingLanguage: [], + createdAt: '2021-08-27T08:48:57.710Z', + updatedAt: '2021-08-27T10:23:11.582Z', + __v: 0, + counter: 1, + }, + ], + }, +]; + + +// export const mockCollections = [ +// { +// _id: { +// oid: '6168030b0e24c03595166261', +// }, +// authors: [12345], +// keywords: [], +// relatedObjects: [ +// { +// _id: { +// $oid: '6168030b0e24c03595166262', +// }, +// objectId: 'af434b05-52a7-4ff1-92f5-e2dd38a574aa', +// reason: '', +// objectType: 'dataset', +// pid: 'fdd9e5ab-442f-45d0-a004-f581a3ac809c', +// user: 'John Doe', +// updated: '14 Oct 2021', +// }, +// ], +// id: 138879762298581, +// name: 'Test collection 1', +// description: 'A test collection', +// imageLink: '', +// activeflag: 'active', +// publicflag: true, +// updatedon: { +// $date: '2021-10-14T12:10:13.817Z', +// }, +// createdAt: { +// $date: '2021-10-14T10:14:35.308Z', +// }, +// updatedAt: { +// $date: 
'2021-10-14T12:10:14.563Z', +// }, +// __v: 0, +// counter: 1, +// }, +// { +// _id: { +// oid: '6168030b0e24c03595166262', +// }, +// authors: [12345], +// keywords: [], +// relatedObjects: [ +// { +// _id: { +// $oid: '6168030b0e24c03595166262', +// }, +// objectId: 'af434b05-52a7-4ff1-92f5-e2dd38a574aa', +// reason: '', +// objectType: 'dataset', +// pid: 'fdd9e5ab-442f-45d0-a004-f581a3ac809c', +// user: 'John Doe', +// updated: '14 Oct 2021', +// }, +// ], +// id: 138879762298582, +// name: 'Test collection 2', +// description: 'A test collection', +// imageLink: '', +// activeflag: 'active', +// publicflag: true, +// updatedon: { +// $date: '2021-10-14T12:10:13.817Z', +// }, +// createdAt: { +// $date: '2021-10-14T10:14:35.308Z', +// }, +// updatedAt: { +// $date: '2021-10-14T12:10:14.563Z', +// }, +// __v: 0, +// counter: 1, +// }, +// ]; diff --git a/src/resources/user/__mocks__/dars.data.js b/src/resources/user/__mocks__/dars.data.js new file mode 100644 index 00000000..a542e8df --- /dev/null +++ b/src/resources/user/__mocks__/dars.data.js @@ -0,0 +1,359 @@ +export const mockDars = [ + { + _id: "61f1143078397b350634dac3", + majorVersion: 1, + authorIds: [8470291714590256,8470291714590257], + datasetIds: ["9c457d37-3402-450a-8bbd-32bf51524ded"], + initialDatasetIds: [], + datasetTitles: ["Demo v4", "Demo v4-duplicate"], + applicationStatus: "inProgress", + applicationType: "initial", + publisher: "ALLIANCE > SAIL", + formType: "5 safe", + isShared: false, + userId: 7789224198062117, + isCloneable: true, + jsonSchema: { + pages: [{ + description: "Who is going to be accessing the data?\n\nSafe People should have the right motivations for accessing research data and understand the legal and ethical considerations when using data that may be sensitive or confidential. Safe People should also have sufficient skills, knowledge and experience to work with the data effectively. 
Researchers may need to undergo specific training or accreditation before accessing certain data or research environments and demonstrate that they are part of a bona fide research organisation.\n\nThe purpose of this section is to ensure that:\n- details of people who will be accessing the data and the people who are responsible for completing the application are identified\n- any individual or organisation that intends to access the data requested is identified\n- all identified individuals have the necessary accreditation and/or expertise to work with the data effectively.", + pageId: "safepeople", + title: "Safe people", + active: true + }, { + title: "Safe project", + active: false, + pageId: "safeproject", + description: "What is the purpose of accessing the data?\n\nSafe projects are those that have a valid research purpose with a defined public benefit. \nFor access to data to be granted the researchers need to demonstrate that their proposal is an appropriate and ethical use of the data, and that it is intended to deliver clear public benefits. The purpose of this section is to ensure that:\n- the project rationale is explained in lay terms\n- the research purpose has a defined public benefit. This can be new knowledge, new treatments, improved pathways of care, new techniques of training staff. \n- how the data requested will be used to achieve the project objectives is articulated." + }, { + active: false, + title: "Safe data", + description: "Safe data ensure that researchers have a clear legal basis for accessing the data and do not inadvertently learn something about the data subjects during the course of their analysis, minimising the risks of re-identification.\nThe minimisation of this risk could be achieved by removing direct identifiers, aggregating values, banding variables, or other statistical techniques that may make re-identification more difficult. 
Sensitive or confidential data could not be considered to be completely safe because of the residual risk to a data subject’s confidentiality. Hence other limitations on access will need to be applied.\n\nThe purpose of this section is to ensure that: \n- there is a clear legal basis for accessing the requested data\n- the data requested is proportionate to the requirement of the project \n- all data requested is necessary in order to achieve the public benefit declared \n- data subjects cannot be identified by your team by cross-referencing datasets from anywhere else.", + pageId: "safedata" + }, { + description: "Safe settings are analytics environments where researchers can access and analyse the requested datasets in a safe and ethical way. Safe settings encompass the physical environment and procedural arrangements such as the supervision and auditing regimes. For safe settings, the likelihood of both deliberate and accidental disclosure needs to be explicitly considered.\n\nThe purpose of this section is to ensure that:\n\n- researchers access requested data in a secure and controlled setting such as a Trusted Research Environment (TRE) that limits the unauthorised use of the data\n- practical controls and appropriate restrictions are in place if researchers access data though non-TRE environment. There may be requirements that data is held on restricted access servers, encrypted and only decrypted at the point of use.", + pageId: "safesettings", + title: "Safe settings", + active: false + }, { + pageId: "safeoutputs", + description: "Safe outputs ensure that all research outputs cannot be used to identify data subjects. They typically include ‘descriptive statistics’ that have been sufficiently aggregated such that identification is near enough impossible, and modelled outputs which are inherently non-confidential. 
The purpose of this section is to ensure that:\n\n- controls are in place to minimise risks associated with planned outputs and publications \n- the researchers aim to openly publish their results to enable use, scrutiny and further research.", + title: "Safe outputs", + active: false + }], + formPanels: [{ + index: 1, + pageId: "safepeople", + panelId: "primaryapplicant" + }, { + panelId: "safepeople-otherindividuals", + pageId: "safepeople", + index: 2 + }, { + panelId: "safeproject-aboutthisapplication", + pageId: "safeproject", + index: 3 + }, { + pageId: "safeproject", + index: 4, + panelId: "safeproject-projectdetails" + }, { + panelId: "safeproject-funderinformation", + pageId: "safeproject", + index: 5 + }, { + index: 6, + pageId: "safeproject", + panelId: "safeproject-sponsorinformation" + }, { + panelId: "safeproject-declarationofinterest", + pageId: "safeproject", + index: 7 + }, { + pageId: "safeproject", + index: 8, + panelId: "safeproject-intellectualproperty" + }, { + index: 9, + pageId: "safedata", + panelId: "safedata-datafields" + }, { + panelId: "safedata-analysis", + index: 10, + pageId: "safedata" + }, { + panelId: "safedata-otherdatasetsintentiontolinkdata", + pageId: "safedata", + index: 11 + }, { + panelId: "safedata-lawfulbasis", + index: 12, + pageId: "safedata" + }, { + panelId: "safedata-confidentialityavenue", + index: 13, + pageId: "safedata" + }, { + pageId: "safedata", + index: 14, + panelId: "safedata-ethicalapproval" + }, { + panelId: "safesettings-storageandprocessing", + pageId: "safesettings", + index: 15 + }, { + pageId: "safesettings", + index: 16, + panelId: "safesettings-dataflow" + }, { + index: 17, + pageId: "safeoutputs", + panelId: "safeoutputs-outputsdisseminationplans" + }, { + panelId: "safeoutputs-retention", + index: 18, + pageId: "safeoutputs" + }, { + index: 19, + pageId: "safeoutputs", + panelId: "safeoutputs-archiving" + }], + questionPanels: [{ + pageId: "safepeople", + panelHeader: "Please list the individuals 
who will have access to the data requested, or are responsible for helping complete this application form. \r\n\r\nThis section should include key contact details for the person who is leading the project; key contact details for the person(s) who (are) leading the project from other organisations. Only one contact from each organisation is needed. \r\n\r\nThe 'Primary applicant' is the person filling out the application form and principal contact for the application. This is usually the person with operational responsibility for the proposal. Each application must have details for at least one person.\r\n\r\nPlease use the file upload function if you're not able to add all individuals via the form.\r", + questionSets: [{ + questionSetId: "primaryapplicant", + index: 1 + }], + navHeader: "Primary applicant", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "primaryapplicant" + }, { + pageId: "safepeople", + panelHeader: "Please list the individuals who will have access to the data requested, or are responsible for helping complete this application form. \r\n\r\nThis section should include key contact details for the person who is leading the project; key contact details for the person(s) who (are) leading the project from other organisations. Only one contact from each organisation is needed. \r\n\r\nThe 'Primary applicant' is the person filling out the application form and principal contact for the application. This is usually the person with operational responsibility for the proposal. 
Each application must have details for at least one person.\r\n\r\nPlease use the file upload function if you're not able to add all individuals via the form.\r", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safepeople-otherindividuals", + questionSets: [{ + questionSetId: "safepeople-otherindividuals", + index: 1 + }, { + index: 100, + questionSetId: "add-safepeople-otherindividuals" + }], + navHeader: "Other individuals" + }, { + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safeproject-aboutthisapplication", + questionSets: [{ + index: 1, + questionSetId: "safeproject-aboutthisapplication" + }], + navHeader: "About this application", + panelHeader: "", + pageId: "safeproject" + }, { + panelHeader: "", + pageId: "safeproject", + panelId: "safeproject-projectdetails", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Project details", + questionSets: [{ + questionSetId: "safeproject-projectdetails", + index: 1 + }] + }, { + navHeader: "Funder information", + questionSets: [{ + index: 1, + questionSetId: "safeproject-funderinformation" + }], + panelId: "safeproject-funderinformation", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelHeader: "A funder is the organisation or body providing the financial resource to make the project possible, and may be different to the organisation detailed in the Safe people section. 
Please provide details of the main funder organisations supporting this project.\r\n\r\nPlease use the file upload function if you're not able to add all funders via the form.\r", + pageId: "safeproject" + }, { + panelId: "safeproject-sponsorinformation", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Sponsor information", + questionSets: [{ + index: 1, + questionSetId: "safeproject-sponsorinformation" + }], + panelHeader: "The sponsor is usually, but does not have to be, the main funder of the research. The sponsor takes primary responsibility for ensuring that the design of the project meets appropriate standards and that arrangements are in place to ensure appropriate conduct and reporting.\r\n\r\nPlease use the file upload function if you're not able to add all sponsors via the form.\r\n", + pageId: "safeproject" + }, { + navHeader: "Declaration of interest", + questionSets: [{ + questionSetId: "safeproject-declarationofinterest", + index: 1 + }], + panelId: "safeproject-declarationofinterest", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelHeader: "All interests that might unduly influence an individual’s judgement and objectivity in the use of the data being requested are of relevance, particularly if it involves payment or financial inducement. \r\n\r\nThese might include any involvement of commercial organisations at arm’s-length to the project, or likely impact on commercial organisations, individually or collectively, that might result from the outcomes or methodology of the project.\r\n\r\nAll individuals named in this application who have an interest this application must declare their interest.\r", + pageId: "safeproject" + }, { + panelHeader: "All interests that might unduly influence an individual’s judgement and objectivity in the use of the data being requested are of relevance, particularly if it involves payment or financial inducement. 
\r\n\r\nThese might include any involvement of commercial organisations at arm’s-length to the project, or likely impact on commercial organisations, individually or collectively, that might result from the outcomes or methodology of the project.\r\n\r\nAll individuals named in this application who have an interest this application must declare their interest.\r", + pageId: "safeproject", + questionSets: [{ + questionSetId: "safeproject-intellectualproperty", + index: 1 + }], + navHeader: "Intellectual property", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safeproject-intellectualproperty" + }, { + questionSets: [{ + questionSetId: "safedata-datafields", + index: 1 + }], + navHeader: "Data fields", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safedata-datafields", + pageId: "safedata", + panelHeader: "These are the Information assets which your proposal seeks to access and use.\r\n\r\nYou should consider this definition to be wide in scope and include any source of information which you propose to access and use. The data may be highly structured or less structured in nature, already existing or to be newly collected or gathered. \r\n\r\nExamples may include national datasets, local data sets, national or local extracts from systems, national or local registries or networks, patient records, or new information to be gathered from patients, families or other cohorts. \r\n\r\nNew data” should only include data that is being specifically gathered for the first time for the purposes of this proposal. i.e. 
data already held in case notes and transferred to a form is not “new” data, but a survey filled out by clinicians in order to gather information not recorded anywhere else is “new”.\r" + }, { + panelId: "safedata-analysis", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Analysis", + questionSets: [{ + questionSetId: "safedata-analysis", + index: 1 + }], + panelHeader: "These are the Information assets which your proposal seeks to access and use.\r\n\r\nYou should consider this definition to be wide in scope and include any source of information which you propose to access and use. The data may be highly structured or less structured in nature, already existing or to be newly collected or gathered. \r\n\r\nExamples may include national datasets, local data sets, national or local extracts from systems, national or local registries or networks, patient records, or new information to be gathered from patients, families or other cohorts. \r\n\r\nNew data” should only include data that is being specifically gathered for the first time for the purposes of this proposal. i.e. data already held in case notes and transferred to a form is not “new” data, but a survey filled out by clinicians in order to gather information not recorded anywhere else is “new”.\r", + pageId: "safedata" + }, { + panelHeader: "This section should include information on the planned use of datasets not already included in this application. The following information is required:\r\n\r\nA descriptive name so that it is clear what the dataset is. \r\n\r\nSufficient information to explain the content of the dataset. \r\n\r\nWhether the proposal requires linkage of data, the use of matched controls, or the extraction of anonymised data.\r\n\r\nPlease indicate which organisation or body is undertaking these processes and which variables from the data sources requested will be used to achieve the proposed linkage. 
This should cover every dataset and variable you will require.\r\n", + pageId: "safedata", + panelId: "safedata-otherdatasetsintentiontolinkdata", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Other datasets - Intention to link data", + questionSets: [{ + index: 1, + questionSetId: "safedata-otherdatasetsintentiontolinkdata" + }] + }, { + questionSets: [{ + questionSetId: "safedata-lawfulbasis", + index: 1 + }], + navHeader: "Lawful basis", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safedata-lawfulbasis", + pageId: "safedata", + panelHeader: "General Data Protection Regulation (GDPR) applies to ‘controllers’ and ‘processors’. \r\n\r\nA controller determines the purposes and means of processing personal data.\r\n\r\nA processor is responsible for processing personal data on behalf of a controller.\r\n \r\nGDPR applies to processing carried out by organisations operating within the EU. It also applies to organisations outside the EU that offer goods or services to individuals in the EU.\r\nGDPR does not apply to certain activities including processing covered by the Law Enforcement Directive, processing for national security purposes and processing carried out by individuals purely for personal/household activities. \r\n \r\nGDPR only applies to information which relates to an identifiable living individual. 
Information relating to a deceased person does not constitute personal data and therefore is not subject to the GDPR.\r" + }, { + questionSets: [{ + index: 1, + questionSetId: "safedata-confidentialityavenue" + }], + navHeader: "Confidentiality avenue", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safedata-confidentialityavenue", + pageId: "safedata", + panelHeader: "If confidential information is being disclosed, the organisations holding this data (both the organisation disclosing the information and the recipient organisation) must also have a lawful basis to hold and use this information, and if applicable, have a condition to hold and use special categories of confidential information, and be fair and transparent about how they hold and use this data. \r\n\r\nIn England and Wales, if you are using section 251 of the NHS Act 2006 (s251) as a legal basis for identifiable data, you will need to ensure that you have the latest approval letter and application. \r\n\r\nFor Scotland this application will be reviewed by the Public Benefit and Privacy Panel.\r\n\r\nIn Northern Ireland it will be considered by the Privacy Advisory Committee. If you are using patient consent as the legal basis, you will need to provide all relevant consent forms and information leaflets.\r\n" + }, { + panelHeader: "This section details the research and ethics approval which you have obtained or sought for your project, or otherwise provides evidence as to why such approval is not necessary. \r\nWhere such approval is not in place, it is important that you demonstrate why this is the case and provide assurances if approval is pending. If you need advice on whether ethics approval is necessary, you should approach your local ethics services in the first instance. 
Information about UK research ethics committees and ethical opinions can be found on the Health Research Authority website.\r\n", + pageId: "safedata", + navHeader: "Ethical approval", + questionSets: [{ + index: 1, + questionSetId: "safedata-ethicalapproval" + }], + panelId: "safedata-ethicalapproval", + questionPanelHeaderText: "TODO: We need a description for this panel" + }, { + panelHeader: "This section details in what way the proposal aims to store and use data, and controls in place to minimise risks associated with this storage and use. If you have indicated that your proposal seeks to store and use data exclusively through a recognised trusted research environment, then you do not need to complete this section.\r\n \r\nIn relation to personal data, means any operation or set of operations which is performed on personal data or on sets of personal data (whether or not by automated means, such as collection, recording, organisation, structuring, storage, alteration, retrieval, consultation, use, disclosure, dissemination, restriction, erasure or destruction).\r\n \r\nAll Locations where processing will be undertaken, for the avoidance of doubt storage is considered processing. 
For each separate organisation processing data which is not fully anonymous a separate partner organisation form must also be completed.\r\n \r\n Processing, in relation to information or data means obtaining, recording or holding the information or data or carrying out any operation or set of operations on the information or data, including—\r\n a) organisation, adaptation or alteration of the information or data,\r\n b) retrieval, consultation or use of the information or data,\r\n c) disclosure of the information or data by transmission,\r\n dissemination or otherwise making available, or\r\n d) alignment, combination, blocking, erasure or destruction of the information or data.\r\n\r\nPlease use the file upload function if you're not able to add all organisations via the form.\r", + pageId: "safesettings", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safesettings-storageandprocessing", + questionSets: [{ + questionSetId: "safesettings-storageandprocessing", + index: 1 + }], + navHeader: "Storage and processing" + }, { + pageId: "safesettings", + panelHeader: "", + panelId: "safesettings-dataflow", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Dataflow", + questionSets: [{ + index: 1, + questionSetId: "safesettings-dataflow" + }] + }, { + panelId: "safeoutputs-outputsdisseminationplans", + questionPanelHeaderText: "TODO: We need a description for this panel", + navHeader: "Outputs dissemination plans", + questionSets: [{ + index: 1, + questionSetId: "safeoutputs-outputsdisseminationplans" + }], + panelHeader: "Please include any plans for dissemination and publication of the data and results arising from your proposal. Please also specify any controls in place to minimise risks associated with publication. 
Dissemination can take place in a variety of ways and through many mechanisms, including through electronic media, print media or word of mouth.", + pageId: "safeoutputs" + }, { + navHeader: "Retention", + questionSets: [{ + questionSetId: "safeoutputs-retention", + index: 1 + }], + panelId: "safeoutputs-retention", + questionPanelHeaderText: "TODO: We need a description for this panel", + pageId: "safeoutputs", + panelHeader: "This section details how the project will treat data being processed after it has been used for the purpose of the proposal outlined, including governance in place to determine how long it will be retained, and controls to manage its subsequent disposal if required. Please reference any relevant policies and procedures which are in place to govern retention and disposal of data as outlined in the proposal." + }, { + panelHeader: "This section details how the project will treat data being processed after it has been used for the purpose of the proposal outlined, including governance in place to determine how long it will be retained, and controls to manage its subsequent disposal if required. Please reference any relevant policies and procedures which are in place to govern retention and disposal of data as outlined in the proposal.", + pageId: "safeoutputs", + questionPanelHeaderText: "TODO: We need a description for this panel", + panelId: "safeoutputs-archiving", + questionSets: [{ + questionSetId: "safeoutputs-archiving", + index: 1 + }], + navHeader: "Archiving" + }], + questionSets: [{ + questionSetHeader: "Archiving", + questions: [{ + validations: [{ + type: "isLength", + message: "Please enter a value", + params: [1] + }], + input: { + required: true, + type: "textareaInput" + }, + question: "What method of destruction will be used when this period has expired?", + guidance: "Please provide details of how the data/files will be disposed of at the end of the period specified above. 
You might refer to any relevant disposal or destruction policies held by your organisation, by summarising the relevant section from the policy or including a URL and indicating which section is relevant.", + questionId: "safeoutputsdataretentiondestructionmethod" + }, { + questionId: "safeoutputsdataretentiondestructionevidence", + input: { + type: "textareaInput" + }, + guidance: "Please confirm you will notify us when the data have been destroyed. ", + question: "What evidence will be provided that destruction has occurred and when?" + }], + questionSetId: "safeoutputs-archiving" + }] + }, + schemaId: "5fbabae775c2095bdbdc1533", + files: [], + amendmentIterations: [], + createdAt: "2022-01-26T09:28:16.463Z", + updatedAt: "2022-01-26T09:28:16.586Z", + __v: 0, + projectId: "61F1-1430-7839-7B35-0634-DAC3", + versionTree: { + 1.0: { + applicationId: "61f1143078397b350634dac3", + displayTitle: "Version 1.0", + detailedTitle: "Version 1.0", + link: "/data-access-request/61f1143078397b350634dac3?version=1.0", + applicationType: "initial", + applicationStatus: "inProgress", + isShared: false + } + } + } +]; \ No newline at end of file diff --git a/src/resources/user/__mocks__/tools.data.js b/src/resources/user/__mocks__/tools.data.js new file mode 100644 index 00000000..fcbb6883 --- /dev/null +++ b/src/resources/user/__mocks__/tools.data.js @@ -0,0 +1,106 @@ +export const mockTools = [ + { + _id: "618a6dfef53d81176faa3828", + categories: { + programmingLanguage: [] + }, + tags: { + features: [], + topics: [] + }, + document_links: { + doi: [], + pdf: [], + html: [] + }, + datasetfields: { + geographicCoverage: [], + physicalSampleAvailability: [], + technicaldetails: [], + versionLinks: [], + phenotypes: [] + }, + authors: [8470291714590256], + emailNotifications: true, + showOrganisation: true, + structuralMetadata: [], + datasetVersionIsV1: false, + isCohortDiscovery: false, + toolids: [], + datasetids: [], + id: 9285155117905740, + type: "person", + firstname: 
"Joseph", + lastname: "Best", + bio: "", + link: "", + orcid: "https://orcid.org/", + activeflag: "active", + terms: true, + sector: "Charity/Non-profit", + organisation: "", + showSector: true, + showBio: true, + showLink: true, + showOrcid: true, + showDomain: true, + profileComplete: true, + relatedObjects: [], + programmingLanguage: [], + createdAt: "2021-11-09T12:47:58.360Z", + updatedAt: "2021-11-09T12:47:58.360Z", + __v: 0 + }, + { + _id: "619390f4619256bde461d2a3", + categories: { + programmingLanguage: [] + }, + tags: { + features: [], + topics: [] + }, + document_links: { + doi: [], + pdf: [], + html: [] + }, + datasetfields: { + geographicCoverage: [], + physicalSampleAvailability: [], + technicaldetails: [], + versionLinks: [], + phenotypes: [] + }, + authors: [8470291714590257], + emailNotifications: true, + showOrganisation: true, + structuralMetadata: [], + datasetVersionIsV1: false, + isCohortDiscovery: false, + toolids: [], + datasetids: [], + id: 9248830212478532, + type: "person", + firstname: "HDRUK", + lastname: "Developer", + bio: "", + link: "", + orcid: "https://orcid.org/0000-0002-7770-8811", + activeflag: "active", + terms: true, + sector: "Charity/Non-profit", + organisation: "", + showSector: true, + showBio: true, + showLink: true, + showOrcid: true, + showDomain: true, + profileComplete: true, + relatedObjects: [], + programmingLanguage: [], + createdAt: "2021-11-16T11:07:32.009Z", + updatedAt: "2021-11-16T11:07:32.009Z", + __v: 0 + } +]; \ No newline at end of file diff --git a/src/resources/user/__mocks__/users.data.js b/src/resources/user/__mocks__/users.data.js new file mode 100644 index 00000000..94875524 --- /dev/null +++ b/src/resources/user/__mocks__/users.data.js @@ -0,0 +1,50 @@ +export const mockUsers = [ + { + _id: { + oid: '5e6f984a0a7300dc8f6fb195', + }, + id: 12345, + providerId: '102167422686846649659', + provider: 'google', + firstname: 'FirstName1', + lastname: 'LastName1', + email: 'FirstName1.LastName1@test.com', + 
password: null, + role: 'Admin', + __v: 0, + updatedAt: { + $date: '2021-10-14T12:10:14.563Z', + }, + discourseKey: 'e2ca8f3f13da82a108db03079ae301b7bab59beb56b8', + discourseUsername: 'FirstName1.LastName1', + createdAt: { + $date: '2020-09-04T00:00:00.000Z', + }, + feedback: false, + news: false + }, + { + _id: { + oid: '5e6f984a0a7300dc8f6fb196', + }, + id: 12346, + providerId: '102167422686846649659', + provider: 'google', + firstname: 'FirstName2', + lastname: 'LastName2', + email: 'FirstName2.LastName2@test.com', + password: null, + role: 'Admin', + __v: 0, + updatedAt: { + $date: '2021-10-14T12:10:14.563Z', + }, + discourseKey: 'e2ca8f3f13da82a108db03079ae301b7bab59beb56c8', + discourseUsername: 'FirstName2.LastName2', + createdAt: { + $date: '2020-09-04T00:00:00.000Z', + }, + feedback: false, + news: false + }, +]; \ No newline at end of file diff --git a/src/resources/user/__tests__/getCollaboratorsCohorts.test.js b/src/resources/user/__tests__/getCollaboratorsCohorts.test.js new file mode 100644 index 00000000..bb626688 --- /dev/null +++ b/src/resources/user/__tests__/getCollaboratorsCohorts.test.js @@ -0,0 +1,35 @@ +import dbHandler from '../../../config/in-memory-db'; +import {mockCohorts} from '../__mocks__/cohorts.data'; + +const {getCollaboratorsCohorts} = require('../user.service'); + + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ cohorts: mockCohorts }); +}); + +afterAll(async () => { + await dbHandler.clearDatabase(); + await dbHandler.closeDatabase(); +}); + +describe('getCollaboratorsCohorts tests', () => { + it('should return values', async () => { + const currentUserId = 8470291714590257; + const filter = currentUserId ? 
{ uploaders: currentUserId } : {}; + + const result = await getCollaboratorsCohorts(filter, currentUserId); + expect(result.length > 0).toBe(true); + expect(typeof result).toBe('object'); + }); + + it('should return values', async () => { + const currentUserId = null; + const filter = currentUserId ? { uploaders: currentUserId } : {}; + + const result = await getCollaboratorsCohorts(filter, currentUserId); + expect(result.length > 0).toBe(true); + expect(typeof result).toBe('object'); + }); +}); \ No newline at end of file diff --git a/src/resources/user/__tests__/getCollaboratorsCollections.test.js b/src/resources/user/__tests__/getCollaboratorsCollections.test.js new file mode 100644 index 00000000..63a069ca --- /dev/null +++ b/src/resources/user/__tests__/getCollaboratorsCollections.test.js @@ -0,0 +1,26 @@ +import dbHandler from '../../../config/in-memory-db'; +import {mockCollections} from '../__mocks__/collections.data'; + +const {getCollaboratorsCollections} = require('../user.service'); + + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ collections: mockCollections }); +}); + +afterAll(async () => { + await dbHandler.clearDatabase(); + await dbHandler.closeDatabase(); +}); + +describe('getCollaboratorsCollections tests', () => { + it('should return values', async () => { + const currentUserId = 8470291714590257; + const filter = currentUserId ? 
{ authors: currentUserId } : {}; + + const result = await getCollaboratorsCollections(filter, currentUserId); + expect(result.length).toBe(1); + expect(typeof result).toBe('object'); + }); +}); \ No newline at end of file diff --git a/src/resources/user/__tests__/getCollaboratorsDARs.test.js b/src/resources/user/__tests__/getCollaboratorsDARs.test.js new file mode 100644 index 00000000..048a8e68 --- /dev/null +++ b/src/resources/user/__tests__/getCollaboratorsDARs.test.js @@ -0,0 +1,35 @@ +import dbHandler from '../../../config/in-memory-db'; +import {mockDars} from '../__mocks__/dars.data'; + +const {getCollaboratorsDARs} = require('../user.service'); + + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ data_requests: mockDars }); +}); + +afterAll(async () => { + await dbHandler.clearDatabase(); + await dbHandler.closeDatabase(); +}); + +describe('getCollaboratorsDARs tests', () => { + it('should return values', async () => { + const currentUserId = 8470291714590257; + const filter = currentUserId ? { $or: [{ userId: currentUserId }, { authorIds: currentUserId }] } : {}; + + const result = await getCollaboratorsDARs(filter, currentUserId); + expect(result.length > 0).toBe(true); + expect(typeof result).toBe('object'); + }); + + it('should return values', async () => { + const currentUserId = null; + const filter = currentUserId ? 
{ $or: [{ userId: currentUserId }, { authorIds: currentUserId }] } : {}; + + const result = await getCollaboratorsDARs(filter, currentUserId); + expect(result.length > 0).toBe(true); + expect(typeof result).toBe('object'); + }); +}); \ No newline at end of file diff --git a/src/resources/user/__tests__/getCollaboratorsTools.test.js b/src/resources/user/__tests__/getCollaboratorsTools.test.js new file mode 100644 index 00000000..a083609d --- /dev/null +++ b/src/resources/user/__tests__/getCollaboratorsTools.test.js @@ -0,0 +1,34 @@ +import dbHandler from '../../../config/in-memory-db'; +import {mockTools} from '../__mocks__/tools.data'; + +const {getCollaboratorsTools} = require('../user.service'); + + +beforeAll(async () => { + await dbHandler.connect(); + await dbHandler.loadData({ tools: mockTools }); +}); + +afterAll(async () => { + await dbHandler.clearDatabase(); + await dbHandler.closeDatabase(); +}); + +describe('getCollaboratorsTools tests', () => { + it('should return values', async () => { + const currentUserId = 8470291714590257; + const filter = currentUserId ? { uploaders: currentUserId } : {}; + + const result = await getCollaboratorsTools(filter, currentUserId); + expect(typeof result).toBe('object'); + }); + + it('should return values', async () => { + const currentUserId = null; + const filter = currentUserId ? 
{ uploaders: currentUserId } : {}; + + const result = await getCollaboratorsTools(filter, currentUserId); + expect(result.length > 0).toBe(true); + expect(typeof result).toBe('object'); + }); +}); \ No newline at end of file diff --git a/src/resources/user/__tests__/getUniqueCollaborators.test.js b/src/resources/user/__tests__/getUniqueCollaborators.test.js new file mode 100644 index 00000000..bd8da7f2 --- /dev/null +++ b/src/resources/user/__tests__/getUniqueCollaborators.test.js @@ -0,0 +1,48 @@ +const {getUniqueCollaborators} = require('../user.service'); + + +describe('getUniqueCollaborators tests', () => { + it('should return a unique collaborator like map', () => { + let collaborators = [39025048818527176,917335621870613]; + const result = getUniqueCollaborators(collaborators); + + expect(result instanceof Map).toBe(true); + }); + + it('should return a specific number of unique collaborators', () => { + let collaborators = [39025048818527176,917335621870613]; + const result = getUniqueCollaborators(collaborators); + + expect(result.size).toBe(2); + }); + + it('should return empty', () => { + let collaborators = []; + const result = getUniqueCollaborators(collaborators); + + expect(result.size).toBe(0); + }); + + it('should return values', () => { + let collaborators = [39025048818527176,917335621870613]; + const result = getUniqueCollaborators(collaborators); + + expect(result.has(39025048818527176)).toBe(true); + expect(result.has(917335621870613)).toBe(true); + }) + + + it('should return correct keys and values', () => { + let collaborators = [39025048818527176,917335621870613]; + const result = getUniqueCollaborators(collaborators); + + const mapValues = [...result.values()]; + const typeMapValues = typeof mapValues; + + const mapKeys = [...result.keys()]; + const typeMapKeys = typeof mapKeys; + + expect(typeMapValues).toBe('object'); + expect(typeMapKeys).toBe('object'); + }) +}); \ No newline at end of file diff --git a/src/resources/user/user.route.js 
b/src/resources/user/user.route.js index 6397acea..316c3113 100644 --- a/src/resources/user/user.route.js +++ b/src/resources/user/user.route.js @@ -3,12 +3,14 @@ import passport from 'passport'; import { utils } from '../auth'; import { UserModel } from './user.model'; -import { Data } from '../tool/data.model'; -import helper from '../utilities/helper.util'; import { ROLES } from './user.roles'; -import { setCohortDiscoveryAccess } from './user.service'; +import { setCohortDiscoveryAccess, getUsers } from './user.service'; import { upperCase } from 'lodash'; -//import { createServiceAccount } from './user.repository'; + +import { + checkInputMiddleware, + checkMinLengthMiddleware, +} from '../../middlewares/index'; const router = express.Router(); @@ -29,62 +31,37 @@ router.get('/:userID', passport.authenticate('jwt'), utils.checkIsUser(), async // @desc get all // @access Private router.get('/', passport.authenticate('jwt'), async (req, res) => { - var q = Data.aggregate([ - // Find all tools with type of person - { $match: { type: 'person' } }, - // Perform lookup to users - { - $lookup: { - from: 'users', - localField: 'id', - foreignField: 'id', - as: 'user', - }, - }, - // select fields to use - { - $project: { - _id: '$user._id', - id: 1, - firstname: 1, - lastname: 1, - orcid: { - $cond: [ - { - $eq: [true, '$showOrcid'], - }, - '$orcid', - '$$REMOVE', - ], - }, - bio: { - $cond: [ - { - $eq: [true, '$showBio'], - }, - '$bio', - '$$REMOVE', - ], - }, - email: '$user.email', - }, - }, - ]); - - q.exec((err, data) => { - if (err) { + let reqUserId = req.user.id; + await getUsers(reqUserId) + .then(response => { + return res.json({ success: true, data: response }); + }) + .catch(err => { return new Error({ success: false, error: err }); - } - - const users = []; - data.map(dat => { - let { _id, id, firstname, lastname, orcid = '', bio = '', email = '' } = dat; - if (email.length !== 0) email = helper.censorEmail(email[0]); - users.push({ _id, id, orcid, name: 
`${firstname} ${lastname}`, bio, email }); }); +}); - return res.json({ success: true, data: users }); - }); +// @router GET /api/v1/users/search/:filter +// @desc get all filtered by text +// @access Private +router.get('/search/:filter', passport.authenticate('jwt'), [checkInputMiddleware, checkMinLengthMiddleware], async (req, res) => { + let filterString = req.params.filter; + let reqUserId = req.user.id; + await getUsers(reqUserId, filterString) + .then(response => { + + const usersFiltered = []; + response.map((item) => { + if (item.name.toLowerCase().includes(filterString.toLowerCase())) { + usersFiltered.push(item); + } + }); + + return res.json({ success: true, data: usersFiltered }); + }) + .catch(err => { + return new Error({ success: false, error: err }); + }); }); // @router PATCH /api/v1/users/advancedSearch/terms/:id diff --git a/src/resources/user/user.service.js b/src/resources/user/user.service.js index 71252049..1b13bec3 100644 --- a/src/resources/user/user.service.js +++ b/src/resources/user/user.service.js @@ -1,5 +1,11 @@ import emailGeneratorUtil from '../utilities/emailGenerator.util'; import { UserModel } from './user.model'; +import { Data } from '../tool/data.model'; +import helper from '../utilities/helper.util'; +import { Collections } from '../collections/collections.model'; +import { DataRequestModel } from '../datarequest/datarequest.model'; + +let arrCollaborators = []; export async function createUser({ firstname, lastname, email, providerId, provider, role }) { return new Promise(async resolve => { @@ -77,3 +83,228 @@ export async function setCohortDiscoveryAccess(id, roles) { return resolve(updatedUser); }); } + +// Gets all of the logged in users collaborators +export const getUsersCollaborators = async (currentUserId) => { + // Get all collaborators from collections + await getCollaboratorsCollections({ authors: currentUserId }, currentUserId); + + // Get all collaborators from tools and papers (data collection) + await 
getCollaboratorsTools({ authors: currentUserId }, currentUserId); + + // Get all collaborators from DARs + await getCollaboratorsDARs({ $or: [{ userId: currentUserId }, { authorIds: currentUserId }] }, currentUserId); + + // Strip out duplicate collaborators, add a count + return getUniqueCollaborators(arrCollaborators); +} + +export const getCollaboratorsCollections = async (filter, currentUserId) => { + let collaboratorsCollections = await Collections.find(filter, { _id: 0, authors: 1 }).sort({ updatedAt: -1 }); + return await populateCollaborators(collaboratorsCollections, 'authors', currentUserId); +} + +export const getCollaboratorsTools = async (filter, currentUserId) => { + let collaboratorsTools = await Data.find(filter, { _id: 0, authors: 1 }).sort({ updatedAt: -1 }); + return await populateCollaborators(collaboratorsTools, 'authors', currentUserId); +} + +export const getCollaboratorsDARs = async (filter, currentUserId) => { + let collaboratorsDARs = await DataRequestModel.find( + filter, + { _id: 0, authorIds: 1, userId: 1 } + ).sort({ updatedAt: -1 }); + return await populateCollaborators(collaboratorsDARs, 'authorIds', currentUserId); +} + +export const getUniqueCollaborators = (collaborators) => { + let uniqueCollaborators = new Map(); + for (const collaborator of collaborators) { + if (uniqueCollaborators.has(collaborator)) { + let incrementedValue = uniqueCollaborators.get(collaborator) + 1; + uniqueCollaborators.set(collaborator, incrementedValue); + } else { + uniqueCollaborators.set(collaborator, 1); + } + } + + return uniqueCollaborators; +} + +export const populateCollaborators = async (collaboratorsEntity, items, currentUserId) => { + for (const collaborator of collaboratorsEntity) { + if ((!currentUserId && items === 'authorIds') + || (currentUserId && items === 'authorIds' && arrCollaborators.userId !== currentUserId)) { + arrCollaborators.push(collaborator.userId); + } + + for (const item of collaborator[items]) { + if (!currentUserId || 
(currentUserId && item !== currentUserId)) { + arrCollaborators.push(item); + } + } + } + + return arrCollaborators; +} + +export const getUsers = async (currentUserId, filterString = null) => { + // Get the users collaborators + arrCollaborators = []; + let usersCollaborators; + if (!filterString) { + usersCollaborators = await getUsersCollaborators(currentUserId); + } else { + usersCollaborators = new Map(); + } + + // Get the whole list of users + let typePerson; + if (filterString) { + typePerson = Data.aggregate([ + // Find all tools with type of person + { $match: { type: 'person' } }, + // Perform lookup to users + { + $lookup: { + from: 'users', + localField: 'id', + foreignField: 'id', + as: 'user', + }, + }, + { + $match: { + $or: [ + { 'user.firstname': {'$regex': `${filterString}`, '$options': 'i'} }, + { 'user.lastname': {'$regex': `${filterString}`, '$options': 'i'} }, + ] + } + }, + // select fields to use + { + $project: { + _id: '$user._id', + id: 1, + firstname: 1, + lastname: 1, + orcid: { + $cond: [ + { + $eq: [true, '$showOrcid'], + }, + '$orcid', + '$$REMOVE', + ], + }, + bio: { + $cond: [ + { + $eq: [true, '$showBio'], + }, + '$bio', + '$$REMOVE', + ], + }, + email: '$user.email', + }, + }, + { + $sort: { + updatedAt: -1 + }, + }, + ]); + } else { + typePerson = Data.aggregate([ + // Find all tools with type of person + { $match: { type: 'person' } }, + // Perform lookup to users + { + $lookup: { + from: 'users', + localField: 'id', + foreignField: 'id', + as: 'user', + }, + }, + // select fields to use + { + $project: { + _id: '$user._id', + id: 1, + firstname: 1, + lastname: 1, + orcid: { + $cond: [ + { + $eq: [true, '$showOrcid'], + }, + '$orcid', + '$$REMOVE', + ], + }, + bio: { + $cond: [ + { + $eq: [true, '$showBio'], + }, + '$bio', + '$$REMOVE', + ], + }, + email: '$user.email', + }, + }, + { + $sort: { + updatedAt: -1 + }, + }, + ]); + } + + return new Promise((resolve, reject) => { + typePerson.exec((err, data) => { + if (err) { + 
return reject(err); + } + + const users = []; + data.map(dat => { + let { _id, id, firstname, lastname, orcid = '', bio = '', email = '' } = dat; + if (email.length !== 0) email = helper.censorEmail(email[0]); + users.push({ _id, id, orcid, name: `${firstname} ${lastname}`, bio, email }); + }); + + let collaborators = []; + let nonCollaboratorUsers = []; + + // Pull all non collaborators from users + nonCollaboratorUsers = users.filter(user => !usersCollaborators.has(user.id)); + + // Pull all collaborators from users, add count to sort by + for (const user of users) { + usersCollaborators.forEach((count, collaboratorId) => { + if (user.id === collaboratorId) { + collaborators.push({ user: user, count: count }); + } + }); + } + + collaborators.sort((a, b) => b.count - a.count); + + // Remove count after collaborators are sorted + let collaboratorUsers = collaborators.map(collaborator => { + return collaborator.user; + }); + + // resolve([...collaboratorUsers, ...nonCollaboratorUsers]); + if (!filterString) { + resolve([...collaboratorUsers]); + } else { + resolve([...collaboratorUsers, ...nonCollaboratorUsers]); + } + }); + }); +} diff --git a/src/services/httpClient/httpClient.js b/src/services/httpClient/httpClient.js new file mode 100644 index 00000000..c18ebb0d --- /dev/null +++ b/src/services/httpClient/httpClient.js @@ -0,0 +1,73 @@ +const axios = require('axios'); + +class HttpClient { + constructor() { + this._axios = axios; + } + + setHttpClientCookies(cookies) { + return axios.defaults.headers.Cookie = cookies; + } + + async post(url, body, options) { + const headers = { + ...(options && options.headers), + Accept: 'application/json', + 'Content-Type': 'application/json;charset=UTF-8', + }; + + try { + const response = await this._axios.post(url, body, { + ...options, + headers, + }); + + return response; + } catch (err) { + console.error(err); + throw new Error(err.message); + } + } + + async put(url, body, options) { + const headers = { + ...(options && 
options.headers), + Accept: 'application/json', + 'Content-Type': 'application/json;charset=UTF-8', + }; + + try { + const response = await this._axios.put(url, body, { + ...options, + headers, + }); + + return response; + } catch (err) { + console.error(err); + throw new Error(err.message); + } + } + + async delete(url, options) { + const headers = { + ...(options && options.headers), + Accept: 'application/json', + 'Content-Type': 'application/json;charset=UTF-8', + }; + + try { + const response = await this._axios.delete(url, { + ...options, + headers, + }); + + return response; + } catch (err) { + console.error(err); + throw new Error(err.message); + } + } +} + +module.exports = HttpClient; \ No newline at end of file diff --git a/src/utils/__tests__/datasetonboarding.util.test.js b/src/utils/__tests__/datasetonboarding.util.test.js index 87521246..6e8e6fde 100644 --- a/src/utils/__tests__/datasetonboarding.util.test.js +++ b/src/utils/__tests__/datasetonboarding.util.test.js @@ -103,4 +103,18 @@ describe('Dataset onboarding utility', () => { }); }); }); + describe('returnAsDate', () => { + it('Should return a correctly formatted date for `2007-01-04`', () => { + expect(datasetonboardingUtil.returnAsDate('2007-01-04')).toStrictEqual(`04/01/2007`); + }); + it('Should return a correctly formatted date for `2007/01/04`', () => { + expect(datasetonboardingUtil.returnAsDate('2007/01/04')).toStrictEqual(`04/01/2007`); + }); + it('Should not return a correctly formatted date for `01-04-2007`', () => { + expect(datasetonboardingUtil.returnAsDate('04-01-2007')).not.toEqual(`04/01/2007`); + }); + it('Should not return a correctly formatted date for `01/04/2007`', () => { + expect(datasetonboardingUtil.returnAsDate('04/01/2007')).not.toEqual(`04/01/2007`); + }); + }); }); diff --git a/src/utils/datasetonboarding.util.js b/src/utils/datasetonboarding.util.js index 51f09e84..e8a32b95 100644 --- a/src/utils/datasetonboarding.util.js +++ b/src/utils/datasetonboarding.util.js 
@@ -231,7 +231,6 @@ const returnAsArray = value => { * @returns {String} [value as date format] */ const returnAsDate = value => { - if (moment(value, 'DD/MM/YYYY').isValid()) return value; return moment(new Date(value)).format('DD/MM/YYYY'); }; @@ -885,7 +884,10 @@ const createNotifications = async (type, context) => { team = await TeamModel.findOne({ _id: context.datasetv2.summary.publisher.identifier }).lean(); for (let member of team.members) { - if (member.roles.some(role => ['manager', 'metadata_editor'].includes(role))) teamMembers.push(member.memberid); + if ((Array.isArray(member.roles) && member.roles.some(role => ['manager', 'metadata_editor'].includes(role))) + || (typeof member.roles === 'string' && ['manager', 'metadata_editor'].includes(member.roles))) { + teamMembers.push(member.memberid); + } } teamMembersDetails = await UserModel.find({ _id: { $in: teamMembers } }) @@ -1300,4 +1302,5 @@ export default { buildBulkUploadObject, buildv2Object, datasetv2ObjectComparison, + returnAsDate, };