diff --git a/.prettierrc b/.prettierrc
new file mode 100644
index 00000000..c3b22ed2
--- /dev/null
+++ b/.prettierrc
@@ -0,0 +1,11 @@
+{
+	"arrowParens": "avoid",
+	"bracketSpacing": true,
+	"trailingComma": "es5",
+	"useTabs": true,
+	"semi": true,
+	"singleQuote": true,
+	"jsxSingleQuote": true,
+	"jsxBracketSameLine": true,
+	"printWidth": 140
+}
\ No newline at end of file
diff --git a/cloudbuild.yaml b/cloudbuild.yaml
index f3326e1b..92f08e45 100644
--- a/cloudbuild.yaml
+++ b/cloudbuild.yaml
@@ -1,6 +1,9 @@
 steps:
 - name: 'gcr.io/cloud-builders/docker'
-  args: ['build', '-t', 'gcr.io/$PROJECT_ID/${_APP_NAME}:latest', '.']
+  entrypoint: 'bash'
+  args: ['-c', 'docker pull gcr.io/$PROJECT_ID/${_APP_NAME}:latest || exit 0']
+- name: 'gcr.io/cloud-builders/docker'
+  args: ['build', '-t', 'gcr.io/$PROJECT_ID/${_APP_NAME}:latest', '--cache-from', 'gcr.io/$PROJECT_ID/${_APP_NAME}:latest', '.']
 - name: 'gcr.io/cloud-builders/docker'
   args: ['push', 'gcr.io/$PROJECT_ID/${_APP_NAME}:latest']
 - name: 'gcr.io/cloud-builders/gcloud'
@@ -15,3 +18,5 @@ steps:
   args: ['run', 'deploy', 'uatbeta-api', '--image', 'gcr.io/$PROJECT_ID/${_APP_NAME}:latest', '--platform', 'managed', '--region', '${_REGION}', '--allow-unauthenticated']
 images:
 - gcr.io/$PROJECT_ID/${_APP_NAME}:latest
+options:
+  machineType: 'E2_HIGHCPU_8'
\ No newline at end of file
diff --git a/cloudbuild_dynamic.yaml b/cloudbuild_dynamic.yaml
new file mode 100644
index 00000000..3470fe8a
--- /dev/null
+++ b/cloudbuild_dynamic.yaml
@@ -0,0 +1,19 @@
+steps:
+- name: 'gcr.io/cloud-builders/docker'
+  entrypoint: 'bash'
+  args: ['-c', 'docker pull gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT} || exit 0']
+- name: 'gcr.io/cloud-builders/docker'
+  args: [
+    'build',
+    '-t', 'gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT}',
+    '--cache-from', 'gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT}',
+    '.'
+  ]
+- name: 'gcr.io/cloud-builders/docker'
+  args: ['push', 'gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT}']
+- name: 'gcr.io/cloud-builders/gcloud'
+  args: ['run', 'deploy', '${_ENVIRONMENT}-api', '--image', 'gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT}', '--platform', 'managed', '--region', '${_REGION}', '--allow-unauthenticated']
+images:
+- gcr.io/$PROJECT_ID/${_APP_NAME}:${_ENVIRONMENT}
+options:
+  machineType: 'E2_HIGHCPU_8'
\ No newline at end of file
diff --git a/package.json b/package.json
index aacd177d..9fa5d3df 100644
--- a/package.json
+++ b/package.json
@@ -17,6 +17,7 @@
     "btoa": "^1.2.1",
     "cookie-parser": "^1.4.5",
     "cors": "^2.8.5",
+    "crypto": "^1.0.1",
     "crypto-js": "^4.0.0",
     "discourse-sso": "^1.0.3",
     "dotenv": "^8.2.0",
@@ -41,7 +42,9 @@
     "passport-jwt": "^4.0.0",
     "passport-linkedin-oauth2": "^2.0.0",
     "passport-openidconnect": "0.0.2",
+    "prettier": "^2.2.1",
     "query-string": "^6.12.1",
+    "randomstring": "^1.1.5",
     "snyk": "^1.334.0",
     "swagger-ui-express": "^4.1.4",
     "test": "^0.6.0",
@@ -65,7 +68,9 @@
     "test": "jest --runInBand",
     "eject": "",
     "snyk-protect": "snyk protect",
-    "prepublish": "npm run snyk-protect"
+    "prepublish": "npm run snyk-protect",
+    "prettify": "prettier --write \"src/**/*.{scss,js,jsx}\"",
+    "prettify-test": "prettier --write \"test/**/*.js\""
   },
   "proxy": "http://localhost:3001",
   "snyk": true
diff --git a/src/config/account.js b/src/config/account.js
index 2b487e05..65c82c8c 100755
--- a/src/config/account.js
+++ b/src/config/account.js
@@ -1,67 +1,68 @@
 import { getUserByUserId } from '../resources/user/user.repository';
-import { to } from 'await-to-js'
+import { to } from 'await-to-js';
 
 const store = new Map();
 const logins = new Map();
 const { nanoid } = require('nanoid');
-
 class Account {
-    constructor(id, profile) {
-        this.accountId = id || nanoid();
-        this.profile = profile;
-        store.set(this.accountId, this);
-    }
+	constructor(id, profile) {
+		this.accountId = id || nanoid();
+		this.profile = profile;
+		store.set(this.accountId, this);
+	}
 
-    /**
-     * @param use - can either be "id_token" or "userinfo", depending on
-     * where the specific claims are intended to be put in.
-     * @param scope - the intended scope, while oidc-provider will mask
-     * claims depending on the scope automatically you might want to skip
-     * loading some claims from external resources etc. based on this detail
-     * or not return them in id tokens but only userinfo and so on.
+ */ + async claims(use, scope) { + // eslint-disable-line no-unused-vars + if (this.profile) { + return { + sub: this.accountId, // it is essential to always return a sub claim + email: this.profile.email, + firstname: this.profile.firstname, + lastname: this.profile.lastname, + }; + } - return { - sub: this.accountId, // it is essential to always return a sub claim - }; - } + return { + sub: this.accountId, // it is essential to always return a sub claim + }; + } - static async findByFederated(provider, claims) { - const id = `${provider}.${claims.sub}`; - if (!logins.get(id)) { - logins.set(id, new Account(id, claims)); - } - return logins.get(id); - } + static async findByFederated(provider, claims) { + const id = `${provider}.${claims.sub}`; + if (!logins.get(id)) { + logins.set(id, new Account(id, claims)); + } + return logins.get(id); + } - static async findByLogin(login) { - if (!logins.get(login)) { - logins.set(login, new Account(login)); - } + static async findByLogin(login) { + if (!logins.get(login)) { + logins.set(login, new Account(login)); + } - return logins.get(login); - } + return logins.get(login); + } - static async findAccount(ctx, id, token) { // eslint-disable-line no-unused-vars - // token is a reference to the token used for which a given account is being loaded, - // it is undefined in scenarios where account claims are returned from authorization endpoint - // ctx is the koa request context - if (!store.get(id)) { - let [err, user] = await to(getUserByUserId(parseInt(id))) - new Account(id, user); // eslint-disable-line no-new - } - return store.get(id); - } + static async findAccount(ctx, id, token) { + // eslint-disable-line no-unused-vars + // token is a reference to the token used for which a given account is being loaded, + // it is undefined in scenarios where account claims are returned from authorization endpoint + // ctx is the koa request context + if (!store.get(id)) { + let [err, user] = await to(getUserByUserId(parseInt(id))); + new Account(id, user); // eslint-disable-line no-new + } + return store.get(id); + } } module.exports = Account; diff --git a/src/config/configuration.js b/src/config/configuration.js index 172059c7..889e6248 100755 --- a/src/config/configuration.js +++ b/src/config/configuration.js @@ -1,90 +1,92 @@ import oidcProvider from 'oidc-provider'; -const { interactionPolicy: { Prompt, base: policy } } = oidcProvider; +const { + interactionPolicy: { Prompt, base: policy }, +} = oidcProvider; // copies the default policy, already has login and consent prompt policies const interactions2 = policy(); // create a requestable prompt with no implicit checks const selectAccount = new Prompt({ - name: 'select_account', - requestable: true, + name: 'select_account', + requestable: true, }); // add to index 0, order goes select_account > login > consent interactions2.add(selectAccount, 0); export const clients = [ - { - //Metadata works - client_id: process.env.MDWClientID || '', - client_secret: process.env.MDWClientSecret || '', - grant_types: ['authorization_code'], - response_types: ['code'], - //grant_types: ['authorization_code', 'implicit'], - //response_types: ['code id_token'], - redirect_uris: process.env.MDWRedirectURI.split(",") || [''], - id_token_signed_response_alg: 'HS256', - post_logout_redirect_uris: ['https://hdruk-auth.metadata.works/auth/logout'] - }, - { - //BC Platforms - client_id: process.env.BCPClientID || '', - client_secret: process.env.BCPClientSecret || '', - grant_types: ['authorization_code', 'implicit'], - 
response_types: ['code id_token'], - redirect_uris: process.env.BCPRedirectURI.split(",") || [''], - id_token_signed_response_alg: 'HS256', - post_logout_redirect_uris: ['https://web.uatbeta.healthdatagateway.org/search?search=&logout=true'] - } + { + //Metadata works + client_id: process.env.MDWClientID || '', + client_secret: process.env.MDWClientSecret || '', + grant_types: ['authorization_code'], + response_types: ['code'], + //grant_types: ['authorization_code', 'implicit'], + //response_types: ['code id_token'], + redirect_uris: process.env.MDWRedirectURI.split(',') || [''], + id_token_signed_response_alg: 'HS256', + post_logout_redirect_uris: ['https://hdruk-auth.metadata.works/auth/logout'], + }, + { + //BC Platforms + client_id: process.env.BCPClientID || '', + client_secret: process.env.BCPClientSecret || '', + grant_types: ['authorization_code', 'implicit'], + response_types: ['code id_token'], + redirect_uris: process.env.BCPRedirectURI.split(',') || [''], + id_token_signed_response_alg: 'HS256', + post_logout_redirect_uris: ['https://web.uatbeta.healthdatagateway.org/search?search=&logout=true'], + }, ]; export const interactions = { - policy: interactions2, - url(ctx, interaction) { - return `/api/v1/openid/interaction/${ctx.oidc.uid}`; - }, + policy: interactions2, + url(ctx, interaction) { + return `/api/v1/openid/interaction/${ctx.oidc.uid}`; + }, }; export const cookies = { - long: { signed: true, maxAge: (1 * 24 * 60 * 60) * 1000 }, - short: { signed: true }, - keys: ['some secret key', 'and also the old rotated away some time ago', 'and one more'], + long: { signed: true, maxAge: 1 * 24 * 60 * 60 * 1000 }, + short: { signed: true }, + keys: ['some secret key', 'and also the old rotated away some time ago', 'and one more'], }; export const claims = { - email: ['email'], - profile: ['firstname', 'lastname'], + email: ['email'], + profile: ['firstname', 'lastname'], }; export const features = { - devInteractions: { enabled: false }, - deviceFlow: { enabled: true }, - introspection: { enabled: true }, - revocation: { enabled: true }, - encryption: { enabled: true }, - rpInitiatedLogout: { - enabled: true, - logoutSource, - postLogoutSuccessSource - } + devInteractions: { enabled: false }, + deviceFlow: { enabled: true }, + introspection: { enabled: true }, + revocation: { enabled: true }, + encryption: { enabled: true }, + rpInitiatedLogout: { + enabled: true, + logoutSource, + postLogoutSuccessSource, + }, }; export const jwks = require('./jwks.json'); export const ttl = { - AccessToken: 1 * 60 * 60, - AuthorizationCode: 10 * 60, - IdToken: 1 * 60 * 60, - DeviceCode: 10 * 60, - RefreshToken: 1 * 24 * 60 * 60, + AccessToken: 1 * 60 * 60, + AuthorizationCode: 10 * 60, + IdToken: 1 * 60 * 60, + DeviceCode: 10 * 60, + RefreshToken: 1 * 24 * 60 * 60, }; async function logoutSource(ctx, form) { - // @param ctx - koa request context - // @param form - form source (id="op.logoutForm") to be embedded in the page and submitted by - // the End-User - ctx.body = ` + // @param ctx - koa request context + // @param form - form source (id="op.logoutForm") to be embedded in the page and submitted by + // the End-User + ctx.body = ` Logout Request @@ -98,10 +100,10 @@ async function logoutSource(ctx, form) { `; - } +} - async function postLogoutSuccessSource(ctx) { - // @param ctx - koa request context - ctx.res.clearCookie('jwt'); - ctx.res.status(200).redirect(process.env.homeURL+'/search?search='); - } +async function postLogoutSuccessSource(ctx) { + // @param ctx - koa request context 
+ ctx.res.clearCookie('jwt'); + ctx.res.status(200).redirect(process.env.homeURL + '/search?search='); +} diff --git a/src/config/db.js b/src/config/db.js index 13c3a8c1..4bb40f0f 100644 --- a/src/config/db.js +++ b/src/config/db.js @@ -1,28 +1,35 @@ import mongoose from 'mongoose'; -import { connection } from 'mongoose' +import { connection } from 'mongoose'; const connectToDatabase = async () => { + try { + const mongoURI = + 'mongodb+srv://' + + process.env.user + + ':' + + process.env.password + + '@' + + process.env.cluster + + '/' + + process.env.database + + '?ssl=true&retryWrites=true&w=majority'; + await mongoose.connect(mongoURI, { + useNewUrlParser: true, + useFindAndModify: false, + useUnifiedTopology: true, + autoIndex: false, // Don't build indexes + poolSize: 10, // Maintain up to 10 socket connections + // If not connected, return errors immediately rather than waiting for reconnect + bufferMaxEntries: 0, + useNewUrlParser: true, + }); - try { - const mongoURI = 'mongodb+srv://'+process.env.user+':'+process.env.password+'@'+process.env.cluster+'/'+process.env.database+'?ssl=true&retryWrites=true&w=majority'; - await mongoose.connect( - mongoURI, { - useNewUrlParser: true, - useFindAndModify: false, - useUnifiedTopology: true, - autoIndex: false, // Don't build indexes - poolSize: 10, // Maintain up to 10 socket connections - // If not connected, return errors immediately rather than waiting for reconnect - bufferMaxEntries: 0, - useNewUrlParser: true - }); + console.log('MongoDB connected...'); + } catch (error) { + console.error(error.message); - console.log('MongoDB connected...'); - } catch (error) { - console.error(error.message); - - process.exit(1); - } + process.exit(1); + } }; -export { connectToDatabase, connection } \ No newline at end of file +export { connectToDatabase, connection }; diff --git a/src/config/generate-keys.js b/src/config/generate-keys.js index d4e2ecb3..0cecde82 100644 --- a/src/config/generate-keys.js +++ b/src/config/generate-keys.js @@ -1,19 +1,12 @@ -const { JWKS: { KeyStore } } = require('jose'); +const { + JWKS: { KeyStore }, +} = require('jose'); const generateKeys = async () => { + const keystore = new KeyStore(); + await Promise.all([keystore.generate('RSA', 2048, { use: 'sig' }), keystore.generate('RSA', 2048, { use: 'enc' })]).then(() => { + return keystore.toJWKS(true); + }); +}; - - - const keystore = new KeyStore(); - await Promise.all([ - keystore.generate('RSA', 2048, { use: 'sig' }), - keystore.generate('RSA', 2048, { use: 'enc' }) - ]) - .then(() => { - return keystore.toJWKS(true); - }); - - -} - -export { generateKeys } \ No newline at end of file +export { generateKeys }; diff --git a/src/config/in-memory-db.js b/src/config/in-memory-db.js index 8a2ad932..3f900393 100644 --- a/src/config/in-memory-db.js +++ b/src/config/in-memory-db.js @@ -6,45 +6,45 @@ const mongod = new MongoMemoryServer(); * Connect to the in-memory database. */ module.exports.connect = async () => { - const uri = await mongod.getUri(); + const uri = await mongod.getUri(); - const mongooseOpts = { - useNewUrlParser: true, - useUnifiedTopology: true, - useCreateIndex: true - }; + const mongooseOpts = { + useNewUrlParser: true, + useUnifiedTopology: true, + useCreateIndex: true, + }; - await mongoose.connect(uri, mongooseOpts); -} + await mongoose.connect(uri, mongooseOpts); +}; /** * Load data into the database. 
 */
-module.exports.loadData = async (data) => {
-    const queries = Object.keys(data).map(col => {
-        const collection = mongoose.connection.collection(col);
-        return collection.insertMany(data[col])
-    })
-    return Promise.all(queries);
-}
+module.exports.loadData = async data => {
+	const queries = Object.keys(data).map(col => {
+		const collection = mongoose.connection.collection(col);
+		return collection.insertMany(data[col]);
+	});
+	return Promise.all(queries);
+};
 
 /**
  * Drop database, close the connection and stop mongod.
  */
 module.exports.closeDatabase = async () => {
-    await mongoose.connection.dropDatabase();
-    await mongoose.connection.close(true);
-    await mongod.stop();
-}
+	await mongoose.connection.dropDatabase();
+	await mongoose.connection.close(true);
+	await mongod.stop();
+};
 
 /**
  * Remove all the data for all db collections.
  */
 module.exports.clearDatabase = async () => {
-    const collections = mongoose.connection.collections;
+	const collections = mongoose.connection.collections;
 
-    for (const key in collections) {
-        const collection = collections[key];
-        await collection.deleteMany();
-    }
-}
\ No newline at end of file
+	for (const key in collections) {
+		const collection = collections[key];
+		await collection.deleteMany();
+	}
+};
diff --git a/src/config/server.js b/src/config/server.js
index 50e81dcc..5f941083 100644
--- a/src/config/server.js
+++ b/src/config/server.js
@@ -1,6 +1,6 @@
 'use strict';
 
-import express from 'express';
+import express from 'express';
 import Provider from 'oidc-provider';
 import swaggerUi from 'swagger-ui-express';
 import YAML from 'yamljs';
@@ -20,17 +20,17 @@ require('dotenv').config();
 if (helper.getEnvironment() !== 'local') {
 	Sentry.init({
 		dsn: 'https://c7c564a153884dc0a6b676943b172121@o444579.ingest.sentry.io/5419637',
-		environment: helper.getEnvironment()
+		environment: helper.getEnvironment(),
 	});
 }
 
 const Account = require('./account');
 const configuration = require('./configuration');
-
 const API_PORT = process.env.PORT || 3001;
 const session = require('express-session');
 
 var app = express();
+app.disable('x-powered-by');
 
 configuration.findAccount = Account.findAccount;
 const oidc = new Provider(process.env.api_url || 'http://localhost:3001', configuration);
@@ -52,6 +52,11 @@ app.use(
 	})
 );
 
+// apply rate limiter of 500 requests per minute
+const RateLimit = require('express-rate-limit');
+let limiter = new RateLimit({ windowMs: 60000, max: 500 });
+app.use(limiter);
+
 const router = express.Router();
 
 connectToDatabase();
@@ -70,6 +75,11 @@ app.use(
 		secret: process.env.JWTSecret,
 		resave: false,
 		saveUninitialized: true,
+		name: 'sessionId',
+		cookie: {
+			secure: process.env.api_url ?
true : false, + httpOnly: true + } }) ); @@ -82,63 +92,52 @@ function setNoCache(req, res, next) { app.get('/api/v1/openid/endsession', setNoCache, (req, res, next) => { passport.authenticate('jwt', async function (err, user, info) { if (err || !user) { - return res.status(200).redirect(process.env.homeURL+'/search?search='); + return res.status(200).redirect(process.env.homeURL + '/search?search='); } oidc.Session.destory; req.logout(); - res.clearCookie('jwt'); + res.clearCookie('jwt'); - return res.status(200).redirect(process.env.homeURL+'/search?search='); + return res.status(200).redirect(process.env.homeURL + '/search?search='); })(req, res, next); -}) - +}); app.get('/api/v1/openid/interaction/:uid', setNoCache, (req, res, next) => { passport.authenticate('jwt', async function (err, user, info) { - if (err || !user) { //login in user - go to login screen var apiURL = process.env.api_url || 'http://localhost:3001'; - return res.status(200).redirect(process.env.homeURL+'/search?search=&showLogin=true&loginReferrer='+apiURL+req.url) - } - else { + return res.status(200).redirect(process.env.homeURL + '/search?search=&showLogin=true&loginReferrer=' + apiURL + req.url); + } else { try { const { uid, prompt, params, session } = await oidc.interactionDetails(req, res); - + const client = await oidc.Client.find(params.client_id); - switch (prompt.name) { case 'select_account': { - } case 'login': { - - - const result = { select_account: {}, // make sure its skipped by the interaction policy since we just logged in login: { - account: user.id.toString() + account: user.id.toString(), }, }; - - - - return await oidc.interactionFinished(req, res, result, { mergeWithLastSubmission: false }); } case 'consent': { if (!session) { return oidc.interactionFinished(req, res, { select_account: {} }, { mergeWithLastSubmission: false }); } - + const account = await oidc.Account.findAccount(undefined, session.accountId); const { email } = await account.claims('prompt', 'email', { email: null }, []); - - const { prompt: { name, details } } = await oidc.interactionDetails(req, res); + const { + prompt: { name, details }, + } = await oidc.interactionDetails(req, res); //assert.equal(name, 'consent'); const consent = {}; @@ -167,20 +166,18 @@ app.get('/api/v1/openid/interaction/:uid', setNoCache, (req, res, next) => { } } })(req, res, next); -}) - - - - +}); app.use('/api/v1/openid', oidc.callback); app.use('/api', router); app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument)); +app.use('/oauth', require('../resources/auth/oauth.route')); app.use('/api/v1/auth/sso/discourse', require('../resources/auth/sso/sso.discourse.router')); app.use('/api/v1/auth', require('../resources/auth/auth.route')); app.use('/api/v1/auth/register', require('../resources/user/user.register.route')); + app.use('/api/v1/users', require('../resources/user/user.route')); app.use('/api/v1/topics', require('../resources/topic/topic.route')); app.use('/api/v1/publishers', require('../resources/publisher/publisher.route')); @@ -192,14 +189,14 @@ app.use('/api/v1/relatedobject/', require('../resources/relatedobjects/relatedob app.use('/api/v1/tools', require('../resources/tool/tool.route')); app.use('/api/v1/accounts', require('../resources/account/account.route')); app.use('/api/v1/search/filter', require('../resources/search/filter.route')); -app.use('/api/v1/search', require('../resources/search/search.router')); // tools projects people +app.use('/api/v1/search', require('../resources/search/search.router')); // 
tools projects people + +app.use('/api/v1/linkchecker', require('../resources/linkchecker/linkchecker.router')); -app.use('/api/v1/linkchecker', require('../resources/linkchecker/linkchecker.router')); - -app.use('/api/v1/stats', require('../resources/stats/stats.router')); -app.use('/api/v1/kpis', require('../resources/stats/kpis.router')); +app.use('/api/v1/stats', require('../resources/stats/stats.router')); +app.use('/api/v1/kpis', require('../resources/stats/kpis.router')); -app.use('/api/v1/course', require('../resources/course/course.route')); +app.use('/api/v1/course', require('../resources/course/course.route')); app.use('/api/v1/person', require('../resources/person/person.route')); diff --git a/src/resources/account/account.route.js b/src/resources/account/account.route.js index b213549d..66f303b4 100644 --- a/src/resources/account/account.route.js +++ b/src/resources/account/account.route.js @@ -1,12 +1,12 @@ import express from 'express'; -import passport from "passport"; -import { utils } from "../auth"; -import { ROLES } from '../user/user.roles' +import passport from 'passport'; +import { utils } from '../auth'; +import { ROLES } from '../user/user.roles'; import { Data } from '../tool/data.model'; -import { Collections } from '../collections/collections.model'; +import { Collections } from '../collections/collections.model'; import { MessagesModel } from '../message/message.model'; -import { createDiscourseTopic } from '../discourse/discourse.service' -import { UserModel } from '../user/user.model' +import { createDiscourseTopic } from '../discourse/discourse.service'; +import { UserModel } from '../user/user.model'; import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; @@ -15,282 +15,251 @@ const hdrukEmail = `enquiry@healthdatagateway.org`; /** * {delete} /api/v1/accounts - * + * * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search */ -router.delete( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - const { id } = req.body; - Data.findOneAndDelete({ id: id }, (err) => { - if (err) return res.send(err); - return res.json({ success: true }); - }); - }); +router.delete('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + const { id } = req.body; + Data.findOneAndDelete({ id: id }, err => { + if (err) return res.send(err); + return res.json({ success: true }); + }); +}); /** * {get} /api/v1/accounts/admin - * + * * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. * The free word search criteria can be improved on with node modules that specialize with searching i.e. 
js-search */ -router.get( - '/admin', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - var result; - var startIndex = 0; - var maxResults = 25; - var typeString = ""; +router.get('/admin', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + var result; + var startIndex = 0; + var maxResults = 25; + var typeString = ''; - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.maxResults) { - maxResults = req.query.maxResults; - } - if (req.query.type) { - typeString = req.query.type; - } + if (req.query.startIndex) { + startIndex = req.query.startIndex; + } + if (req.query.maxResults) { + maxResults = req.query.maxResults; + } + if (req.query.type) { + typeString = req.query.type; + } - var q = Data.aggregate([ - { $match: { $and: [{ type: typeString }] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ])//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - result = res.json({ success: true, data: data }); - }); - - return result; - }); + var q = Data.aggregate([ + { $match: { $and: [{ type: typeString }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]); //.skip(parseInt(startIndex)).limit(parseInt(maxResults)); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + result = res.json({ success: true, data: data }); + }); + + return result; +}); /** * {get} /api/v1/accounts/admin/collections - * + * * Returns list of all collections. */ - router.get( - '/admin/collections', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - var result; - var startIndex = 0; - var maxResults = 25; - - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.maxResults) { - maxResults = req.query.maxResults; - } - - var q = Collections.aggregate([ - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ])//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); +router.get('/admin/collections', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + var result; + var startIndex = 0; + var maxResults = 25; + + if (req.query.startIndex) { + startIndex = req.query.startIndex; + } + if (req.query.maxResults) { + maxResults = req.query.maxResults; + } + + var q = Collections.aggregate([ + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]); //.skip(parseInt(startIndex)).limit(parseInt(maxResults)); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - result = res.json({ success: true, data: data }); - }); - - return result; - }); + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + result = res.json({ success: true, data: data }); + }); + + return result; +}); /** -* {get} /api/v1/accounts -* -* Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. 
-* The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search -*/ -router.get( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - var result; - var startIndex = 0; - var maxResults = 25; - var typeString = ""; - var idString = ""; + * {get} /api/v1/accounts + * + * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. + * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search + */ +router.get('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + var result; + var startIndex = 0; + var maxResults = 25; + var typeString = ''; + var idString = ''; - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.maxResults) { - maxResults = req.query.maxResults; - } - if (req.query.type) { - typeString = req.query.type; - } - if (req.query.id) { - idString = req.query.id; - } + if (req.query.startIndex) { + startIndex = req.query.startIndex; + } + if (req.query.maxResults) { + maxResults = req.query.maxResults; + } + if (req.query.type) { + typeString = req.query.type; + } + if (req.query.id) { + idString = req.query.id; + } - var q = Data.aggregate([ - { $match: { $and: [{ type: typeString }, { authors: parseInt(idString) }] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ])//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - result = res.json({ success: true, data: data }); - }); - return result; - }); + var q = Data.aggregate([ + { $match: { $and: [{ type: typeString }, { authors: parseInt(idString) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]); //.skip(parseInt(startIndex)).limit(parseInt(maxResults)); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + result = res.json({ success: true, data: data }); + }); + return result; +}); - /** -* {get} /api/v1/accounts/collections -* -* Returns list of collections. -*/ - router.get( - '/collections', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - var result; - var startIndex = 0; - var maxResults = 25; - var idString = ""; - - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.maxResults) { - maxResults = req.query.maxResults; - } - if (req.query.id) { - idString = req.query.id; - } - - var q = Collections.aggregate([ - { $match: { $and: [{ authors: parseInt(idString) }] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ])//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); + * {get} /api/v1/accounts/collections + * + * Returns list of collections. 
+ */ +router.get('/collections', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + var result; + var startIndex = 0; + var maxResults = 25; + var idString = ''; + + if (req.query.startIndex) { + startIndex = req.query.startIndex; + } + if (req.query.maxResults) { + maxResults = req.query.maxResults; + } + if (req.query.id) { + idString = req.query.id; + } - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - result = res.json({ success: true, data: data }); - }); - return result; - }); + var q = Collections.aggregate([ + { $match: { $and: [{ authors: parseInt(idString) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]); //.skip(parseInt(startIndex)).limit(parseInt(maxResults)); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + result = res.json({ success: true, data: data }); + }); + return result; +}); /** * {put} /api/v1/accounts/status - * + * * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search */ -router.put( - '/status', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - const { id, activeflag } = req.body; - try { - await Data.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag }}); - const tool = await Data.findOne({ id: id }); - - if (!tool) { - return res.status(400).json({ success: false, error: 'Tool not found' }); - } +router.put('/status', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + const { id, activeflag } = req.body; + try { + await Data.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag } }); + const tool = await Data.findOne({ id: id }); + + if (!tool) { + return res.status(400).json({ success: false, error: 'Tool not found' }); + } - if (tool.authors) { - tool.authors.forEach(async (authorId) => { - await createMessage(authorId, id, tool.name, tool.type, activeflag); - }); - } - await createMessage(0, id, tool.name, tool.type, activeflag); + if (tool.authors) { + tool.authors.forEach(async authorId => { + await createMessage(authorId, id, tool.name, tool.type, activeflag); + }); + } + await createMessage(0, id, tool.name, tool.type, activeflag); - if (!tool.discourseTopicId && tool.activeflag === 'active') { - await createDiscourseTopic(tool); - } + if (!tool.discourseTopicId && tool.activeflag === 'active') { + await createDiscourseTopic(tool); + } - // Send email notifications to all admins and authors who have opted in - await sendEmailNotifications(tool, activeflag); + // Send email notifications to all admins and authors who have opted in + await sendEmailNotifications(tool, activeflag); - return res.json({ success: true }); - - } catch (err) { - console.log(err); - return res.status(500).json({ success: false, error: err }); - } - }); + return res.json({ success: true }); + } catch (err) { + console.log(err); + return res.status(500).json({ success: false, error: err }); + } +}); module.exports = router; async function createMessage(authorId, toolId, toolName, toolType, activeflag) { - let message = new MessagesModel(); - const toolLink = process.env.homeURL + '/tool/' + toolId; + 
let message = new MessagesModel(); + const toolLink = process.env.homeURL + '/tool/' + toolId; - if (activeflag === 'active') { - message.messageType = 'approved'; - message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}` - } else if (activeflag === 'archive') { - message.messageType = 'rejected'; - message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}` - } - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = authorId; - message.messageObjectID = toolId; - message.messageSent = Date.now(); - message.isRead = false; - await message.save(); + if (activeflag === 'active') { + message.messageType = 'approved'; + message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}`; + } else if (activeflag === 'archive') { + message.messageType = 'rejected'; + message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}`; + } + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = authorId; + message.messageObjectID = toolId; + message.messageSent = Date.now(); + message.isRead = false; + await message.save(); } async function sendEmailNotifications(tool, activeflag) { - let subject; - let html; - // 1. Generate URL for linking tool in email - const toolLink = process.env.homeURL + '/tool/' + tool.id + '/' + tool.name - - // 2. Build HTML for email - if (activeflag === 'active') { - subject = `Your ${tool.type} ${tool.name} has been approved and is now live` - html = `Your ${tool.type} ${tool.name} has been approved and is now live

${toolLink}` - } else if (activeflag === 'archive') { - subject = `Your ${tool.type} ${tool.name} has been rejected` - html = `Your ${tool.type} ${tool.name} has been rejected

${toolLink}` - } + let subject; + let html; + // 1. Generate URL for linking tool in email + const toolLink = process.env.homeURL + '/tool/' + tool.id + '/' + tool.name; + + // 2. Build HTML for email + if (activeflag === 'active') { + subject = `Your ${tool.type} ${tool.name} has been approved and is now live`; + html = `Your ${tool.type} ${tool.name} has been approved and is now live

${toolLink}`; + } else if (activeflag === 'archive') { + subject = `Your ${tool.type} ${tool.name} has been rejected`; + html = `Your ${tool.type} ${tool.name} has been rejected

${toolLink}`; + } - // 3. Query Db for all admins or authors of the tool who have opted in to email updates - var q = UserModel.aggregate([ - // Find all users who are admins or authors of this tool - { $match: { $or: [{ role: 'Admin' }, { id: { $in: tool.authors } }] } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); + // 3. Query Db for all admins or authors of the tool who have opted in to email updates + var q = UserModel.aggregate([ + // Find all users who are admins or authors of this tool + { $match: { $or: [{ role: 'Admin' }, { id: { $in: tool.authors } }] } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html - ); - }); + // 4. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); + }); } diff --git a/src/resources/auth/auth.route.js b/src/resources/auth/auth.route.js index 6c55a51d..79eb6df8 100644 --- a/src/resources/auth/auth.route.js +++ b/src/resources/auth/auth.route.js @@ -1,10 +1,11 @@ import express from 'express'; +import _ from 'lodash'; import { to } from 'await-to-js'; -import { verifyPassword } from '../auth/utils'; +import passport from 'passport'; + +import { verifyPassword, getRedirectUrl } from '../auth/utils'; import { login } from '../auth/strategies/jwt'; import { getUserByEmail } from '../user/user.repository'; -import { getRedirectUrl } from '../auth/utils'; -import passport from 'passport'; const router = express.Router(); @@ -17,9 +18,7 @@ router.post('/login', async (req, res) => { const [err, user] = await to(getUserByEmail(email)); const authenticationError = () => { - return res - .status(500) - .json({ success: false, data: 'Authentication error!' }); + return res.status(500).json({ success: false, data: 'Authentication error!' }); }; if (!(await verifyPassword(password, user.password))) { @@ -38,6 +37,7 @@ router.post('/login', async (req, res) => { .status(200) .cookie('jwt', token, { httpOnly: true, + secure: process.env.api_url ? 
true : false, }) .json({ success: true, @@ -49,10 +49,10 @@ router.post('/login', async (req, res) => { // @desc logout user // @access Private router.get('/logout', function (req, res) { - req.logout(); - for (var prop in req.cookies) { - res.clearCookie(prop); - } + req.logout(); + for (var prop in req.cookies) { + res.clearCookie(prop); + } return res.json({ success: true }); }); @@ -67,19 +67,19 @@ router.get('/status', function (req, res, next) { data: [{ role: 'Reader', id: null, name: null, loggedIn: false }], }); } else { - // 1. Reformat teams array for frontend - let { teams } = req.user.toObject(); - if(teams) { - teams = teams.map((team) => { - let { publisher, type, members } = team; - let member = members.find(member => { - return member.memberid.toString() === req.user._id.toString(); - }); - let { roles } = member; - return { ...publisher, type, roles }; - }); - } - // 2. Return user info + // 1. Reformat teams array for frontend + let { teams } = req.user.toObject(); + if (teams) { + teams = teams.map(team => { + let { publisher, type, members } = team; + let member = members.find(member => { + return member.memberid.toString() === req.user._id.toString(); + }); + let { roles } = member; + return { ...publisher, type, roles }; + }); + } + // 2. Return user info return res.json({ success: true, data: [ diff --git a/src/resources/auth/index.js b/src/resources/auth/index.js index 38185421..68e52171 100644 --- a/src/resources/auth/index.js +++ b/src/resources/auth/index.js @@ -1,12 +1,12 @@ -import * as utils from './utils' -import * as strategies from './strategies' +import * as utils from './utils'; +import * as strategies from './strategies'; -const pipe = (...functions) => args => functions.reduce((arg, fn) => fn(arg), args) +const pipe = (...functions) => args => functions.reduce((arg, fn) => fn(arg), args); const initialiseAuthentication = app => { - utils.setup() + utils.setup(); - pipe(strategies.OdicStrategy, strategies.LinkedinStrategy, strategies.GoogleStrategy, strategies.JWTStrategy)(app) -} + pipe(strategies.OdicStrategy, strategies.LinkedinStrategy, strategies.GoogleStrategy, strategies.JWTStrategy)(app); +}; -export { utils, initialiseAuthentication, strategies } \ No newline at end of file +export { utils, initialiseAuthentication, strategies }; diff --git a/src/resources/auth/oauth.route.js b/src/resources/auth/oauth.route.js new file mode 100644 index 00000000..70cb9073 --- /dev/null +++ b/src/resources/auth/oauth.route.js @@ -0,0 +1,54 @@ +import express from 'express'; +import _ from 'lodash'; + +import { signToken } from './utils'; +import { getServiceAccountByClientCredentials } from '../user/user.repository'; + +const router = express.Router(); + +// @router POST /oauth/token +// @desc Issues a JWT for a valid authentication attempt using a user defined grant type +// @access Public +router.post('/token', async (req, res) => { + // 1. Deconstruct grant type + const { grant_type = '' } = req.body; + // 2. 
Allow different grant types to be processed + switch(grant_type) { + case 'client_credentials': + // Deconstruct request body to extract client ID, secret + const { client_id = '', client_secret = '' } = req.body; + // Ensure client credentials have been passed + if (_.isEmpty(client_id) || _.isEmpty(client_secret)) { + return res.status(400).json({ + success: false, + message: 'Incomplete client credentials were provided for the authorisation attempt', + }); + } + // Find an associated service account based on the credentials passed + const serviceAccount = await getServiceAccountByClientCredentials(client_id, client_secret); + if (_.isNil(serviceAccount)) { + return res.status(400).json({ + success: false, + message: 'Invalid client credentials were provided for the authorisation attempt', + }); + } + // Construct JWT for service account + const token_type = 'jwt', expires_in = 900; + const jwt = signToken({ _id: serviceAccount._id, id: serviceAccount.id, timeStamp: Date.now() }, expires_in); + const access_token = `Bearer ${jwt}`; + + // Return payload + return res.status(200).json({ + access_token, + token_type, + expires_in, + }); + } + // Bad request for any other grant type passed + return res.status(400).json({ + success: false, + message: 'An invalid grant type has been specified', + }); +}); + +module.exports = router; diff --git a/src/resources/auth/sso/sso.discourse.router.js b/src/resources/auth/sso/sso.discourse.router.js index 309d71d5..a9555291 100644 --- a/src/resources/auth/sso/sso.discourse.router.js +++ b/src/resources/auth/sso/sso.discourse.router.js @@ -8,8 +8,8 @@ const router = express.Router(); // @router GET /api/v1/auth/sso/discourse // @desc Single Sign On for Discourse forum // @access Private -router.get("/", function(req, res, next) { - passport.authenticate("jwt", function(err, user, info) { +router.get('/', function (req, res, next) { + passport.authenticate('jwt', function (err, user, info) { if (err || !user) { return res.status(200).json({ redirectUrl: null }); } else { @@ -20,21 +20,22 @@ router.get("/", function(req, res, next) { redirectUrl = discourseLogin(req.query.sso, req.query.sig, req.user); } catch (err) { console.error(err); - return res.status(500).send("Error authenticating the user."); + return res.status(500).send('Error authenticating the user.'); } } return res .status(200) .cookie( - "jwt", + 'jwt', signToken({ _id: req.user._id, id: req.user.id, - timeStamp: Date.now() + timeStamp: Date.now(), }), { - httpOnly: true + httpOnly: true, + secure: process.env.api_url ? 
true : false, } ) .json({ redirectUrl: redirectUrl }); diff --git a/src/resources/auth/sso/sso.discourse.service.js b/src/resources/auth/sso/sso.discourse.service.js index 0debe555..38432f28 100644 --- a/src/resources/auth/sso/sso.discourse.service.js +++ b/src/resources/auth/sso/sso.discourse.service.js @@ -1,22 +1,22 @@ import discourse_sso from 'discourse-sso'; export function discourseLogin(payload, sig, user) { - const sso = new discourse_sso(process.env.DISCOURSE_SSO_SECRET); - - if (!sso.validate(payload, sig)) { - throw Error(`Error validating Discourse SSO payload for user with id: ${user.id}.`); - } + const sso = new discourse_sso(process.env.DISCOURSE_SSO_SECRET); - const nonce = sso.getNonce(payload); - const userparams = { - nonce: nonce, - external_id: user.id, - email: user.email, - username: `${user.firstname.toLowerCase()}.${user.lastname.toLowerCase()}`, - name: `${user.firstname} ${user.lastname}`, - }; + if (!sso.validate(payload, sig)) { + throw Error(`Error validating Discourse SSO payload for user with id: ${user.id}.`); + } - const q = sso.buildLoginString(userparams); + const nonce = sso.getNonce(payload); + const userparams = { + nonce: nonce, + external_id: user.id, + email: user.email, + username: `${user.firstname.toLowerCase()}.${user.lastname.toLowerCase()}`, + name: `${user.firstname} ${user.lastname}`, + }; - return `${process.env.DISCOURSE_URL}/session/sso_login?${q}`; + const q = sso.buildLoginString(userparams); + + return `${process.env.DISCOURSE_URL}/session/sso_login?${q}`; } diff --git a/src/resources/auth/strategies/google.js b/src/resources/auth/strategies/google.js index 4c404896..facd79cf 100644 --- a/src/resources/auth/strategies/google.js +++ b/src/resources/auth/strategies/google.js @@ -1,155 +1,147 @@ -import passport from 'passport' -import passportGoogle from 'passport-google-oauth' -import { to } from 'await-to-js' - -import { getUserByProviderId } from '../../user/user.repository' -import { updateRedirectURL } from '../../user/user.service' -import { getObjectById } from '../../tool/data.repository' -import { createUser } from '../../user/user.service' -import { signToken } from '../utils' -import { ROLES } from '../../user/user.roles' -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; +import passport from 'passport'; +import passportGoogle from 'passport-google-oauth'; +import { to } from 'await-to-js'; + +import { getUserByProviderId } from '../../user/user.repository'; +import { updateRedirectURL } from '../../user/user.service'; +import { getObjectById } from '../../tool/data.repository'; +import { createUser } from '../../user/user.service'; +import { signToken } from '../utils'; +import { ROLES } from '../../user/user.roles'; +import queryString from 'query-string'; +import Url from 'url'; +import { discourseLogin } from '../sso/sso.discourse.service'; const eventLogController = require('../../eventlog/eventlog.controller'); -const GoogleStrategy = passportGoogle.OAuth2Strategy +const GoogleStrategy = passportGoogle.OAuth2Strategy; const strategy = app => { - const strategyOptions = { - clientID: process.env.googleClientID, - clientSecret: process.env.googleClientSecret, - callbackURL: `/auth/google/callback`, - proxy: true - } - - const verifyCallback = async ( - accessToken, - refreshToken, - profile, - done - ) => { - if (!profile.id || profile.id === '') return done("loginError"); - - let [err, user] = await to(getUserByProviderId(profile.id)) - if 
(err || user) { - return done(err, user) - } - - const verifiedEmail = profile.emails.find(email => email.verified) || profile.emails[0]; - - const [createdError, createdUser] = await to( - createUser({ - provider: profile.provider, - providerId: profile.id, - firstname: profile.name.givenName, - lastname: profile.name.familyName, - email: verifiedEmail.value, - password: null, - role: ROLES.Creator - }) - ) - - return done(createdError, createdUser) - } - - passport.use(new GoogleStrategy(strategyOptions, verifyCallback)) - - app.get( - `/auth/google`, - (req, res, next) => { - // Save the url of the user's current page so the app can redirect back to it after authorization - if (req.headers.referer) {req.param.returnpage = req.headers.referer;} - next(); - }, - passport.authenticate('google', { - scope: [ - 'https://www.googleapis.com/auth/userinfo.profile', - 'https://www.googleapis.com/auth/userinfo.email' - ] - }) - ) - - app.get('/auth/google/callback', (req, res, next) => { - passport.authenticate('google', (err, user, info) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL+'/loginerror') - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - return res - .status(200) - .redirect(redirectUrl) - } - - req.login(user, async (err) => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [profileErr, profile] = await to(getObjectById(req.user.id)) - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })) - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id) - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err); - return res.status(500).send('Error authenticating the user.'); - } - } - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now() - } - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({_id: req.user._id, id: req.user.id, timeStamp: Date.now()}), { - httpOnly: true - }) - .redirect(redirectUrl) - - }); - })(req, res, next); - }); - - return app -} - -export { strategy } \ No newline at end of file + const strategyOptions = { + clientID: process.env.googleClientID, + clientSecret: process.env.googleClientSecret, + callbackURL: `/auth/google/callback`, + proxy: true, + }; + + const verifyCallback = async (accessToken, refreshToken, profile, done) => { + if (!profile.id || profile.id === '') return done('loginError'); + + let [err, user] = await to(getUserByProviderId(profile.id)); + if (err || user) { + return done(err, user); + } + + const verifiedEmail = profile.emails.find(email => email.verified) || profile.emails[0]; + + const [createdError, 
createdUser] = await to( + createUser({ + provider: profile.provider, + providerId: profile.id, + firstname: profile.name.givenName, + lastname: profile.name.familyName, + email: verifiedEmail.value, + password: null, + role: ROLES.Creator, + }) + ); + + return done(createdError, createdUser); + }; + + passport.use(new GoogleStrategy(strategyOptions, verifyCallback)); + + app.get( + `/auth/google`, + (req, res, next) => { + // Save the url of the user's current page so the app can redirect back to it after authorization + if (req.headers.referer) { + req.param.returnpage = req.headers.referer; + } + next(); + }, + passport.authenticate('google', { + scope: ['https://www.googleapis.com/auth/userinfo.profile', 'https://www.googleapis.com/auth/userinfo.email'], + }) + ); + + app.get('/auth/google/callback', (req, res, next) => { + passport.authenticate('google', (err, user, info) => { + if (err || !user) { + //loginError + if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); + + // failureRedirect + var redirect = '/'; + let returnPage = null; + + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + return res.status(200).redirect(redirectUrl); + } + + req.login(user, async err => { + if (err) { + return next(err); + } + + var redirect = '/'; + + let returnPage = null; + let queryStringParsed = null; + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + queryStringParsed = queryString.parse(returnPage.query); + } + + let [profileErr, profile] = await to(getObjectById(req.user.id)); + + if (!profile) { + await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); + return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); + } + + if (req.param.returnpage) { + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { + try { + redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); + } catch (err) { + console.error(err); + return res.status(500).send('Error authenticating the user.'); + } + } + + //Build event object for user login and log it to DB + let eventObj = { + userId: req.user.id, + event: `user_login_${req.user.provider}`, + timestamp: Date.now(), + }; + await eventLogController.logEvent(eventObj); + + return res + .status(200) + .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { + httpOnly: true, + secure: process.env.api_url ? 
true : false, + }) + .redirect(redirectUrl); + }); + })(req, res, next); + }); + + return app; +}; + +export { strategy }; diff --git a/src/resources/auth/strategies/index.js b/src/resources/auth/strategies/index.js index 6c88fff4..7c11bd56 100644 --- a/src/resources/auth/strategies/index.js +++ b/src/resources/auth/strategies/index.js @@ -1,6 +1,6 @@ -import { strategy as JWTStrategy } from './jwt' -import { strategy as GoogleStrategy } from './google' -import { strategy as LinkedinStrategy } from './linkedin' -import { strategy as OdicStrategy } from './oidc' +import { strategy as JWTStrategy } from './jwt'; +import { strategy as GoogleStrategy } from './google'; +import { strategy as LinkedinStrategy } from './linkedin'; +import { strategy as OdicStrategy } from './oidc'; -export { JWTStrategy, GoogleStrategy, LinkedinStrategy, OdicStrategy } \ No newline at end of file +export { JWTStrategy, GoogleStrategy, LinkedinStrategy, OdicStrategy }; diff --git a/src/resources/auth/strategies/jwt.js b/src/resources/auth/strategies/jwt.js index e91adef5..8fd4d191 100644 --- a/src/resources/auth/strategies/jwt.js +++ b/src/resources/auth/strategies/jwt.js @@ -1,44 +1,61 @@ -import passport from 'passport' -import passportJWT from 'passport-jwt' -import { to } from 'await-to-js' -import { getUserById } from '../../user/user.repository' -import { signToken } from '../utils' +import passport from 'passport'; +import passportJWT from 'passport-jwt'; +import { to } from 'await-to-js'; +import { getUserById } from '../../user/user.repository'; +import { signToken } from '../utils'; +import _ from 'lodash'; -const JWTStrategy = passportJWT.Strategy +const JWTStrategy = passportJWT.Strategy; const strategy = () => { - const strategyOptions = { - jwtFromRequest: req => req.cookies.jwt, - secretOrKey: process.env.JWTSecret, - passReqToCallback: true - } - - const verifyCallback = async (req, jwtPayload, cb) => { - if(typeof jwtPayload.data === 'string') { - jwtPayload.data = JSON.parse(jwtPayload.data); - } - const [err, user] = await to(getUserById(jwtPayload.data._id)) - - if (err) { - return cb(err) - } - req.user = user - return cb(null, user) - } - - passport.use(new JWTStrategy(strategyOptions, verifyCallback)) -} + const extractJWT = (req) => { + // 1. Default extract jwt from request cookie + let { cookies: { jwt = '' }} = req; + if(!_.isEmpty(jwt)) { + // 2. Return jwt if found in cookie + return jwt; + } + // 2. 
Fallback/external integration extracts jwt from authorization header + let { headers: { authorization = '' }} = req; + // If token contains bearer type, strip it and return jwt + if(authorization.split(' ')[0] === 'Bearer') { + jwt = authorization.split(' ')[1]; + } + return jwt; + } + + const strategyOptions = { + jwtFromRequest: extractJWT, + secretOrKey: process.env.JWTSecret, + passReqToCallback: true, + }; + + const verifyCallback = async (req, jwtPayload, cb) => { + if (typeof jwtPayload.data === 'string') { + jwtPayload.data = JSON.parse(jwtPayload.data); + } + const [err, user] = await to(getUserById(jwtPayload.data._id)); + + if (err) { + return cb(err); + } + req.user = user; + return cb(null, user); + }; + + passport.use(new JWTStrategy(strategyOptions, verifyCallback)); +}; const login = (req, user) => { - return new Promise((resolve, reject) => { - req.login(user, { session: false }, err => { - if (err) { - return reject(err) - } - - return resolve(signToken(user)) - }) - }) -} - -export { strategy, login } \ No newline at end of file + return new Promise((resolve, reject) => { + req.login(user, { session: false }, err => { + if (err) { + return reject(err); + } + + return resolve(signToken(user)); + }); + }); +}; + +export { strategy, login }; diff --git a/src/resources/auth/strategies/linkedin.js b/src/resources/auth/strategies/linkedin.js index 1a571d11..93d3cffb 100644 --- a/src/resources/auth/strategies/linkedin.js +++ b/src/resources/auth/strategies/linkedin.js @@ -1,153 +1,145 @@ -import passport from 'passport' -import passportLinkedin from 'passport-linkedin-oauth2' -import { to } from 'await-to-js' - -import { getUserByProviderId } from '../../user/user.repository' -import { getObjectById } from '../../tool/data.repository' -import { updateRedirectURL } from '../../user/user.service' -import { createUser } from '../../user/user.service' -import { signToken } from '../utils' -import { ROLES } from '../../user/user.roles' -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; +import passport from 'passport'; +import passportLinkedin from 'passport-linkedin-oauth2'; +import { to } from 'await-to-js'; + +import { getUserByProviderId } from '../../user/user.repository'; +import { getObjectById } from '../../tool/data.repository'; +import { updateRedirectURL } from '../../user/user.service'; +import { createUser } from '../../user/user.service'; +import { signToken } from '../utils'; +import { ROLES } from '../../user/user.roles'; +import queryString from 'query-string'; +import Url from 'url'; +import { discourseLogin } from '../sso/sso.discourse.service'; const eventLogController = require('../../eventlog/eventlog.controller'); -const LinkedinStrategy = passportLinkedin.OAuth2Strategy +const LinkedinStrategy = passportLinkedin.OAuth2Strategy; const strategy = app => { - const strategyOptions = { - clientID: process.env.linkedinClientID, - clientSecret: process.env.linkedinClientSecret, - callbackURL: `/auth/linkedin/callback`, - proxy: true - } - - const verifyCallback = async ( - accessToken, - refreshToken, - profile, - done - ) => { - if (!profile.id || profile.id === '') return done("loginError"); - - let [err, user] = await to(getUserByProviderId(profile.id)) - if (err || user) { - return done(err, user) - } - - const [createdError, createdUser] = await to( - createUser({ - provider: profile.provider, - providerId: profile.id, - firstname: profile.name.givenName, - lastname: 
profile.name.familyName, - email: '', - password: null, - role: ROLES.Creator - }) - ) - - return done(createdError, createdUser) - } - - passport.use(new LinkedinStrategy(strategyOptions, verifyCallback)) - - app.get( - `/auth/linkedin`, - (req, res, next) => { - // Save the url of the user's current page so the app can redirect back to it after authorization - if (req.headers.referer) {req.param.returnpage = req.headers.referer;} - next(); - }, - passport.authenticate('linkedin', { - scope: [ - 'r_emailaddress', - 'r_liteprofile' - ] - }) - ) - - app.get('/auth/linkedin/callback', (req, res, next) => { - passport.authenticate('linkedin', (err, user, info) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL+'/loginerror') - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - return res - .status(200) - .redirect(redirectUrl) - } - - req.login(user, async (err) => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [profileErr, profile] = await to(getObjectById(req.user.id)) - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })) - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id) - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err); - return res.status(500).send('Error authenticating the user.'); - } - } - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now() - } - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({_id: req.user._id, id: req.user.id, timeStamp: Date.now()}), { - httpOnly: true - }) - .redirect(redirectUrl) - - }); - })(req, res, next); - }); - - return app -} - -export { strategy } \ No newline at end of file + const strategyOptions = { + clientID: process.env.linkedinClientID, + clientSecret: process.env.linkedinClientSecret, + callbackURL: `/auth/linkedin/callback`, + proxy: true, + }; + + const verifyCallback = async (accessToken, refreshToken, profile, done) => { + if (!profile.id || profile.id === '') return done('loginError'); + + let [err, user] = await to(getUserByProviderId(profile.id)); + if (err || user) { + return done(err, user); + } + + const [createdError, createdUser] = await to( + createUser({ + provider: profile.provider, + providerId: profile.id, + firstname: profile.name.givenName, + lastname: profile.name.familyName, + email: '', + password: null, + role: ROLES.Creator, + }) + ); + + return done(createdError, createdUser); + }; + + passport.use(new LinkedinStrategy(strategyOptions, verifyCallback)); + + app.get( + `/auth/linkedin`, + (req, res, next) => { + // Save the url of the user's current page so the app can 
redirect back to it after authorization + if (req.headers.referer) { + req.param.returnpage = req.headers.referer; + } + next(); + }, + passport.authenticate('linkedin', { + scope: ['r_emailaddress', 'r_liteprofile'], + }) + ); + + app.get('/auth/linkedin/callback', (req, res, next) => { + passport.authenticate('linkedin', (err, user, info) => { + if (err || !user) { + //loginError + if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); + + // failureRedirect + var redirect = '/'; + let returnPage = null; + + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + return res.status(200).redirect(redirectUrl); + } + + req.login(user, async err => { + if (err) { + return next(err); + } + + var redirect = '/'; + + let returnPage = null; + let queryStringParsed = null; + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + queryStringParsed = queryString.parse(returnPage.query); + } + + let [profileErr, profile] = await to(getObjectById(req.user.id)); + + if (!profile) { + await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); + return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); + } + + if (req.param.returnpage) { + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { + try { + redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); + } catch (err) { + console.error(err); + return res.status(500).send('Error authenticating the user.'); + } + } + + //Build event object for user login and log it to DB + let eventObj = { + userId: req.user.id, + event: `user_login_${req.user.provider}`, + timestamp: Date.now(), + }; + await eventLogController.logEvent(eventObj); + + return res + .status(200) + .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { + httpOnly: true, + secure: process.env.api_url ? 
true : false, + }) + .redirect(redirectUrl); + }); + })(req, res, next); + }); + + return app; +}; + +export { strategy }; diff --git a/src/resources/auth/strategies/oidc.js b/src/resources/auth/strategies/oidc.js index 15d5c66d..18842969 100644 --- a/src/resources/auth/strategies/oidc.js +++ b/src/resources/auth/strategies/oidc.js @@ -1,153 +1,149 @@ -import passport from 'passport' -import passportOidc from 'passport-openidconnect' -import { to } from 'await-to-js' - -import { getUserByProviderId } from '../../user/user.repository' -import { getObjectById } from '../../tool/data.repository' -import { updateRedirectURL } from '../../user/user.service' -import { createUser } from '../../user/user.service' -import { signToken } from '../utils' -import { ROLES } from '../../user/user.roles' -import queryString from 'query-string'; -import Url from 'url'; -import { discourseLogin } from '../sso/sso.discourse.service'; - -const OidcStrategy = passportOidc.Strategy +import passport from 'passport'; +import passportOidc from 'passport-openidconnect'; +import { to } from 'await-to-js'; + +import { getUserByProviderId } from '../../user/user.repository'; +import { getObjectById } from '../../tool/data.repository'; +import { updateRedirectURL } from '../../user/user.service'; +import { createUser } from '../../user/user.service'; +import { signToken } from '../utils'; +import { ROLES } from '../../user/user.roles'; +import queryString from 'query-string'; +import Url from 'url'; +import { discourseLogin } from '../sso/sso.discourse.service'; + +const OidcStrategy = passportOidc.Strategy; const baseAuthUrl = process.env.AUTH_PROVIDER_URI; const eventLogController = require('../../eventlog/eventlog.controller'); const strategy = app => { - const strategyOptions = { - issuer: baseAuthUrl, - authorizationURL: baseAuthUrl + "/oidc/auth", - tokenURL: baseAuthUrl + "/oidc/token", - userInfoURL: baseAuthUrl + "/oidc/userinfo", - clientID: process.env.openidClientID, - clientSecret: process.env.openidClientSecret, - callbackURL: `/auth/oidc/callback`, - proxy: true - } - - const verifyCallback = async ( - accessToken, - refreshToken, - profile, - done - ) => { - if (!profile || !profile._json || !profile._json.eduPersonTargetedID || profile._json.eduPersonTargetedID === '') return done("loginError"); - - let [err, user] = await to(getUserByProviderId(profile._json.eduPersonTargetedID)) - if (err || user) { - return done(err, user) - } - - const [createdError, createdUser] = await to( - createUser({ - provider: 'oidc', - providerId: profile._json.eduPersonTargetedID, - firstname: '', - lastname: '', - email: '', - password: null, - role: ROLES.Creator - }) - ) - - return done(createdError, createdUser) - } - - passport.use('oidc', new OidcStrategy(strategyOptions, verifyCallback)) - - app.get( - `/auth/oidc`, - (req, res, next) => { - // Save the url of the user's current page so the app can redirect back to it after authorization - if (req.headers.referer) {req.param.returnpage = req.headers.referer;} - next(); - }, - passport.authenticate('oidc') - ) - - app.get('/auth/oidc/callback', (req, res, next) => { - passport.authenticate('oidc', (err, user, info) => { - if (err || !user) { - //loginError - if (err === 'loginError') return res.status(200).redirect(process.env.homeURL+'/loginerror') - - // failureRedirect - var redirect = '/'; - let returnPage = null; - - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - delete req.param.returnpage; - } - - 
let redirectUrl = process.env.homeURL + redirect; - - return res - .status(200) - .redirect(redirectUrl) - } - - req.login(user, async (err) => { - if (err) { - return next(err); - } - - var redirect = '/'; - - let returnPage = null; - let queryStringParsed = null; - if (req.param.returnpage) { - returnPage = Url.parse(req.param.returnpage); - redirect = returnPage.path; - queryStringParsed = queryString.parse(returnPage.query); - } - - let [profileErr, profile] = await to(getObjectById(req.user.id)) - - if (!profile) { - await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })) - return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id) - } - - if (req.param.returnpage) { - delete req.param.returnpage; - } - - let redirectUrl = process.env.homeURL + redirect; - - if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { - try { - redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); - } catch (err) { - console.error(err); - return res.status(500).send('Error authenticating the user.'); - } - } - - //Build event object for user login and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_login_${req.user.provider}`, - timestamp: Date.now() - } - await eventLogController.logEvent(eventObj); - - return res - .status(200) - .cookie('jwt', signToken({_id: req.user._id, id: req.user.id, timeStamp: Date.now()}), { - httpOnly: true - }) - .redirect(redirectUrl) - - }); - })(req, res, next); - }); - - return app -} - -export { strategy } \ No newline at end of file + const strategyOptions = { + issuer: baseAuthUrl, + authorizationURL: baseAuthUrl + '/oidc/auth', + tokenURL: baseAuthUrl + '/oidc/token', + userInfoURL: baseAuthUrl + '/oidc/userinfo', + clientID: process.env.openidClientID, + clientSecret: process.env.openidClientSecret, + callbackURL: `/auth/oidc/callback`, + proxy: true, + }; + + const verifyCallback = async (accessToken, refreshToken, profile, done) => { + if (!profile || !profile._json || !profile._json.eduPersonTargetedID || profile._json.eduPersonTargetedID === '') + return done('loginError'); + + let [err, user] = await to(getUserByProviderId(profile._json.eduPersonTargetedID)); + if (err || user) { + return done(err, user); + } + + const [createdError, createdUser] = await to( + createUser({ + provider: 'oidc', + providerId: profile._json.eduPersonTargetedID, + firstname: '', + lastname: '', + email: '', + password: null, + role: ROLES.Creator, + }) + ); + + return done(createdError, createdUser); + }; + + passport.use('oidc', new OidcStrategy(strategyOptions, verifyCallback)); + + app.get( + `/auth/oidc`, + (req, res, next) => { + // Save the url of the user's current page so the app can redirect back to it after authorization + if (req.headers.referer) { + req.param.returnpage = req.headers.referer; + } + next(); + }, + passport.authenticate('oidc') + ); + + app.get('/auth/oidc/callback', (req, res, next) => { + passport.authenticate('oidc', (err, user, info) => { + if (err || !user) { + //loginError + if (err === 'loginError') return res.status(200).redirect(process.env.homeURL + '/loginerror'); + + // failureRedirect + var redirect = '/'; + let returnPage = null; + + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + return res.status(200).redirect(redirectUrl); + } + + req.login(user, async err => { + if (err) { + return 
next(err); + } + + var redirect = '/'; + + let returnPage = null; + let queryStringParsed = null; + if (req.param.returnpage) { + returnPage = Url.parse(req.param.returnpage); + redirect = returnPage.path; + queryStringParsed = queryString.parse(returnPage.query); + } + + let [profileErr, profile] = await to(getObjectById(req.user.id)); + + if (!profile) { + await to(updateRedirectURL({ id: req.user.id, redirectURL: redirect })); + return res.redirect(process.env.homeURL + '/completeRegistration/' + req.user.id); + } + + if (req.param.returnpage) { + delete req.param.returnpage; + } + + let redirectUrl = process.env.homeURL + redirect; + + if (queryStringParsed && queryStringParsed.sso && queryStringParsed.sig) { + try { + redirectUrl = discourseLogin(queryStringParsed.sso, queryStringParsed.sig, req.user); + } catch (err) { + console.error(err); + return res.status(500).send('Error authenticating the user.'); + } + } + + //Build event object for user login and log it to DB + let eventObj = { + userId: req.user.id, + event: `user_login_${req.user.provider}`, + timestamp: Date.now(), + }; + await eventLogController.logEvent(eventObj); + + return res + .status(200) + .cookie('jwt', signToken({ _id: req.user._id, id: req.user.id, timeStamp: Date.now() }), { + httpOnly: true, + secure: process.env.api_url ? true : false, + }) + .redirect(redirectUrl); + }); + })(req, res, next); + }); + + return app; +}; + +export { strategy }; diff --git a/src/resources/auth/utils.js b/src/resources/auth/utils.js index 1013b8ad..9fc17b10 100644 --- a/src/resources/auth/utils.js +++ b/src/resources/auth/utils.js @@ -1,32 +1,29 @@ -import passport from 'passport' -import jwt from "jsonwebtoken" -import { UserModel } from '../user/user.model' -import bcrypt from "bcrypt" -import { ROLES } from '../user/user.roles' +import passport from 'passport'; +import jwt from 'jsonwebtoken'; +import { UserModel } from '../user/user.model'; +import bcrypt from 'bcrypt'; +import { ROLES } from '../user/user.roles'; const setup = () => { - passport.serializeUser((user, done) => done(null, user._id)) + passport.serializeUser((user, done) => done(null, user._id)); - passport.deserializeUser(async (id, done) => { - try { - const user = await UserModel.findById(id) - return done(null, user) - } catch (err) { - return done(err, null) - } - }) -} + passport.deserializeUser(async (id, done) => { + try { + const user = await UserModel.findById(id); + return done(null, user); + } catch (err) { + return done(err, null); + } + }); +}; -const signToken = (user) => { - return jwt.sign( - { data: user }, - process.env.JWTSecret, - { //Here change it so only id - algorithm: 'HS256', - expiresIn: 604800 - } - ) -} +const signToken = (user, expiresIn = 604800) => { + return jwt.sign({ data: user }, process.env.JWTSecret, { + //Here change it so only id + algorithm: 'HS256', + expiresIn + }); +}; const camundaToken = () => { return jwt.sign( @@ -34,7 +31,7 @@ const camundaToken = () => { // username: An admin user the exists within the camunda-admin group // groupIds: The admin group that has been configured on the camunda portal. 
{ username: process.env.BPMN_ADMIN_USER, groupIds: ["camunda-admin"], tenantIds: []}, - process.env.JWTSecret, + process.env.JWTSecret || "local", { //Here change it so only id algorithm: 'HS256', expiresIn: 604800 @@ -43,49 +40,48 @@ const camundaToken = () => { } const hashPassword = async password => { - if (!password) { - throw new Error('Password was not provided') - } + if (!password) { + throw new Error('Password was not provided'); + } - const salt = await bcrypt.genSalt(10) - return await bcrypt.hash(password, salt) -} + const salt = await bcrypt.genSalt(10); + return await bcrypt.hash(password, salt); +}; const verifyPassword = async (candidate, actual) => { - return await bcrypt.compare(candidate, actual) -} + return await bcrypt.compare(candidate, actual); +}; const checkIsInRole = (...roles) => (req, res, next) => { - if (!req.user) { - return res.redirect('/login') - } + if (!req.user) { + return res.redirect('/login'); + } - const hasRole = roles.find(role => req.user.role === role) - if (!hasRole) { - return res.redirect('/login') - } + const hasRole = roles.find(role => req.user.role === role); + if (!hasRole) { + return res.redirect('/login'); + } - return next() -} + return next(); +}; -const whatIsRole = (req) => { - if (!req.user) { - return "Reader"; - } - else { - return req.user.role - } -} +const whatIsRole = req => { + if (!req.user) { + return 'Reader'; + } else { + return req.user.role; + } +}; const getRedirectUrl = role => { - switch (role) { - case ROLES.Admin: - return '/admin-dashboard' - case ROLES.Creator: - return '/customer-dashboard' - default: - return '/' - } -} + switch (role) { + case ROLES.Admin: + return '/admin-dashboard'; + case ROLES.Creator: + return '/customer-dashboard'; + default: + return '/'; + } +}; -export { setup, signToken, camundaToken, hashPassword, verifyPassword, checkIsInRole, getRedirectUrl, whatIsRole } \ No newline at end of file +export { setup, signToken, camundaToken, hashPassword, verifyPassword, checkIsInRole, getRedirectUrl, whatIsRole }; diff --git a/src/resources/bpmnworkflow/bpmnworkflow.controller.js b/src/resources/bpmnworkflow/bpmnworkflow.controller.js index 20726352..06452397 100644 --- a/src/resources/bpmnworkflow/bpmnworkflow.controller.js +++ b/src/resources/bpmnworkflow/bpmnworkflow.controller.js @@ -1,11 +1,14 @@ import axios from 'axios'; import axiosRetry from 'axios-retry'; import _ from 'lodash'; -import { utils } from "../auth"; +import { utils } from '../auth'; -axiosRetry(axios, { retries: 3, retryDelay: () => { - return 3000; - }}); +axiosRetry(axios, { + retries: 3, + retryDelay: () => { + return 3000; + }, +}); const bpmnBaseUrl = process.env.BPMNBASEURL; //Generate Bearer token for camunda endpoints @@ -14,150 +17,143 @@ const config = { }; module.exports = { - //Generic Get Task Process Endpoints - getProcess: async (businessKey) => { - return await axios.get(`${bpmnBaseUrl}/engine-rest/task?processInstanceBusinessKey=${businessKey.toString()}`, config); - }, + //Generic Get Task Process Endpoints + getProcess: async businessKey => { + return await axios.get(`${bpmnBaseUrl}/engine-rest/task?processInstanceBusinessKey=${businessKey.toString()}`, config); + }, - //Simple Workflow Endpoints - postCreateProcess: async (bpmContext) => { - // Create Axios requet to start Camunda process - let { applicationStatus, dateSubmitted, publisher, actioner, businessKey } = bpmContext; - let data = { - "variables": { - "applicationStatus": { - "value": applicationStatus, - "type": "String" - }, - 
"dateSubmitted": { - "value": dateSubmitted, - "type": "String" - }, - "publisher": { - "value": publisher, - "type": "String" - }, - "actioner" : { - "value": actioner, - "type": "String" - } - }, - "businessKey": businessKey.toString() - } - await axios.post(`${bpmnBaseUrl}/engine-rest/process-definition/key/GatewayWorkflowSimple/start`, data, config) - .catch((err) => { - console.error(err); - }); - }, - postUpdateProcess: async (bpmContext) => { - // Create Axios requet to start Camunda process - let { taskId, applicationStatus, dateSubmitted, publisher, actioner, archived } = bpmContext; - let data = { - "variables": { - "applicationStatus": { - "value": applicationStatus, - "type": "String" - }, - "dateSubmitted": { - "value": dateSubmitted, - "type": "String" - }, - "publisher": { - "value": publisher, - "type": "String" - }, - "actioner" : { - "value": actioner, - "type": "String" - }, - "archived" :{ - "value": archived, - "type": "Boolean" - } - } - } - await axios.post(`${bpmnBaseUrl}/engine-rest/task/${taskId}/complete`, data, config) - .catch((err) => { - console.error(err); - }); - }, + //Simple Workflow Endpoints + postCreateProcess: async bpmContext => { + // Create Axios requet to start Camunda process + let { applicationStatus, dateSubmitted, publisher, actioner, businessKey } = bpmContext; + let data = { + variables: { + applicationStatus: { + value: applicationStatus, + type: 'String', + }, + dateSubmitted: { + value: dateSubmitted, + type: 'String', + }, + publisher: { + value: publisher, + type: 'String', + }, + actioner: { + value: actioner, + type: 'String', + }, + }, + businessKey: businessKey.toString(), + }; + await axios.post(`${bpmnBaseUrl}/engine-rest/process-definition/key/GatewayWorkflowSimple/start`, data, config).catch(err => { + console.error(err); + }); + }, + postUpdateProcess: async bpmContext => { + // Create Axios requet to start Camunda process + let { taskId, applicationStatus, dateSubmitted, publisher, actioner, archived } = bpmContext; + let data = { + variables: { + applicationStatus: { + value: applicationStatus, + type: 'String', + }, + dateSubmitted: { + value: dateSubmitted, + type: 'String', + }, + publisher: { + value: publisher, + type: 'String', + }, + actioner: { + value: actioner, + type: 'String', + }, + archived: { + value: archived, + type: 'Boolean', + }, + }, + }; + await axios.post(`${bpmnBaseUrl}/engine-rest/task/${taskId}/complete`, data, config).catch(err => { + console.error(err); + }); + }, - //Complex Workflow Endpoints - postStartPreReview: async (bpmContext) => { - //Start pre-review process - let { applicationStatus, dateSubmitted, publisher, businessKey } = bpmContext; - let data = { - "variables": { - "applicationStatus": { - "value": applicationStatus, - "type": "String" - }, - "dateSubmitted": { - "value": dateSubmitted, - "type": "String" - }, - "publisher": { - "value": publisher, - "type": "String" - } - }, - "businessKey": businessKey.toString() - } - await axios.post(`${bpmnBaseUrl}/engine-rest/process-definition/key/GatewayReviewWorkflowComplex/start`, data, config) - .catch((err) => { - console.error(err); - }); - }, - postStartManagerReview: async (bpmContext) => { - // Start manager-review process - let { applicationStatus, managerId, publisher, notifyManager, taskId } = bpmContext; - let data = { - "variables": { - "applicationStatus": { - "value": applicationStatus, - "type": "String" - }, - "userId": { - "value": managerId, - "type": "String" - }, - "publisher": { - "value": publisher, - "type": "String" 
- }, - "notifyManager": { - "value": notifyManager, - "type": "String" - } - } - } - await axios.post(`${bpmnBaseUrl}/engine-rest/task/${taskId}/complete`, data, config) - .catch((err) => { - console.error(err); - }); - }, - postManagerApproval: async (bpmContext) => { - // Manager has approved sectoin - let { businessKey } = bpmContext; - await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/manager/completed/${businessKey}`, bpmContext. config) - .catch((err) => { - console.error(err); - }) - }, - postStartStepReview: async (bpmContext) => { - //Start Step-Review process - let { businessKey } = bpmContext; - await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/complete/review/${businessKey}`, bpmContext, config) - .catch((err) => { - console.error(err); - }); - }, - postCompleteReview: async (bpmContext) => { - //Start Next-Step process - let { businessKey } = bpmContext; - await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/reviewer/complete/${businessKey}`, bpmContext, config) - .catch((err) => { - console.error(err); - }); - } -} \ No newline at end of file + //Complex Workflow Endpoints + postStartPreReview: async bpmContext => { + //Start pre-review process + let { applicationStatus, dateSubmitted, publisher, businessKey } = bpmContext; + let data = { + variables: { + applicationStatus: { + value: applicationStatus, + type: 'String', + }, + dateSubmitted: { + value: dateSubmitted, + type: 'String', + }, + publisher: { + value: publisher, + type: 'String', + }, + }, + businessKey: businessKey.toString(), + }; + await axios.post(`${bpmnBaseUrl}/engine-rest/process-definition/key/GatewayReviewWorkflowComplex/start`, data, config).catch(err => { + console.error(err); + }); + }, + postStartManagerReview: async bpmContext => { + // Start manager-review process + let { applicationStatus, managerId, publisher, notifyManager, taskId } = bpmContext; + let data = { + variables: { + applicationStatus: { + value: applicationStatus, + type: 'String', + }, + userId: { + value: managerId, + type: 'String', + }, + publisher: { + value: publisher, + type: 'String', + }, + notifyManager: { + value: notifyManager, + type: 'String', + }, + }, + }; + await axios.post(`${bpmnBaseUrl}/engine-rest/task/${taskId}/complete`, data, config).catch(err => { + console.error(err); + }); + }, + postManagerApproval: async bpmContext => { + // Manager has approved sectoin + let { businessKey } = bpmContext; + await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/manager/completed/${businessKey}`, bpmContext.config).catch(err => { + console.error(err); + }); + }, + postStartStepReview: async bpmContext => { + //Start Step-Review process + let { businessKey } = bpmContext; + await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/complete/review/${businessKey}`, bpmContext, config).catch(err => { + console.error(err); + }); + }, + postCompleteReview: async bpmContext => { + //Start Next-Step process + let { businessKey } = bpmContext; + await axios.post(`${bpmnBaseUrl}/api/gateway/workflow/v1/reviewer/complete/${businessKey}`, bpmContext, config).catch(err => { + console.error(err); + }); + }, +}; diff --git a/src/resources/collections/collections.model.js b/src/resources/collections/collections.model.js index db50f48a..bc470d10 100644 --- a/src/resources/collections/collections.model.js +++ b/src/resources/collections/collections.model.js @@ -1,32 +1,32 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -// this will be our data base's data structure +// this will be 
our data base's data structure const CollectionSchema = new Schema( - { - id: Number, - name: String, - description: String, - imageLink: String, - authors: [Number], - // emailNotifications: Boolean, - counter: Number, - discourseTopicId: Number, - relatedObjects: [ - { - objectId: String, - reason: String, - pid: String, - objectType: String, - user: String, - updated: String, - }, - ], - activeflag: String, - }, - { - collection: 'collections', //will be created when first posting - timestamps: true, - } + { + id: Number, + name: String, + description: String, + imageLink: String, + authors: [Number], + // emailNotifications: Boolean, + counter: Number, + discourseTopicId: Number, + relatedObjects: [ + { + objectId: String, + reason: String, + pid: String, + objectType: String, + user: String, + updated: String, + }, + ], + activeflag: String, + }, + { + collection: 'collections', //will be created when first posting + timestamps: true, + } ); -export const Collections = model('Collections', CollectionSchema) \ No newline at end of file +export const Collections = model('Collections', CollectionSchema); diff --git a/src/resources/collections/collections.repository.js b/src/resources/collections/collections.repository.js new file mode 100644 index 00000000..dcf842c6 --- /dev/null +++ b/src/resources/collections/collections.repository.js @@ -0,0 +1,83 @@ +import { Data } from '../tool/data.model'; +import { Course } from '../course/course.model'; +import { Collections } from './collections.model'; +import _ from 'lodash'; + +const getCollectionObjects = async (req, res) => { + let relatedObjects = []; + await Collections.find( + { id: parseInt(req.params.collectionID) }, + { 'relatedObjects._id': 1, 'relatedObjects.objectId': 1, 'relatedObjects.objectType': 1, 'relatedObjects.pid': 1 } + ).then(async res => { + await new Promise(async (resolve, reject) => { + if (_.isEmpty(res)) { + reject(`Collection not found for Id: ${req.params.collectionID}.`); + } else { + for (let object of res[0].relatedObjects) { + let relatedObject = await getCollectionObject(object.objectId, object.objectType, object.pid); + if (!_.isUndefined(relatedObject)) { + relatedObjects.push(relatedObject); + } else { + await Collections.findOneAndUpdate( + { id: parseInt(req.params.collectionID) }, + { $pull: { relatedObjects: { _id: object._id } } } + ); + } + } + resolve(relatedObjects); + } + }); + }); + return relatedObjects; +}; + +function getCollectionObject(objectId, objectType, pid) { + let id = pid && pid.length > 0 ? pid : objectId; + + return new Promise(async (resolve, reject) => { + let data; + if (!isNaN(id) && objectType !== 'course') { + data = await Data.find( + { id: parseInt(id) }, + { + id: 1, + type: 1, + activeflag: 1, + tags: 1, + description: 1, + name: 1, + persons: 1, + categories: 1, + programmingLanguage: 1, + firstname: 1, + lastname: 1, + bio: 1, + authors: 1, + } + ).populate([{ path: 'persons', options: { select: { id: 1, firstname: 1, lastname: 1 } } }]); + } else if (!isNaN(id) && objectType === 'course') { + data = await Course.find( + { id: parseInt(id) }, + { id: 1, type: 1, activeflag: 1, title: 1, provider: 1, courseOptions: 1, award: 1, domains: 1, tags: 1, description: 1 } + ); + } else { + // 1. Search for a dataset based on pid + data = await Data.find( + { pid: id, activeflag: 'active' }, + { id: 1, datasetid: 1, pid: 1, type: 1, activeflag: 1, name: 1, datasetv2: 1, datasetfields: 1, tags: 1, description: 1 } + ); + // 2. 
If dataset not found search for a dataset based on datasetID + if (!data || data.length <= 0) { + data = await Data.find({ datasetid: id }, { datasetid: 1, pid: 1 }); + // 3. Use retrieved dataset's pid to search by pid again + data = await Data.find( + { pid: data[0].pid, activeflag: 'active' }, + { id: 1, datasetid: 1, pid: 1, type: 1, activeflag: 1, name: 1, datasetv2: 1, datasetfields: 1, tags: 1, description: 1 } + ); + } + } + resolve(data[0]); + }); +} + +export { getCollectionObjects }; diff --git a/src/resources/collections/collections.route.js b/src/resources/collections/collections.route.js index 007be96b..92148555 100644 --- a/src/resources/collections/collections.route.js +++ b/src/resources/collections/collections.route.js @@ -1,317 +1,305 @@ -import express from 'express' -import { ROLES } from '../user/user.roles' -import passport from "passport"; -import { utils } from "../auth"; +import express from 'express'; +import { ROLES } from '../user/user.roles'; +import passport from 'passport'; +import { utils } from '../auth'; // import { UserModel } from '../user/user.model' import { Collections } from '../collections/collections.model'; import { Data } from '../tool/data.model'; import { MessagesModel } from '../message/message.model'; -import { UserModel } from '../user/user.model' +import { UserModel } from '../user/user.model'; import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; import _ from 'lodash'; import escape from 'escape-html'; +import { getCollectionObjects } from './collections.repository'; + const inputSanitizer = require('../utilities/inputSanitizer'); const urlValidator = require('../utilities/urlValidator'); const hdrukEmail = `enquiry@healthdatagateway.org`; -const router = express.Router() +const router = express.Router(); router.get('/:collectionID', async (req, res) => { - var q = Collections.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.collectionID) }] } }, - - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } } - - ]); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - - if (_.isEmpty(data)) return res.status(404).send(`Collection not found for Id: ${escape(req.params.collectionID)}`); - - data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); - return res.json({ success: true, data: data }); - }); -}) - - router.get('/entityid/:entityID', async (req, res) => { - let entityID = req.params.entityID - let dataVersions = await Data.find({ pid: entityID }, { _id: 0, datasetid: 1 }); - let dataVersionsArray = dataVersions.map(a => a.datasetid); - dataVersionsArray.push(entityID); - - var q = Collections.aggregate([ - { - $match: { - $and: [ - { - relatedObjects: { - $elemMatch: { - $or: [ - { - objectId: { $in: dataVersionsArray }, - }, - { - pid: entityID, - } - ], - }, - }, - }, - { publicflag: true }, - { activeflag: 'active' }, - ], - }, - }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - { - $project: { _id: 1, id: 1, name: 1, description: 1, imageLink: 1, relatedObjects: 1, 'persons.firstname': 1, 'persons.lastname': 1 }, - }, - ]); - - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - }); - - router.put('/edit', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - const collectionCreator = 
req.body.collectionCreator; - var { id, name, description, imageLink, authors, relatedObjects } = req.body; - imageLink = urlValidator.validateURL(imageLink); - - Collections.findOneAndUpdate({ id: id }, - { - name: inputSanitizer.removeNonBreakingSpaces(name), - description: inputSanitizer.removeNonBreakingSpaces(description), - imageLink: imageLink, - authors: authors, - relatedObjects: relatedObjects - }, (err) => { - if (err) { - return res.json({ success: false, error: err }); - } - }).then(() => { - return res.json({ success: true }); - }) - }); - - router.post('/add', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - let collections = new Collections(); - - const collectionCreator = req.body.collectionCreator; - - const { name, description, imageLink, authors, relatedObjects } = req.body; - - collections.id = parseInt(Math.random().toString().replace('0.', '')); - collections.name = inputSanitizer.removeNonBreakingSpaces(name); - collections.description = inputSanitizer.removeNonBreakingSpaces(description); - collections.imageLink = imageLink; - collections.authors = authors; - collections.relatedObjects = relatedObjects; - collections.activeflag = 'active'; - - try { - if (collections.authors) { - collections.authors.forEach(async (authorId) => { - await createMessage(authorId, collections, collections.activeflag, collectionCreator); - }); - } - await createMessage(0, collections, collections.activeflag, collectionCreator); - - // Send email notifications to all admins and authors who have opted in - await sendEmailNotifications(collections, collections.activeflag, collectionCreator); - - } catch (err) { - console.log(err); - // return res.status(500).json({ success: false, error: err }); - } - - collections.save((err) => { - if (err) { - return res.json({ success: false, error: err }) - } else { - return res.json({ success: true, id: collections.id }) - } - }); - - }); - - router.put('/status', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - - var { id, activeflag } = req.body; - var isAuthorAdmin = false; - - var q = Collections.aggregate([ - { $match: { $and: [{ id: parseInt(req.body.id) }, { authors: req.user.id }] } } - ]); - q.exec((err, data) => { - if (data.length === 1) { - isAuthorAdmin = true; - } - - if (req.user.role === 'Admin') { - isAuthorAdmin = true; - } - - if (isAuthorAdmin) { - Collections.findOneAndUpdate({ id: id }, - { - activeflag: activeflag - }, (err) => { - if (err) { - return res.json({ success: false, error: err }); - } - }).then(() => { - return res.json({ success: true }); - }) - - } else { - return res.json({ success: false, error: 'Not authorised' }); - } - }); - }); - - router.delete('/delete/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - var isAuthorAdmin = false; - - var q = Collections.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.id) }, { authors: req.user.id }] } } - ]); - q.exec((err, data) => { - - if (data.length === 1) { - isAuthorAdmin = true; - } - - if (req.user.role === 'Admin') { - isAuthorAdmin = true; - } - - if (isAuthorAdmin) { - Collections.findOneAndRemove({ id: req.params.id }, (err) => { - if (err) return res.send(err); - return res.json({ success: true }); - }); - - } else { - return res.json({ success: false, error: 'Not authorised' }); - } - }); - }); - - module.exports = router; - - async function createMessage(authorId, 
collections, activeflag, collectionCreator) { - let message = new MessagesModel(); - - const collectionLink = process.env.homeURL + '/collection/' + collections.id; - const messageRecipients = await UserModel.find({ $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] }); - async function saveMessage() { - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = authorId; - message.messageObjectID = collections.id; - message.messageSent = Date.now(); - message.isRead = false; - await message.save(); - } - - if (authorId === 0) { - message.messageType = 'added collection'; - message.messageDescription = `${collectionCreator.name} added a new collection: ${collections.name}.` - saveMessage(); - } - - for (let messageRecipient of messageRecipients) { - if (activeflag === 'active' && authorId === messageRecipient.id && authorId === collectionCreator.id) { - message.messageType = 'added collection'; - message.messageDescription = `Your new collection ${collections.name} has been added.` - saveMessage(); - } - else if (activeflag === 'active' && authorId === messageRecipient.id && authorId !== collectionCreator.id) { - message.messageType = 'added collection'; - message.messageDescription = `${collectionCreator.name} added you as a collaborator on the new collection ${collections.name}.` - saveMessage(); - } - } - - //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS - // else if (activeflag === 'archive') { - // message.messageType = 'rejected'; - // message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${collectionLink}` - // } - } - - async function sendEmailNotifications(collections, activeflag, collectionCreator) { - let subject; - let html; - // 1. Generate URL for linking collection in email - const collectionLink = process.env.homeURL + '/collection/' + collections.id; - - // 2. Build email body - emailRecipients.map((emailRecipient) => { - if (activeflag === 'active' && emailRecipient.role === 'Admin') { - subject = `New collection ${collections.name} has been added and is now live` - html = `New collection ${collections.name} has been added and is now live

${collectionLink}` - } - - collections.authors.map((author) => { - if (activeflag === 'active' && author === emailRecipient.id && author === collectionCreator.id) { - subject = `Your collection ${collections.name} has been added and is now live` - html = `Your collection ${collections.name} has been added and is now live

${collectionLink}` - } else if (activeflag === 'active' && author === emailRecipient.id && author !== collectionCreator.id) { - subject = `You have been added as a collaborator on collection ${collections.name}` - html = `${collectionCreator.name} has added you as a collaborator to the collection ${collections.name} which is now live

${collectionLink}` - } - }) - }) - - if (activeflag === 'active') { - subject = `Your collection ${collections.name} has been approved and is now live` - html = `Your collection ${collections.name} has been approved and is now live

${collectionLink}` - } - //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS - // else if (activeflag === 'archive') { - // subject = `Your collection ${collections.name} has been rejected` - // html = `Your collection ${collections.name} has been rejected

${collectionLink}` - // } - - // 3. Query Db for all admins or authors of the collection who have opted in to email updates - var q = UserModel.aggregate([ - // Find all users who are admins or authors of this collection - { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html - ); - }); - } + var q = Collections.aggregate([ + { $match: { $and: [{ id: parseInt(req.params.collectionID) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + + if (_.isEmpty(data)) return res.status(404).send(`Collection not found for Id: ${escape(req.params.collectionID)}`); + + data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); + return res.json({ success: true, data: data }); + }); +}); + +router.get('/relatedobjects/:collectionID', async (req, res) => { + await getCollectionObjects(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); + +router.get('/entityid/:entityID', async (req, res) => { + let entityID = req.params.entityID; + let dataVersions = await Data.find({ pid: entityID }, { _id: 0, datasetid: 1 }); + let dataVersionsArray = dataVersions.map(a => a.datasetid); + dataVersionsArray.push(entityID); + + var q = Collections.aggregate([ + { + $match: { + $and: [ + { + relatedObjects: { + $elemMatch: { + $or: [ + { + objectId: { $in: dataVersionsArray }, + }, + { + pid: entityID, + }, + ], + }, + }, + }, + { publicflag: true }, + { activeflag: 'active' }, + ], + }, + }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { + $project: { _id: 1, id: 1, name: 1, description: 1, imageLink: 1, relatedObjects: 1, 'persons.firstname': 1, 'persons.lastname': 1 }, + }, + ]); + + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); +}); + +router.put('/edit', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + const collectionCreator = req.body.collectionCreator; + var { id, name, description, imageLink, authors, relatedObjects } = req.body; + imageLink = urlValidator.validateURL(imageLink); + + Collections.findOneAndUpdate( + { id: id }, + { + name: inputSanitizer.removeNonBreakingSpaces(name), + description: inputSanitizer.removeNonBreakingSpaces(description), + imageLink: imageLink, + authors: authors, + relatedObjects: relatedObjects, + }, + err => { + if (err) { + return res.json({ success: false, error: err }); + } + } + ).then(() => { + return res.json({ success: true }); + }); +}); + +router.post('/add', passport.authenticate('jwt'), 
utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + let collections = new Collections(); + + const collectionCreator = req.body.collectionCreator; + + const { name, description, imageLink, authors, relatedObjects } = req.body; + + collections.id = parseInt(Math.random().toString().replace('0.', '')); + collections.name = inputSanitizer.removeNonBreakingSpaces(name); + collections.description = inputSanitizer.removeNonBreakingSpaces(description); + collections.imageLink = imageLink; + collections.authors = authors; + collections.relatedObjects = relatedObjects; + collections.activeflag = 'active'; + + try { + if (collections.authors) { + collections.authors.forEach(async authorId => { + await createMessage(authorId, collections, collections.activeflag, collectionCreator); + }); + } + await createMessage(0, collections, collections.activeflag, collectionCreator); + + // Send email notifications to all admins and authors who have opted in + await sendEmailNotifications(collections, collections.activeflag, collectionCreator); + } catch (err) { + console.log(err); + // return res.status(500).json({ success: false, error: err }); + } + + collections.save(err => { + if (err) { + return res.json({ success: false, error: err }); + } else { + return res.json({ success: true, id: collections.id }); + } + }); +}); + +router.put('/status', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + var { id, activeflag } = req.body; + var isAuthorAdmin = false; + + var q = Collections.aggregate([{ $match: { $and: [{ id: parseInt(req.body.id) }, { authors: req.user.id }] } }]); + q.exec((err, data) => { + if (data.length === 1) { + isAuthorAdmin = true; + } + + if (req.user.role === 'Admin') { + isAuthorAdmin = true; + } + + if (isAuthorAdmin) { + Collections.findOneAndUpdate( + { id: id }, + { + activeflag: activeflag, + }, + err => { + if (err) { + return res.json({ success: false, error: err }); + } + } + ).then(() => { + return res.json({ success: true }); + }); + } else { + return res.json({ success: false, error: 'Not authorised' }); + } + }); +}); + +router.delete('/delete/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + var isAuthorAdmin = false; + + var q = Collections.aggregate([{ $match: { $and: [{ id: parseInt(req.params.id) }, { authors: req.user.id }] } }]); + q.exec((err, data) => { + if (data.length === 1) { + isAuthorAdmin = true; + } + + if (req.user.role === 'Admin') { + isAuthorAdmin = true; + } + + if (isAuthorAdmin) { + Collections.findOneAndRemove({ id: req.params.id }, err => { + if (err) return res.send(err); + return res.json({ success: true }); + }); + } else { + return res.json({ success: false, error: 'Not authorised' }); + } + }); +}); + +module.exports = router; + +async function createMessage(authorId, collections, activeflag, collectionCreator) { + let message = new MessagesModel(); + + const collectionLink = process.env.homeURL + '/collection/' + collections.id; + const messageRecipients = await UserModel.find({ $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] }); + async function saveMessage() { + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = authorId; + message.messageObjectID = collections.id; + message.messageSent = Date.now(); + message.isRead = false; + await message.save(); + } + + if (authorId === 0) { + message.messageType = 'added collection'; + message.messageDescription = 
`${collectionCreator.name} added a new collection: ${collections.name}.`; + saveMessage(); + } + + for (let messageRecipient of messageRecipients) { + if (activeflag === 'active' && authorId === messageRecipient.id && authorId === collectionCreator.id) { + message.messageType = 'added collection'; + message.messageDescription = `Your new collection ${collections.name} has been added.`; + saveMessage(); + } else if (activeflag === 'active' && authorId === messageRecipient.id && authorId !== collectionCreator.id) { + message.messageType = 'added collection'; + message.messageDescription = `${collectionCreator.name} added you as a collaborator on the new collection ${collections.name}.`; + saveMessage(); + } + } + + //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS + // else if (activeflag === 'archive') { + // message.messageType = 'rejected'; + // message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${collectionLink}` + // } +} + +async function sendEmailNotifications(collections, activeflag, collectionCreator) { + let subject; + let html; + // 1. Generate URL for linking collection in email + const collectionLink = process.env.homeURL + '/collection/' + collections.id; + + // 2. Build email body + emailRecipients.map(emailRecipient => { + if (activeflag === 'active' && emailRecipient.role === 'Admin') { + subject = `New collection ${collections.name} has been added and is now live`; + html = `New collection ${collections.name} has been added and is now live

${collectionLink}`; + } + + collections.authors.map(author => { + if (activeflag === 'active' && author === emailRecipient.id && author === collectionCreator.id) { + subject = `Your collection ${collections.name} has been added and is now live`; + html = `Your collection ${collections.name} has been added and is now live

${collectionLink}`; + } else if (activeflag === 'active' && author === emailRecipient.id && author !== collectionCreator.id) { + subject = `You have been added as a collaborator on collection ${collections.name}`; + html = `${collectionCreator.name} has added you as a collaborator to the collection ${collections.name} which is now live

${collectionLink}`; + } + }); + }); + + if (activeflag === 'active') { + subject = `Your collection ${collections.name} has been approved and is now live`; + html = `Your collection ${collections.name} has been approved and is now live

${collectionLink}`; + } + //UPDATE WHEN ARCHIVE/DELETE IS AVAILABLE FOR COLLECTIONS + // else if (activeflag === 'archive') { + // subject = `Your collection ${collections.name} has been rejected` + // html = `Your collection ${collections.name} has been rejected

${collectionLink}` + // } + + // 3. Query Db for all admins or authors of the collection who have opted in to email updates + var q = UserModel.aggregate([ + // Find all users who are admins or authors of this collection + { $match: { $or: [{ role: 'Admin' }, { id: { $in: collections.authors } }] } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 4. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); + }); +} diff --git a/src/resources/course/course.model.js b/src/resources/course/course.model.js index e901d38c..4e5c0029 100644 --- a/src/resources/course/course.model.js +++ b/src/resources/course/course.model.js @@ -1,62 +1,66 @@ import { model, Schema } from 'mongoose'; const CourseSchema = new Schema( - { - id: Number, - type: String, - creator: Number, - activeflag: String, - //updatedon: Date, - counter: Number, - discourseTopicId: Number, - relatedObjects: [ - { - objectId: String, - reason: String, - objectType: String, - pid: String, - user: String, - updated: String, - }, - ], + { + id: Number, + type: String, + creator: Number, + activeflag: String, + //updatedon: Date, + counter: Number, + discourseTopicId: Number, + relatedObjects: [ + { + objectId: String, + reason: String, + objectType: String, + pid: String, + user: String, + updated: String, + }, + ], - title: String, - link: String, - provider: String, - description: String, - courseDelivery: String, - location: String, - keywords: [String], - domains: [String], - courseOptions: [{ - flexibleDates: { type: Boolean, default: false }, - startDate: Date, - studyMode: String, - studyDurationNumber: Number, - studyDurationMeasure: String, - fees: [{ - feeDescription: String, - feeAmount: Number, - feePer: String - }], - }], - entries: [ - { - level: String, - subject: String - } - ], - restrictions: String, - award: [String], - competencyFramework: String, - nationalPriority: String - }, - { - collection: 'course', - timestamps: true, - toJSON: { virtuals: true }, - toObject: { virtuals: true } - } + title: String, + link: String, + provider: String, + description: String, + courseDelivery: String, + location: String, + keywords: [String], + domains: [String], + courseOptions: [ + { + flexibleDates: { type: Boolean, default: false }, + startDate: Date, + studyMode: String, + studyDurationNumber: Number, + studyDurationMeasure: String, + fees: [ + { + feeDescription: String, + feeAmount: Number, + feePer: String, + }, + ], + }, + ], + entries: [ + { + level: String, + subject: String, + }, + ], + restrictions: String, + award: [String], + competencyFramework: String, + nationalPriority: String, + }, + { + collection: 'course', + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } ); -export const Course = model('Course', CourseSchema) \ No newline at end of file +export const Course = model('Course', CourseSchema); diff --git a/src/resources/course/course.repository.js 
b/src/resources/course/course.repository.js index 7a500e2e..cb1e016f 100644 --- a/src/resources/course/course.repository.js +++ b/src/resources/course/course.repository.js @@ -1,498 +1,580 @@ import { Course } from './course.model'; -import { MessagesModel } from '../message/message.model' -import { UserModel } from '../user/user.model' -import { createDiscourseTopic } from '../discourse/discourse.service' +import { MessagesModel } from '../message/message.model'; +import { UserModel } from '../user/user.model'; +import { createDiscourseTopic } from '../discourse/discourse.service'; import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; +import { utils } from '../auth'; +import { ROLES } from '../user/user.roles'; const asyncModule = require('async'); const hdrukEmail = `enquiry@healthdatagateway.org`; const urlValidator = require('../utilities/urlValidator'); const inputSanitizer = require('../utilities/inputSanitizer'); export async function getObjectById(id) { - return await Course.findOne({ id }).exec() + return await Course.findOne({ id }).exec(); } const addCourse = async (req, res) => { - return new Promise(async(resolve, reject) => { - let course = new Course(); - course.id = parseInt(Math.random().toString().replace('0.', '')); - course.type = 'course'; - course.creator = req.user.id; - course.activeflag = 'review'; - course.updatedon = Date.now(); - course.relatedObjects = req.body.relatedObjects; - - course.title = inputSanitizer.removeNonBreakingSpaces(req.body.title); - course.link = inputSanitizer.removeNonBreakingSpaces(req.body.link); - course.provider = inputSanitizer.removeNonBreakingSpaces(req.body.provider); - course.description = inputSanitizer.removeNonBreakingSpaces(req.body.description); - course.courseDelivery = inputSanitizer.removeNonBreakingSpaces(req.body.courseDelivery); - course.location = inputSanitizer.removeNonBreakingSpaces(req.body.location); - course.keywords = inputSanitizer.removeNonBreakingSpaces(req.body.keywords); - course.domains = inputSanitizer.removeNonBreakingSpaces(req.body.domains); - - if (req.body.courseOptions) { - req.body.courseOptions.forEach((x) => { - if (x.flexibleDates) x.startDate = null; - x.studyMode = inputSanitizer.removeNonBreakingSpaces(x.studyMode); - x.studyDurationMeasure = inputSanitizer.removeNonBreakingSpaces(x.studyDurationMeasure); - if (x.fees) { - x.fees.forEach((y) => { - y.feeDescription = inputSanitizer.removeNonBreakingSpaces(y.feeDescription); - y.feePer = inputSanitizer.removeNonBreakingSpaces(y.feePer); - }); - } - }); - } - course.courseOptions = req.body.courseOptions; - - if (req.body.entries) { - req.body.entries.forEach((x) => { - x.level = inputSanitizer.removeNonBreakingSpaces(x.level); - x.subject = inputSanitizer.removeNonBreakingSpaces(x.subject); - }); - } - course.entries = req.body.entries; - - course.restrictions = inputSanitizer.removeNonBreakingSpaces(req.body.restrictions); - course.award = inputSanitizer.removeNonBreakingSpaces(req.body.award); - course.competencyFramework = inputSanitizer.removeNonBreakingSpaces(req.body.competencyFramework); - course.nationalPriority = inputSanitizer.removeNonBreakingSpaces(req.body.nationalPriority); - - - - - - - - - - - let newCourse = await course.save(); - if(!newCourse) - reject(new Error(`Can't persist data object to DB.`)); - - await createMessage(course.creator, course.id, course.title, course.type, 'add'); - await createMessage(0, course.id, course.title, course.type, 'add'); - // Send email 
notification of status update to admins and authors who have opted in - await sendEmailNotifications(course, 'add'); - resolve(newCourse); - }) + return new Promise(async (resolve, reject) => { + let course = new Course(); + course.id = parseInt(Math.random().toString().replace('0.', '')); + course.type = 'course'; + course.creator = req.user.id; + course.activeflag = 'review'; + course.updatedon = Date.now(); + course.relatedObjects = req.body.relatedObjects; + + course.title = inputSanitizer.removeNonBreakingSpaces(req.body.title); + course.link = inputSanitizer.removeNonBreakingSpaces(req.body.link); + course.provider = inputSanitizer.removeNonBreakingSpaces(req.body.provider); + course.description = inputSanitizer.removeNonBreakingSpaces(req.body.description); + course.courseDelivery = inputSanitizer.removeNonBreakingSpaces(req.body.courseDelivery); + course.location = inputSanitizer.removeNonBreakingSpaces(req.body.location); + course.keywords = inputSanitizer.removeNonBreakingSpaces(req.body.keywords); + course.domains = inputSanitizer.removeNonBreakingSpaces(req.body.domains); + + if (req.body.courseOptions) { + req.body.courseOptions.forEach(x => { + if (x.flexibleDates) x.startDate = null; + x.studyMode = inputSanitizer.removeNonBreakingSpaces(x.studyMode); + x.studyDurationMeasure = inputSanitizer.removeNonBreakingSpaces(x.studyDurationMeasure); + if (x.fees) { + x.fees.forEach(y => { + y.feeDescription = inputSanitizer.removeNonBreakingSpaces(y.feeDescription); + y.feePer = inputSanitizer.removeNonBreakingSpaces(y.feePer); + }); + } + }); + } + course.courseOptions = req.body.courseOptions; + + if (req.body.entries) { + req.body.entries.forEach(x => { + x.level = inputSanitizer.removeNonBreakingSpaces(x.level); + x.subject = inputSanitizer.removeNonBreakingSpaces(x.subject); + }); + } + course.entries = req.body.entries; + + course.restrictions = inputSanitizer.removeNonBreakingSpaces(req.body.restrictions); + course.award = inputSanitizer.removeNonBreakingSpaces(req.body.award); + course.competencyFramework = inputSanitizer.removeNonBreakingSpaces(req.body.competencyFramework); + course.nationalPriority = inputSanitizer.removeNonBreakingSpaces(req.body.nationalPriority); + + let newCourse = await course.save(); + if (!newCourse) reject(new Error(`Can't persist data object to DB.`)); + + await createMessage(course.creator, course.id, course.title, course.type, 'add'); + await createMessage(0, course.id, course.title, course.type, 'add'); + // Send email notification of status update to admins and authors who have opted in + await sendEmailNotifications(course, 'add'); + resolve(newCourse); + }); }; +const editCourse = async (req, res) => { + return new Promise(async (resolve, reject) => { + let id = req.params.id; + + if (req.body.entries) { + req.body.entries.forEach(e => { + e.level = inputSanitizer.removeNonBreakingSpaces(e.level); + e.subject = inputSanitizer.removeNonBreakingSpaces(e.subject); + }); + } + + if (req.body.courseOptions) { + req.body.courseOptions.forEach(x => { + if (x.flexibleDates) x.startDate = null; + x.studyMode = inputSanitizer.removeNonBreakingSpaces(x.studyMode); + x.studyDurationMeasure = inputSanitizer.removeNonBreakingSpaces(x.studyDurationMeasure); + if (x.fees) { + x.fees.forEach(y => { + y.feeDescription = inputSanitizer.removeNonBreakingSpaces(y.feeDescription); + y.feePer = inputSanitizer.removeNonBreakingSpaces(y.feePer); + }); + } + }); + } + + let relatedObjects = req.body.relatedObjects; + let courseOptions = req.body.courseOptions; + let 
entries = req.body.entries; + + Course.findOneAndUpdate( + { id: id }, + { + title: inputSanitizer.removeNonBreakingSpaces(req.body.title), + link: urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)), + provider: inputSanitizer.removeNonBreakingSpaces(req.body.provider), + description: inputSanitizer.removeNonBreakingSpaces(req.body.description), + courseDelivery: inputSanitizer.removeNonBreakingSpaces(req.body.courseDelivery), + location: inputSanitizer.removeNonBreakingSpaces(req.body.location), + keywords: inputSanitizer.removeNonBreakingSpaces(req.body.keywords), + domains: inputSanitizer.removeNonBreakingSpaces(req.body.domains), + relatedObjects: relatedObjects, + courseOptions: courseOptions, + entries: entries, + restrictions: inputSanitizer.removeNonBreakingSpaces(req.body.restrictions), + award: inputSanitizer.removeNonBreakingSpaces(req.body.award), + competencyFramework: inputSanitizer.removeNonBreakingSpaces(req.body.competencyFramework), + nationalPriority: inputSanitizer.removeNonBreakingSpaces(req.body.nationalPriority), + }, + err => { + if (err) { + reject(new Error(`Failed to update.`)); + } + } + ).then(async course => { + if (course == null) { + reject(new Error(`No record found with id of ${id}.`)); + } + + await createMessage(course.creator, id, course.title, course.type, 'edit'); + await createMessage(0, id, course.title, course.type, 'edit'); + // Send email notification of status update to admins and authors who have opted in + await sendEmailNotifications(course, 'edit'); + + resolve(course); + }); + }); +}; +const deleteCourse = async (req, res) => { + return new Promise(async (resolve, reject) => { + const id = req.params.id; + Course.findOneAndDelete({ id: req.params.id }, err => { + if (err) reject(err); + }).then(course => { + if (course == null) { + reject(`No Content`); + } else { + resolve(id); + } + }); + }); +}; +const getAllCourses = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 1000; + let typeString = ''; + let searchString = ''; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + + let searchQuery = { $and: [{ type: 'course' }] }; + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + await Promise.all([getObjectResult(typeString, searchAll, searchQuery, startIndex, limit)]).then(values => { + resolve(values[0]); + }); + }); +}; +const getCourseAdmin = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let typeString = ''; + let searchString = ''; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + if (req.query.status) { + status = req.query.status; + } + + let searchQuery; + if (status === 'all') { + searchQuery = { $and: [{ type: 'course' }] }; + } else { + searchQuery = { $and: [{ type: 'course' }, { activeflag: status }] }; + } + + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + await Promise.all([getObjectResult(typeString, searchAll, searchQuery, startIndex, limit),
getCountsByStatus()]).then(values => { + resolve(values); + }); + }); +}; +const getCourse = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let idString = req.user.id; + let status = 'all'; - -const editCourse = async (req, res) => { - return new Promise(async(resolve, reject) => { - let id = req.params.id; - - if(req.body.entries){ - req.body.entries.forEach((e) => { - e.level = inputSanitizer.removeNonBreakingSpaces(e.level); - e.subject = (inputSanitizer.removeNonBreakingSpaces(e.subject)); - }); - } - - if (req.body.courseOptions) { - req.body.courseOptions.forEach((x) => { - if (x.flexibleDates) x.startDate = null; - x.studyMode = inputSanitizer.removeNonBreakingSpaces(x.studyMode); - x.studyDurationMeasure = inputSanitizer.removeNonBreakingSpaces(x.studyDurationMeasure); - if (x.fees) { - x.fees.forEach((y) => { - y.feeDescription = inputSanitizer.removeNonBreakingSpaces(y.feeDescription); - y.feePer = inputSanitizer.removeNonBreakingSpaces(y.feePer); - }); - } - }); - } - - let relatedObjects = req.body.relatedObjects; - let courseOptions = req.body.courseOptions; - let entries = req.body.entries; - - Course.findOneAndUpdate({ id: id }, - { - title: inputSanitizer.removeNonBreakingSpaces(req.body.title), - link: urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)), - provider: inputSanitizer.removeNonBreakingSpaces(req.body.provider), - description: inputSanitizer.removeNonBreakingSpaces(req.body.description), - courseDelivery: inputSanitizer.removeNonBreakingSpaces(req.body.courseDelivery), - location: inputSanitizer.removeNonBreakingSpaces(req.body.location), - keywords: inputSanitizer.removeNonBreakingSpaces(req.body.keywords), - domains: inputSanitizer.removeNonBreakingSpaces(req.body.domains), - relatedObjects: relatedObjects, - courseOptions: courseOptions, - entries:entries, - restrictions: inputSanitizer.removeNonBreakingSpaces(req.body.restrictions), - award: inputSanitizer.removeNonBreakingSpaces(req.body.award), - competencyFramework: inputSanitizer.removeNonBreakingSpaces(req.body.competencyFramework), - nationalPriority: inputSanitizer.removeNonBreakingSpaces(req.body.nationalPriority), - }, (err) => { - if (err) { - reject(new Error(`Failed to update.`)); - } - }).then(async (course) => { - if(course == null){ - reject(new Error(`No record found with id of ${id}.`)); - } - - await createMessage(course.creator, id, course.title, course.type, 'edit'); - await createMessage(0, id, course.title, course.type, 'edit'); - // Send email notification of status update to admins and authors who have opted in - await sendEmailNotifications(course, 'edit'); - - resolve(course); - }); - }) - }; - - const deleteCourse = async(req, res) => { - return new Promise(async(resolve, reject) => { - const { id } = req.params.id; - Course.findOneAndDelete({ id: req.params.id }, (err) => { - if (err) reject(err); - - - }).then((course) => { - if(course == null){ - reject(`No Content`); - } - else{ - resolve(id); - } - } - ) - })}; - - const getCourseAdmin = async (req, res) => { - return new Promise(async (resolve, reject) => { - - let startIndex = 0; - let limit = 1000; - let typeString = ""; - let searchString = ""; - - if (req.query.offset) { + if (req.query.offset) { startIndex = req.query.offset; } if (req.query.limit) { limit = req.query.limit; - } - if (req.query.q) { - searchString = req.query.q || "";; - } + } + if (req.query.id) { + idString = req.query.id; + } + + let searchQuery; + 
if(status === 'all'){ + searchQuery = [{ type: 'course' }, { creator: parseInt(idString) }] + } else { + searchQuery = [{ type: 'course' }, { creator: parseInt(idString) }, { activeflag: status }] + } + + let query = Course.aggregate([ + { $match: { $and: searchQuery} }, + { $lookup: { from: 'tools', localField: 'creator', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + + await Promise.all([getUserCourses(query), getCountsByStatusCreator(idString)]).then(values => { + resolve(values); + }); + + function getUserCourses(query) { + return new Promise((resolve, reject) => { + query.exec((err, data) => { + if (err) reject({ success: false, error: err }); + + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); + } + }); +}; - let searchQuery = { $and: [{ type: 'course' }] }; - let searchAll = false; +const setStatus = async (req, res) => { + return new Promise(async (resolve, reject) => { + try { + const { activeflag, rejectionReason } = req.body; + const id = req.params.id; + const userId = req.user.id; + let course; + + if (utils.whatIsRole(req) === ROLES.Admin) { + course = await Course.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag } }); + if (!course) { + reject(new Error('Course not found')); + } + } else if (activeflag === 'archive') { + course = await Course.findOneAndUpdate({ $and: [{ id: id }, { creator: userId }] }, { $set: { activeflag: activeflag } }); + if (!course) { + reject(new Error('Course not found or user not authorised to change Course status')); + } + } else { + reject(new Error('Not authorised to change the status of this Course')); + } + + await createMessage(course.creator, id, course.title, course.type, activeflag, rejectionReason); + await createMessage(0, id, course.title, course.type, activeflag, rejectionReason); + + if (!course.discourseTopicId && course.activeflag === 'active') { + await createDiscourseTopic(course); + } + + // Send email notification of status update to admins and authors who have opted in + await sendEmailNotifications(course, activeflag, rejectionReason); + + resolve(id); + } catch (err) { + console.log(err); + reject(new Error(err)); + } + }); +}; - if (searchString.length > 0) { - searchQuery["$and"].push({ $text: { $search: searchString } }); - } - else { - searchAll = true; - } - await Promise.all([ - getObjectResult(typeString, searchAll, searchQuery, startIndex, limit), - ]).then((values) => { - resolve(values[0]); - }); - }); - } - - const getCourse = async (req, res) => { - return new Promise(async (resolve, reject) => { - //let startIndex = 0; - //let limit = 1000; - let idString = req.user.id; - - /* if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.limit) { - limit = req.query.limit; - } */ - if (req.query.id) { - idString = req.query.id; - } - - let query = Course.aggregate([ - { $match: { $and: [{ type: 'course' }, { creator: parseInt(idString) }] } }, - { $lookup: { from: "tools", localField: "creator", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ]);//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - query.exec((err, data) => { - if (err) reject({ success: false, error: err }); - - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - resolve(data); - }); - }); - } - - const setStatus = async (req, res) => { 
- return new Promise(async (resolve, reject) => { - try { - const { activeflag, rejectionReason } = req.body; - const id = req.params.id; - - let course = await Course.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag } }); - if (!course) { - reject(new Error('Course not found')); - } - - - await createMessage(course.creator, id, course.title, course.type, activeflag, rejectionReason); - await createMessage(0, id, course.title, course.type, activeflag, rejectionReason); - - if (!course.discourseTopicId && course.activeflag === 'active') { - await createDiscourseTopic(course); - } - - // Send email notification of status update to admins and authors who have opted in - await sendEmailNotifications(course, activeflag, rejectionReason); - - resolve(id); - - } catch (err) { - console.log(err); - reject(new Error(err)); - } - }); - }; - - async function createMessage(authorId, toolId, toolName, toolType, activeflag, rejectionReason) { - let message = new MessagesModel(); - const toolLink = process.env.homeURL + '/' + toolType + '/' + toolId; - - if (activeflag === 'active') { - message.messageType = 'approved'; - message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}` - } else if (activeflag === 'archive') { - message.messageType = 'archive'; - message.messageDescription = `Your ${toolType} ${toolName} has been archived ${toolLink}` - } else if (activeflag === 'rejected') { - message.messageType = 'rejected'; - message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}` - message.messageDescription = (rejectionReason) ? message.messageDescription.concat(` Rejection reason: ${rejectionReason}`) : message.messageDescription - } - else if (activeflag === 'add') { - message.messageType = 'add'; - message.messageDescription = `Your ${toolType} ${toolName} has been submitted for approval` - } - else if (activeflag === 'edit') { - message.messageType = 'edit'; - message.messageDescription = `Your ${toolType} ${toolName} has been updated` - } - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = authorId; - message.messageObjectID = toolId; - message.messageSent = Date.now(); - message.isRead = false; - await message.save(); - } - - async function sendEmailNotifications(tool, activeflag, rejectionReason) { - let subject; - let html; - let adminCanUnsubscribe = true; - // 1. Generate tool URL for linking user from email - const toolLink = process.env.homeURL + '/' + tool.type + '/' + tool.id - - // 2. Build email body - if (activeflag === 'active') { - subject = `Your ${tool.type} ${tool.title} has been approved and is now live` - html = `Your ${tool.type} ${tool.title} has been approved and is now live

${toolLink}` - } else if (activeflag === 'archive') { - subject = `Your ${tool.type} ${tool.title} has been archived` - html = `Your ${tool.type} ${tool.title} has been archived

${toolLink}` - } else if (activeflag === 'rejected') { - subject = `Your ${tool.type} ${tool.title} has been rejected` - html = `Your ${tool.type} ${tool.title} has been rejected

Rejection reason: ${rejectionReason}

${toolLink}` - } - else if (activeflag === 'add') { - subject = `Your ${tool.type} ${tool.title} has been submitted for approval` - html = `Your ${tool.type} ${tool.title} has been submitted for approval

${toolLink}` - adminCanUnsubscribe = false; - } - else if (activeflag === 'edit') { - subject = `Your ${tool.type} ${tool.title} has been updated` - html = `Your ${tool.type} ${tool.title} has been updated

${toolLink}` - } +async function createMessage(authorId, toolId, toolName, toolType, activeflag, rejectionReason) { + let message = new MessagesModel(); + const toolLink = process.env.homeURL + '/' + toolType + '/' + toolId; + + if (activeflag === 'active') { + message.messageType = 'approved'; + message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}`; + } else if (activeflag === 'archive') { + message.messageType = 'archive'; + message.messageDescription = `Your ${toolType} ${toolName} has been archived ${toolLink}`; + } else if (activeflag === 'rejected') { + message.messageType = 'rejected'; + message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}`; + message.messageDescription = rejectionReason + ? message.messageDescription.concat(` Rejection reason: ${rejectionReason}`) + : message.messageDescription; + } else if (activeflag === 'add') { + message.messageType = 'add'; + message.messageDescription = `Your ${toolType} ${toolName} has been submitted for approval`; + } else if (activeflag === 'edit') { + message.messageType = 'edit'; + message.messageDescription = `Your ${toolType} ${toolName} has been updated`; + } + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = authorId; + message.messageObjectID = toolId; + message.messageSent = Date.now(); + message.isRead = false; + await message.save(); +} - if(adminCanUnsubscribe){ - // 3. Find the creator of the course and admins if they have opted in to email updates - var q = UserModel.aggregate([ - // Find the creator of the course and Admins - { $match: { $or: [{ role: 'Admin' }, { id: tool.creator }] } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html - ); - }); - } - else{ - // 3. Find the creator of the course if they have opted in to email updates - var q = UserModel.aggregate([ - // Find all authors of this tool - { $match: { id: tool.creator } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html - ); - }); - - // 5. 
Find all admins regardless of email opt-in preference - q = UserModel.aggregate([ - // Find all admins - { $match: { role: 'Admin' } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1 } } - ]); - - // 6. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html, - adminCanUnsubscribe - ); - }); - } - } +async function sendEmailNotifications(tool, activeflag, rejectionReason) { + let subject; + let html; + let adminCanUnsubscribe = true; + // 1. Generate tool URL for linking user from email + const toolLink = process.env.homeURL + '/' + tool.type + '/' + tool.id; + + // 2. Build email body + if (activeflag === 'active') { + subject = `Your ${tool.type} ${tool.title} has been approved and is now live`; + html = `Your ${tool.type} ${tool.title} has been approved and is now live

${toolLink}`; + } else if (activeflag === 'archive') { + subject = `Your ${tool.type} ${tool.title} has been archived`; + html = `Your ${tool.type} ${tool.title} has been archived

${toolLink}`; + } else if (activeflag === 'rejected') { + subject = `Your ${tool.type} ${tool.title} has been rejected`; + html = `Your ${tool.type} ${tool.title} has been rejected

Rejection reason: ${rejectionReason}

${toolLink}`; + } else if (activeflag === 'add') { + subject = `Your ${tool.type} ${tool.title} has been submitted for approval`; + html = `Your ${tool.type} ${tool.title} has been submitted for approval

${toolLink}`; + adminCanUnsubscribe = false; + } else if (activeflag === 'edit') { + subject = `Your ${tool.type} ${tool.title} has been updated`; + html = `Your ${tool.type} ${tool.title} has been updated

${toolLink}`; + } + + if (adminCanUnsubscribe) { + // 3. Find the creator of the course and admins if they have opted in to email updates + var q = UserModel.aggregate([ + // Find the creator of the course and Admins + { $match: { $or: [{ role: 'Admin' }, { id: tool.creator }] } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 4. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); + }); + } else { + // 3. Find the creator of the course if they have opted in to email updates + var q = UserModel.aggregate([ + // Find all authors of this tool + { $match: { id: tool.creator } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 4. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); + }); + + // 5. Find all admins regardless of email opt-in preference + q = UserModel.aggregate([ + // Find all admins + { $match: { role: 'Admin' } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1 } }, + ]); + + // 6. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html, adminCanUnsubscribe); + }); + } +} async function sendEmailNotificationToAuthors(tool, toolOwner) { - // 1. Generate tool URL for linking user from email - const toolLink = process.env.homeURL + '/course/' + tool.id - - // 2. Find all authors of the tool who have opted in to email updates - var q = UserModel.aggregate([ - // Find all authors of this tool - { $match: { id: tool.creator } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 3. 
Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - `${toolOwner.name} added you as an author of the tool ${tool.name}`, - `${toolOwner.name} added you as an author of the tool ${tool.name}

${toolLink}` - ); - }); - }; + // 1. Generate tool URL for linking user from email + const toolLink = process.env.homeURL + '/course/' + tool.id; + + // 2. Find all authors of the tool who have opted in to email updates + var q = UserModel.aggregate([ + // Find all authors of this tool + { $match: { id: tool.creator } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 3. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail( + emailRecipients, + `${hdrukEmail}`, + `${toolOwner.name} added you as an author of the tool ${tool.name}`, + `${toolOwner.name} added you as an author of the tool ${tool.name}

${toolLink}` + ); + }); +} async function storeNotificationsForAuthors(tool, toolOwner) { - //store messages to alert a user has been added as an author - - //normal user - var toolCopy = JSON.parse(JSON.stringify(tool)); - var listToEmail = [toolCopy.creator]; - - asyncModule.eachSeries(listToEmail, async (author) => { - const user = await UserModel.findById(author) - let message = new MessagesModel(); - message.messageType = 'author'; - message.messageSent = Date.now(); - message.messageDescription = `${toolOwner.name} added you as an author of the ${toolCopy.type} ${toolCopy.title}` - message.isRead = false; - message.messageObjectID = toolCopy.id; - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = author; - - await message.save(async (err) => { - if (err) { - return new Error({ success: false, error: err }); - } - return { success: true, id: message.messageID }; - }); - }); -}; + //store messages to alert a user has been added as an author + + //normal user + var toolCopy = JSON.parse(JSON.stringify(tool)); + var listToEmail = [toolCopy.creator]; + + asyncModule.eachSeries(listToEmail, async author => { + const user = await UserModel.findById(author); + let message = new MessagesModel(); + message.messageType = 'author'; + message.messageSent = Date.now(); + message.messageDescription = `${toolOwner.name} added you as an author of the ${toolCopy.type} ${toolCopy.title}`; + message.isRead = false; + message.messageObjectID = toolCopy.id; + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = author; + + await message.save(async err => { + if (err) { + return new Error({ success: false, error: err }); + } + return { success: true, id: message.messageID }; + }); + }); +} function getObjectResult(type, searchAll, searchQuery, startIndex, limit) { - let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - let q = ''; - - if (searchAll) { - q = Course.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "creator", foreignField: "id", as: "persons" } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "objects" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "toolID", as: "reviews" } } - ]).sort({ updatedAt : -1}).skip(parseInt(startIndex)).limit(parseInt(limit)); - } - else{ - q = Course.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "creator", foreignField: "id", as: "persons" } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "objects" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "toolID", as: "reviews" } } - ]).sort({ score: { $meta: "textScore" } }).skip(parseInt(startIndex)).limit(parseInt(limit)); - } - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === "undefined") { - resolve([]); - } - else { - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - resolve(data); - } - }) - }) -}; + let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + let q = ''; + + if (searchAll) { + q = Course.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'creator', foreignField: 'id', as: 'persons' } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'authors', as: 'objects' } }, + { $lookup: { from: 'reviews', localField: 'id', foreignField: 'toolID', as: 'reviews' } }, + ]) + .sort({ updatedAt: -1 }) + 
.skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } else { + q = Course.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'creator', foreignField: 'id', as: 'persons' } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'authors', as: 'objects' } }, + { $lookup: { from: 'reviews', localField: 'id', foreignField: 'toolID', as: 'reviews' } }, + ]) + .sort({ score: { $meta: 'textScore' } }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') { + resolve([]); + } else { + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + resolve(data); + } + }); + }); +} + +function getCountsByStatus() { + let q = Course.find({}, { id: 1, title: 1, activeflag: 1 }); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (err) return reject({ success: false, error: err }); + + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const reviewCount = data.filter(dat => dat.activeflag === 'review').length; + const rejectedCount = data.filter(dat => dat.activeflag === 'rejected').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount, reviewCount, rejectedCount, archiveCount }; + + resolve(countSummary); + }); + }); +} + +function getCountsByStatusCreator(idString) { + let q = Course.find({ $and: [{ type: 'course' }, { creator: parseInt(idString) }] }, { id: 1, title: 1, activeflag: 1 }); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (err) return reject({ success: false, error: err }); + + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const reviewCount = data.filter(dat => dat.activeflag === 'review').length; + const rejectedCount = data.filter(dat => dat.activeflag === 'rejected').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount, reviewCount, rejectedCount, archiveCount }; + + resolve(countSummary); + }); + }); +} -export { addCourse, editCourse, deleteCourse, setStatus, getCourse, getCourseAdmin } \ No newline at end of file +export { addCourse, editCourse, deleteCourse, setStatus, getCourse, getCourseAdmin, getAllCourses }; diff --git a/src/resources/course/course.route.js b/src/resources/course/course.route.js index 89fdda26..6c427a4a 100644 --- a/src/resources/course/course.route.js +++ b/src/resources/course/course.route.js @@ -4,116 +4,87 @@ import { Data } from '../tool/data.model'; import { Course } from './course.model'; import passport from 'passport'; import { utils } from '../auth'; -import { - addCourse, - editCourse, - setStatus, - getCourseAdmin, - getCourse -} from './course.repository'; +import { addCourse, editCourse, setStatus, getCourseAdmin, getCourse, getAllCourses } from './course.repository'; import escape from 'escape-html'; const router = express.Router(); // @router POST /api/v1/course // @desc Add Course as user // @access Private -router.post( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await addCourse(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin,
ROLES.Creator), async (req, res) => { + await addCourse(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PUT /api/v1/course/{id} // @desc Edit Course as user // @access Private -router.put( - '/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await editCourse(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, error: err.message }); - }); - } -); +router.put('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await editCourse(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, error: err.message }); + }); +}); // @router GET /api/v1/get/admin // @desc Returns List of Tool objects -// @access Private -router.get( - '/getList', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - let role = req.user.role; - - if (role === ROLES.Admin) { - await getCourseAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } else if (role === ROLES.Creator) { - await getCourse(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } - } -); +// @access Private +router.get('/getList', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + let role = req.user.role; + + if (role === ROLES.Admin) { + await getCourseAdmin(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } else if (role === ROLES.Creator) { + await getCourse(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } +}); // @router GET /api/v1/ // @desc Returns List of Tool Objects No auth // This unauthenticated route was created specifically for API-docs // @access Public -router.get( - '/', - async (req, res) => { - await getCourseAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.get('/', async (req, res) => { + await getAllCourses(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PATCH /api/v1/status // @desc Set course status // @access Private -router.patch( - '/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - await setStatus(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, error: err.message }); - }); - } -); +router.patch('/:id', passport.authenticate('jwt'), async (req, res) => { + await setStatus(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, error: err.message }); + }); +}); /** * {get} /tool/:id Tool @@ -121,56 +92,54 @@ router.patch( * Return the details on the tool based on the tool ID. 
*/ router.get('/:id', async (req, res) => { - let id = parseInt(req.params.id) - var query = Course.aggregate([ - { $match: { id: parseInt(req.params.id) } }, - { - $lookup: { - from: 'tools', - localField: 'creator', - foreignField: 'id', - as: 'creator', - }, - } - ]); - query.exec((err, data) => { - if (data.length > 0) { - var p = Data.aggregate([ - { - $match: { - $and: [ - { relatedObjects: { $elemMatch: { objectId: req.params.id } } }, - ], - }, - }, - ]); - p.exec((err, relatedData) => { - relatedData.forEach((dat) => { - dat.relatedObjects.forEach((x) => { - if (x.objectId === req.params.id && dat.id !== req.params.id) { - let relatedObject = { - objectId: dat.id, - reason: x.reason, - objectType: dat.type, - user: x.user, - updated: x.updated - }; - data[0].relatedObjects = [relatedObject, ...data[0].relatedObjects || []]; - } - }); - }); - - if (err) return res.json({ success: false, error: err }); - - return res.json({ - success: true, - data: data - }); - }); - } else { - return res.status(404).send(`Course not found for Id: ${escape(id)}`); - } - }); + let id = parseInt(req.params.id); + var query = Course.aggregate([ + { $match: { id: parseInt(req.params.id) } }, + { + $lookup: { + from: 'tools', + localField: 'creator', + foreignField: 'id', + as: 'creator', + }, + }, + ]); + query.exec((err, data) => { + if (data.length > 0) { + var p = Data.aggregate([ + { + $match: { + $and: [{ relatedObjects: { $elemMatch: { objectId: req.params.id } } }], + }, + }, + ]); + p.exec((err, relatedData) => { + relatedData.forEach(dat => { + dat.relatedObjects.forEach(x => { + if (x.objectId === req.params.id && dat.id !== req.params.id) { + let relatedObject = { + objectId: dat.id, + reason: x.reason, + objectType: dat.type, + user: x.user, + updated: x.updated, + }; + data[0].relatedObjects = [relatedObject, ...(data[0].relatedObjects || [])]; + } + }); + }); + + if (err) return res.json({ success: false, error: err }); + + return res.json({ + success: true, + data: data, + }); + }); + } else { + return res.status(404).send(`Course not found for Id: ${escape(id)}`); + } + }); }); /** @@ -179,32 +148,29 @@ router.get('/:id', async (req, res) => { * Return the details on the tool based on the tool ID for edit. 
*/ router.get('/edit/:id', async (req, res) => { - var query = Course.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.id) }] } }, - { - $lookup: { - from: 'tools', - localField: 'authors', - foreignField: 'id', - as: 'creator', - }, - }, - ]); - query.exec((err, data) => { - if (data.length > 0) { - return res.json({ success: true, data: data }); - } else { - return res.json({ - success: false, - error: `Course not found for course id ${req.params.id}`, - }); - } - }); + var query = Course.aggregate([ + { $match: { $and: [{ id: parseInt(req.params.id) }] } }, + { + $lookup: { + from: 'tools', + localField: 'authors', + foreignField: 'id', + as: 'creator', + }, + }, + ]); + query.exec((err, data) => { + if (data.length > 0) { + return res.json({ success: true, data: data }); + } else { + return res.json({ + success: false, + error: `Course not found for course id ${req.params.id}`, + }); + } + }); }); - - - //Validation required if Delete is to be implemented // router.delete('/:id', // passport.authenticate('jwt'), @@ -221,6 +187,3 @@ router.get('/edit/:id', async (req, res) => { // ); module.exports = router; - - - diff --git a/src/resources/course/coursecounter.route.js b/src/resources/course/coursecounter.route.js index 226b29f5..52b263d8 100644 --- a/src/resources/course/coursecounter.route.js +++ b/src/resources/course/coursecounter.route.js @@ -1,6 +1,6 @@ -import express from "express"; -import { Course } from "./course.model"; -const rateLimit = require("express-rate-limit"); +import express from 'express'; +import { Course } from './course.model'; +const rateLimit = require('express-rate-limit'); const router = express.Router(); @@ -10,7 +10,7 @@ const datasetLimiter = rateLimit({ message: 'Too many calls have been made to this api from this IP, please try again after an hour', }); -router.post("/update", datasetLimiter, async (req, res) => { +router.post('/update', datasetLimiter, async (req, res) => { const { id, counter } = req.body; Course.findOneAndUpdate({ id: id }, { counter: counter }, err => { if (err) return res.json({ success: false, error: err }); @@ -18,4 +18,4 @@ router.post("/update", datasetLimiter, async (req, res) => { }); }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/datarequest/__mocks__/datarequest.js b/src/resources/datarequest/__mocks__/datarequest.js index cb5aba15..115eb0db 100644 --- a/src/resources/datarequest/__mocks__/datarequest.js +++ b/src/resources/datarequest/__mocks__/datarequest.js @@ -6,15 +6,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -147,8 +145,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -190,15 +187,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason 
for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -310,15 +305,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -451,8 +444,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -494,15 +486,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -556,8 +546,7 @@ module.exports = [ lastName: 'adsf', }, dateSubmitted: '2020-10-23T10:55:47.231+00:00', - amendmentIterations: [ - ], + amendmentIterations: [], }, { applicationStatus: 'inReview', @@ -566,15 +555,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -707,8 +694,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -750,15 +736,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -826,7 +810,7 @@ module.exports = [ dateUpdated: '2020-10-30T11:14:01.843+00:00', }, }, - } + }, ], }, { @@ -836,15 +820,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have 
access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -977,8 +959,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -1020,15 +1001,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -1080,7 +1059,7 @@ module.exports = [ principleInvestigator: 'true', regICONumber: '333', lastName: 'Connilly', - country: '' + country: '', }, dateSubmitted: '2020-10-23T10:55:47.231+00:00', amendmentIterations: [ @@ -1138,10 +1117,10 @@ module.exports = [ reason: 'country selection is invalid', requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', - dateRequested: '2020-10-26T17:14:01.843+00:00' - } + dateRequested: '2020-10-26T17:14:01.843+00:00', + }, }, - } + }, ], }, { @@ -1151,15 +1130,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -1292,8 +1269,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -1335,15 +1311,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -1395,7 +1369,7 @@ module.exports = [ principleInvestigator: 'true', regICONumber: '333', lastName: 'Connilly', - country: 'UK' + country: 'UK', }, dateSubmitted: '2020-10-23T10:55:47.231+00:00', amendmentIterations: [ @@ -1456,10 +1430,18 @@ module.exports = [ requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', dateRequested: '2020-10-04T17:14:01.843+00:00', - answer: 'United Kingdom' - } + answer: 'United Kingdom', + }, + reasonforaccess: { + questionSetId: 'reasons', + requested: true, + reason: 'reason for access is not accepted', + requestedBy: 'Robin Kavanagh', + requestedByUser: '5f03530178e28143d7af2eb1', + dateRequested: '2020-10-04T17:14:01.843+00:00', + }, }, - } + }, ], }, { @@ -1469,15 +1451,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + 
description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -1610,8 +1590,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -1653,15 +1632,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -1707,7 +1684,13 @@ module.exports = [ }, ], }, - questionAnswers: { firstName:'David', passportNumber: '223458340957032498570234785', principleInvestigator: true, regICONumber: '333', lastName: 'Connilly' }, + questionAnswers: { + firstName: 'David', + passportNumber: '223458340957032498570234785', + principleInvestigator: true, + regICONumber: '333', + lastName: 'Connilly', + }, dateSubmitted: '2020-10-23T10:55:47.231+00:00', amendmentIterations: [ { @@ -1767,7 +1750,7 @@ module.exports = [ requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', dateRequested: '2020-10-04T17:14:01.843+00:00', - answer: 'United Kingdom' + answer: 'United Kingdom', }, orcid: { questionSetId: 'applicant', @@ -1775,7 +1758,7 @@ module.exports = [ reason: 'orcid is invalid', requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', - dateRequested: '2020-10-04T17:14:01.843+00:00' + dateRequested: '2020-10-04T17:14:01.843+00:00', }, affiliation: { questionSetId: 'applicant', @@ -1783,10 +1766,10 @@ module.exports = [ reason: 'affiliation is invalid', requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', - dateRequested: '2020-10-04T17:14:01.843+00:00' - } + dateRequested: '2020-10-04T17:14:01.843+00:00', + }, }, - } + }, ], }, { @@ -1796,15 +1779,13 @@ module.exports = [ { pageId: 'preSubmission', title: 'Pre-submission', - description: - 'Make sure you have everything you need before you start the application process!!', + description: 'Make sure you have everything you need before you start the application process!!', active: true, }, { pageId: 'safePeople', title: 'Safe People', - description: - 'Please identify any persons or organisations who will have access to the data', + description: 'Please identify any persons or organisations who will have access to the data', active: false, }, { @@ -1937,8 +1918,7 @@ module.exports = [ params: [1, 90], }, ], - guidance: - 'Guidance information for applicant name, please insert your fullname.', + guidance: 'Guidance information for applicant name, please insert your fullname.', }, { questionId: 'passportNumber', @@ -1980,15 +1960,13 @@ module.exports = [ params: [18], }, ], - guidance: - 'A reason for requesting this information, we will use this to monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, ], }, - guidance: - 'A reason for requesting this information, we will use this to 
monitor.', + guidance: 'A reason for requesting this information, we will use this to monitor.', }, ], }, @@ -2034,9 +2012,14 @@ module.exports = [ }, ], }, - questionAnswers: { firstName:'David', passportNumber: '223458340957032498570234785', principleInvestigator: true, regICONumber: '333', lastName: 'Connilly' }, + questionAnswers: { + firstName: 'David', + passportNumber: '223458340957032498570234785', + principleInvestigator: true, + regICONumber: '333', + lastName: 'Connilly', + }, dateSubmitted: '2020-10-23T10:55:47.231+00:00', - amendmentIterations: [ - ], + amendmentIterations: [], }, ]; diff --git a/src/resources/datarequest/__mocks__/testData.json b/src/resources/datarequest/__mocks__/testData.json index 71fc9bf6..eae848a1 100644 --- a/src/resources/datarequest/__mocks__/testData.json +++ b/src/resources/datarequest/__mocks__/testData.json @@ -1,9 +1,9 @@ { - "tasks": [ - { - "name": "test", - "started": "2017-08-28T16:07:38.268Z", - "completed": false - } - ] + "tasks": [ + { + "name": "test", + "started": "2017-08-28T16:07:38.268Z", + "completed": false + } + ] } diff --git a/src/resources/datarequest/__mocks__/users.js b/src/resources/datarequest/__mocks__/users.js index 9befc4d2..5239aba2 100644 --- a/src/resources/datarequest/__mocks__/users.js +++ b/src/resources/datarequest/__mocks__/users.js @@ -1,19 +1,19 @@ import mongoose from 'mongoose'; module.exports = { - applicant : { + applicant: { _id: new mongoose.Types.ObjectId(), firstname: 'test', - lastname: 'applicant 1' + lastname: 'applicant 1', }, - collaborator : { + collaborator: { _id: new mongoose.Types.ObjectId(), firstname: 'test', - lastname: 'collaborator 1' + lastname: 'collaborator 1', }, - custodian : { + custodian: { _id: new mongoose.Types.ObjectId(), firstname: 'test', - lastname: 'custodian 1' - } -} + lastname: 'custodian 1', + }, +}; diff --git a/src/resources/datarequest/amendment/__tests__/amendments.test.it.js b/src/resources/datarequest/amendment/__tests__/amendments.test.it.js index 2adc2073..605e9908 100644 --- a/src/resources/datarequest/amendment/__tests__/amendments.test.it.js +++ b/src/resources/datarequest/amendment/__tests__/amendments.test.it.js @@ -8,17 +8,17 @@ const dataRequest = require('../../__mocks__/datarequest'); /** * Connect to a new in-memory database before running any tests. */ -beforeAll(async () => { +beforeAll(async () => { await dbHandler.connect(); - await dbHandler.loadData({ 'data_requests': dataRequest }); + await dbHandler.loadData({ data_requests: dataRequest }); }); /** * Revert to initial test data after every test. 
*/ afterEach(async () => { - await dbHandler.clearDatabase() - await dbHandler.loadData({ 'data_requests': dataRequest }); + await dbHandler.clearDatabase(); + await dbHandler.loadData({ data_requests: dataRequest }); }); /** @@ -31,4 +31,4 @@ describe('', () => { test('', () => { expect(1).toBe(1); }); -}); \ No newline at end of file +}); diff --git a/src/resources/datarequest/amendment/__tests__/amendments.test.js b/src/resources/datarequest/amendment/__tests__/amendments.test.js index 31baed87..7f10c286 100755 --- a/src/resources/datarequest/amendment/__tests__/amendments.test.js +++ b/src/resources/datarequest/amendment/__tests__/amendments.test.js @@ -6,16 +6,21 @@ const dataRequest = require('../../__mocks__/datarequest'); const users = require('../../__mocks__/users'); describe('addAmendment', () => { - test('given a data request with an existing active amendment iteration, and a custodian triggers an amendment request, then the specified amendment is added to the active iteration', () => { + test('given a data request with an existing active amendment iteration, and a custodian triggers an amendment request, then the specified amendment is added to the active iteration', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); - const questionId = 'title', questionSetId = 'applicant', answer = '', reason = 'the title was incorrectly selected', user = users.custodian, requested = true; + const questionId = 'title', + questionSetId = 'applicant', + answer = '', + reason = 'the title was incorrectly selected', + user = users.custodian, + requested = true; const expected = { questionSetId, requested, reason, requestedBy: `${user.firstname} ${user.lastname}`, - requestedByUser: user._id + requestedByUser: user._id, }; // Act amendmentController.addAmendment(data, questionId, questionSetId, answer, reason, user, requested); @@ -30,14 +35,19 @@ describe('addAmendment', () => { test('given a data request with an existing active iteration, and an applicant makes an unrequested amendment, then the specified amendment including the updated answer is added to the current iteration', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); - const questionId = 'dateofbirth', questionSetId = 'applicant', answer = '15/01/1982', reason = '', user = users.applicant, requested = false; + const questionId = 'dateofbirth', + questionSetId = 'applicant', + answer = '15/01/1982', + reason = '', + user = users.applicant, + requested = false; const expected = { questionSetId, answer, requested, reason, updatedBy: `${user.firstname} ${user.lastname}`, - updatedByUser: user._id + updatedByUser: user._id, }; // Act amendmentController.addAmendment(data, questionId, questionSetId, answer, reason, user, requested); @@ -55,14 +65,20 @@ describe('addAmendment', () => { test('given a data request with an existing active iteration, and an applicant updates an existing amendment, the new amendment takes precedence', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); - const questionId = 'dateofbirth', questionSetId = 'applicant', answer = '15/01/1982', secondAnswer = '16/01/1982', reason = '', user = users.applicant, requested = false; + const questionId = 'dateofbirth', + questionSetId = 'applicant', + answer = '15/01/1982', + secondAnswer = '16/01/1982', + reason = '', + user = users.applicant, + requested = false; const expected = { questionSetId, answer: secondAnswer, requested, reason, updatedBy: `${user.firstname} ${user.lastname}`, - updatedByUser: user._id + updatedByUser: user._id, }; // Act 
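// addAmendment (implemented in amendment.controller.js further down this changeset) keys the
// new AmendmentModel entry by questionId; because this is a custodian-requested amendment
// (requested = true) it stamps requestedBy, requestedByUser and dateRequested rather than the
// updatedBy fields, and writes the entry into the latest unsubmitted amendment iteration.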
amendmentController.addAmendment(data, questionId, questionSetId, answer, reason, user, requested); @@ -88,8 +104,13 @@ describe('addAmendment', () => { test('given a data request without an active amendment iteration, and a custodian triggers an amendment request, then the specified amendment is added to a new iteration as the only key', () => { // Arrange let data = _.cloneDeep(dataRequest[1]); - const questionId = 'title', questionSetId = 'applicant', answer = '', reason = 'the title was incorrectly selected', user = users.custodian, requested = true; - const expected = { + const questionId = 'title', + questionSetId = 'applicant', + answer = '', + reason = 'the title was incorrectly selected', + user = users.custodian, + requested = true; + const expected = { createdBy: user._id, questionAnswers: { title: { @@ -97,10 +118,10 @@ describe('addAmendment', () => { requested, reason, requestedBy: `${user.firstname} ${user.lastname}`, - requestedByUser: user._id - } - } - }; + requestedByUser: user._id, + }, + }, + }; // Act amendmentController.addAmendment(data, questionId, questionSetId, answer, reason, user, requested); // Assert @@ -115,8 +136,13 @@ describe('addAmendment', () => { test('given a data request without an existing active iteration, and an applicant makes an unrequested amendment, then the specified amendment including the updated answer is added to a new iteration as the only key', () => { // Arrange let data = _.cloneDeep(dataRequest[1]); - const questionId = 'dateofbirth', questionSetId = 'applicant', answer = '15/01/1982', reason = '', user = users.applicant, requested = false; - const expected = { + const questionId = 'dateofbirth', + questionSetId = 'applicant', + answer = '15/01/1982', + reason = '', + user = users.applicant, + requested = false; + const expected = { createdBy: user._id, questionAnswers: { dateofbirth: { @@ -125,10 +151,10 @@ describe('addAmendment', () => { requested, reason, updatedBy: `${user.firstname} ${user.lastname}`, - updatedByUser: user._id - } - } - }; + updatedByUser: user._id, + }, + }, + }; // Act amendmentController.addAmendment(data, questionId, questionSetId, answer, reason, user, requested); // Assert @@ -142,10 +168,10 @@ describe('addAmendment', () => { }); describe('getCurrentAmendmentIteration', () => { - test('extracts most recent iteration object by created date', () => { + test('extracts most recent iteration object by created date', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); - const expected = { + const expected = { dateCreated: '2020-11-03T11:14:01.843+00:00', createdBy: '5f03530178e28143d7af2eb1', questionAnswers: { @@ -173,12 +199,12 @@ describe('getLatestAmendmentIterationIndex', () => { // Act const result = amendmentController.getLatestAmendmentIterationIndex(data); // Assert - expect(result).toBe(1); - }); + expect(result).toBe(1); + }); }); describe('getAmendmentIterationParty', () => { - test('given a data request application has been submitted by the applicant, the custodian is now the current responsible party until application is returned', () => { + test('given a data request application has been submitted by the applicant, the custodian is now the current responsible party until application is returned', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); // Act @@ -186,7 +212,7 @@ describe('getAmendmentIterationParty', () => { // Assert expect(result).toBe(constants.userTypes.CUSTODIAN); }); - + test('given a data request application has been returned by the custodian, the applicant is 
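// Amendments are stored keyed by questionId, so when addAmendment runs again for the same
// question the later entry replaces the earlier one and the most recent answer takes precedence.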
now the current responsible party', () => { // Arrange let data = _.cloneDeep(dataRequest[0]); @@ -212,14 +238,24 @@ describe('removeIterationAnswers', () => { requestedBy: 'Robin Kavanagh', requestedByUser: '5f03530178e28143d7af2eb1', dateRequested: '2020-10-04T17:14:01.843+00:00', - answer: 'UK' - } + answer: 'UK', + }, + reasonforaccess: { + questionSetId: 'reasons', + requested: true, + reason: 'reason for access is not accepted', + requestedBy: 'Robin Kavanagh', + requestedByUser: '5f03530178e28143d7af2eb1', + dateRequested: '2020-10-04T17:14:01.843+00:00', + }, }, - } + }; const data = _.cloneDeep(dataRequest); - const cases = [[data[4], data[4].amendmentIterations[2], expected], [data[1], {}, undefined]]; + const cases = [ + [data[4], data[4].amendmentIterations[2], expected] + ]; test.each(cases)( - "given an amendment iteration which is not resubmitted, it strips answers", + 'given an amendment iteration which is not resubmitted, it strips answers', (accessRecord, iteration, expectedResult) => { // Act const result = amendmentController.removeIterationAnswers(accessRecord, iteration); @@ -233,7 +269,10 @@ describe('handleApplicantAmendment', () => { test('given an applicant makes an amendment, then the corresponding amendment is updated or created depending on existance of requested or previous amendment', () => { // Arrange let data = _.cloneDeep(dataRequest[1]); - const questionId = 'lastName', questionSetId = 'applicant', answer = 'Smith', user = users.applicant; + const questionId = 'lastName', + questionSetId = 'applicant', + answer = 'Smith', + user = users.applicant; // Act data = amendmentController.handleApplicantAmendment(data, questionId, questionSetId, answer, user); // Assert @@ -248,7 +287,11 @@ describe('handleApplicantAmendment', () => { test('given an applicant makes an amendment, and updates the same question, then the latest answer is correctly stored in the same iteration version', () => { // Arrange let data = _.cloneDeep(dataRequest[1]); - const questionId = 'lastName', questionSetId = 'applicant', answer = 'Smyth', secondAnswer = 'Smith', user = users.applicant; + const questionId = 'lastName', + questionSetId = 'applicant', + answer = 'Smyth', + secondAnswer = 'Smith', + user = users.applicant; data = amendmentController.handleApplicantAmendment(data, questionId, questionSetId, answer, user); // Act data = amendmentController.handleApplicantAmendment(data, questionId, questionSetId, secondAnswer, user); @@ -268,7 +311,7 @@ describe('removeAmendment', () => { let data = _.cloneDeep(dataRequest[0]); const questionId = 'lastName'; const initialLastName = data.amendmentIterations[1].questionAnswers[questionId]; - const expected = { + const expected = { questionSetId: 'applicant', requested: true, reason: 'test reason', @@ -280,17 +323,22 @@ describe('removeAmendment', () => { amendmentController.removeAmendment(data, questionId); //Assert expect(initialLastName).toEqual(expected); - expect(Object.keys(data.amendmentIterations[1].questionAnswers).length).toBe(0); - expect(data.amendmentIterations[1].questionAnswers[questionId]).toBeFalsy(); + expect(dataRequest[0].amendmentIterations[1]).not.toBeFalsy(); + expect(data.amendmentIterations[1]).toBeFalsy(); }); }); describe('doesAmendmentExist', () => { // Arrange const data = _.cloneDeep(dataRequest); - const cases = [[data[0], 'lastName', true], [data[0], 'firstName', false], [{}, '', false], [data[1], 'firstName', false]]; + const cases = [ + [data[0], 'lastName', true], + [data[0], 'firstName', false], + [{}, 
'', false], + [data[1], 'firstName', false], + ]; test.each(cases)( - "given a data request object %p and %p as the question amended, returns %p for an amendment existing", + 'given a data request object %p and %p as the question amended, returns %p for an amendment existing', (data, questionId, expectedResult) => { // Act const result = amendmentController.doesAmendmentExist(data, questionId); @@ -304,12 +352,17 @@ describe('updateAmendment', () => { test('given a data request with an existing active amendment iteration, and an applicant updates their own existing amendment, then the existing amendment is updated', () => { // Arrange let data = _.cloneDeep(dataRequest[2]); - const questionId = 'lastName', answer = 'Smith', user = users.applicant, initialUpdatedDate = dataRequest[2].amendmentIterations[0].questionAnswers['lastName'].dateUpdated; + const questionId = 'lastName', + answer = 'Smith', + user = users.applicant, + initialUpdatedDate = dataRequest[2].amendmentIterations[0].questionAnswers['lastName'].dateUpdated; // Act data = amendmentController.updateAmendment(data, questionId, answer, user); // Assert expect(Object.keys(data.amendmentIterations[0].questionAnswers).length).toBe(1); - expect(new Date(data.amendmentIterations[0].questionAnswers['lastName']['dateUpdated']).getTime()).toBeGreaterThan(new Date(initialUpdatedDate).getTime()); + expect(new Date(data.amendmentIterations[0].questionAnswers['lastName']['dateUpdated']).getTime()).toBeGreaterThan( + new Date(initialUpdatedDate).getTime() + ); expect(data.amendmentIterations[0].questionAnswers['lastName']['answer']).toBe('Smith'); expect(data.amendmentIterations[0].questionAnswers['lastName']['updatedBy']).toBe('test applicant 1'); expect(data.amendmentIterations[0].questionAnswers['lastName']['updatedByUser']).toBe(user._id); @@ -317,14 +370,20 @@ describe('updateAmendment', () => { test('given a data request with an existing active amendment iteration, and a collaborator updates an amendment they did not create, then the existing amendment is updated', () => { // Arrange let data = _.cloneDeep(dataRequest[2]); - const questionId = 'lastName', answer = 'Smith', user = users.collaborator; - const { dateUpdated: initialUpdatedDate, updatedBy: initialUpdatedBy } = dataRequest[2].amendmentIterations[0].questionAnswers['lastName'] + const questionId = 'lastName', + answer = 'Smith', + user = users.collaborator; + const { dateUpdated: initialUpdatedDate, updatedBy: initialUpdatedBy } = dataRequest[2].amendmentIterations[0].questionAnswers[ + 'lastName' + ]; // Act data = amendmentController.updateAmendment(data, questionId, answer, user); // Assert expect(initialUpdatedBy).toBe('test applicant 1'); expect(Object.keys(data.amendmentIterations[0].questionAnswers).length).toBe(1); - expect(new Date(data.amendmentIterations[0].questionAnswers['lastName']['dateUpdated']).getTime()).toBeGreaterThan(new Date(initialUpdatedDate).getTime()); + expect(new Date(data.amendmentIterations[0].questionAnswers['lastName']['dateUpdated']).getTime()).toBeGreaterThan( + new Date(initialUpdatedDate).getTime() + ); expect(data.amendmentIterations[0].questionAnswers['lastName']['answer']).toBe('Smith'); expect(data.amendmentIterations[0].questionAnswers['lastName']['updatedBy']).toBe('test collaborator 1'); expect(data.amendmentIterations[0].questionAnswers['lastName']['updatedByUser']).toBe(user._id); @@ -333,7 +392,9 @@ describe('updateAmendment', () => { test('given a data request with an existing active amendment iteration, and an applicant 
updates a non-existing amendment which is an invalid operation, then the access record is unchanged', () => { // Arrange let data = _.cloneDeep(dataRequest[2]); - const questionId = 'firstName', answer = 'James', user = users.applicant; + const questionId = 'firstName', + answer = 'James', + user = users.applicant; // Act data = amendmentController.updateAmendment(data, questionId, answer, user); // Assert @@ -344,7 +405,9 @@ describe('updateAmendment', () => { test('given a data request without an active amendment iteration, and an applicant updates an existing amendment which is an invalid operation, then the access record is unchanged', () => { // Arrange let data = _.cloneDeep(dataRequest[1]); - const questionId = 'firstName', answer = 'James', user = users.applicant; + const questionId = 'firstName', + answer = 'James', + user = users.applicant; // Act data = amendmentController.updateAmendment(data, questionId, answer, user); // Assert @@ -503,14 +566,114 @@ describe('countUnsubmittedAmendments', () => { describe('getLatestQuestionAnswer', () => { // Arrange let data = _.cloneDeep(dataRequest); - const cases = [[data[0], 'firstName', 'James'], [data[0], 'lastName', 'Smyth'], [data[2], 'lastName', 'Connilly'], [data[3], 'country', ''], [data[3], 'firstName', 'Mark']]; + const cases = [ + [data[0], 'firstName', 'James'], + [data[0], 'lastName', 'Smyth'], + [data[2], 'lastName', 'Connilly'], + [data[3], 'country', ''], + [data[3], 'firstName', 'Mark'], + ]; test.each(cases)( - "given a data access record with multiple amendment versions, the latest previous answer is returned", + 'given a data access record with multiple amendment versions, the latest previous answer is returned', (accessRecord, questionId, expectedResult) => { + // Act + const result = amendmentController.getLatestQuestionAnswer(accessRecord, questionId); + // Assert + expect(result).toBe(expectedResult); + } + ); +}); + +describe('revertAmendmentAnswer', () => { + test('given a data access record with an unsubmitted amendment, and the applicant reverts the amendment answer, then the updated answer is removed from the current iteration', () => { + // Arrange + let data = _.cloneDeep(dataRequest[4]); + let questionId = 'country'; + let user = users.applicant; + // Act + amendmentController.revertAmendmentAnswer(data, questionId, user); + // Assert + expect(dataRequest[4].amendmentIterations[2].questionAnswers[questionId].answer).not.toBeFalsy(); + expect(data.amendmentIterations[2].questionAnswers[questionId].answer).toBeFalsy(); + }); + test('given an invalid revert amendment operation occurs for an existing question with no answer to remove, then the access record remains unchanged', () => { + // Arrange + let data = _.cloneDeep(dataRequest[4]); + let questionId = 'reasonforaccess'; + let user = users.applicant; + // Act + amendmentController.revertAmendmentAnswer(data, questionId, user); + // Assert + expect(dataRequest[4]).toEqual(data); + }); + test('given an invalid revert amendment operation occurs on a data access record, then the access record remains unchanged', () => { + // Arrange + let data = _.cloneDeep(dataRequest[4]); + let questionId = 'firstname'; + let user = users.applicant; // Act - const result = amendmentController.getLatestQuestionAnswer(accessRecord, questionId); + amendmentController.revertAmendmentAnswer(data, questionId, user); // Assert - expect(result).toBe(expectedResult); + expect(dataRequest[4]).toEqual(data); + }); +}); + +describe('injectNavigationAmendment', () => { + // Arrange + 
const cases = [ + [_.cloneDeep(dataRequest[0].jsonSchema), 'applicant', 'safePeople', constants.userTypes.CUSTODIAN, 'completed', 'returned', {"flag": "WARNING"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'applicant', 'safePeople', constants.userTypes.CUSTODIAN, 'incomplete', 'returned', {"flag": "WARNING"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'principleInvestigator', 'safePeople', constants.userTypes.CUSTODIAN, 'completed', 'submitted', {"flag": "SUCCESS"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'principleInvestigator', 'safePeople', constants.userTypes.CUSTODIAN, 'incomplete', 'inProgress', {"flag": "WARNING"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'applicant', 'safePeople', constants.userTypes.APPLICANT, 'completed', 'returned', {"flag": "SUCCESS"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'applicant', 'safePeople', constants.userTypes.APPLICANT, 'incomplete', 'returned', {"flag": "DANGER"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'principleInvestigator', 'safePeople', constants.userTypes.APPLICANT, 'completed', 'submitted', {"flag": "SUCCESS"}, {}], + [_.cloneDeep(dataRequest[0].jsonSchema), 'principleInvestigator', 'safePeople', constants.userTypes.APPLICANT, 'incomplete', 'submitted', {"flag": "DANGER"}, {}] + ]; + test.each(cases)( + 'given a valid json schema, and a requested amendment, then the corresponding navigation panels are highlighted to reflect the amendment status', + (jsonSchema, questionSetId, pageId, userType, completed, iterationStatus, expectedPageResult, expectedPanelResult) => { + // Act + const result = amendmentController.injectNavigationAmendment(jsonSchema, questionSetId, userType, completed, iterationStatus); + // Assert + expect(result.pages.find(page => page.pageId === pageId)).toMatchObject(expectedPageResult); + expect(result.questionPanels.find(panel => panel.panelId === questionSetId)).toMatchObject(expectedPageResult); } ); -}); \ No newline at end of file + test('given a valid json schema, containing multiple amendments with differing statuses, then the corresponding navigation panels are highlighted to reflect the amendment status', () => { + // Arrange + let data = _.cloneDeep(dataRequest[0]); + let pageId = 'safePeople'; + // Act + let jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'applicant', constants.userTypes.APPLICANT, 'completed', 'submitted'); + jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'principleInvestigator', constants.userTypes.APPLICANT, 'incomplete', 'submitted'); + // Assert + expect(jsonSchema.pages.find(page => page.pageId === pageId)).toMatchObject({"flag": "DANGER"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'applicant')).toMatchObject({"flag": "SUCCESS"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'principleInvestigator')).toMatchObject({"flag": "DANGER"}); + }); + test('given a valid json schema, containing multiple amendments with incomplete statuses, then the corresponding navigation panels are highlighted as danger', () => { + // Arrange + let data = _.cloneDeep(dataRequest[0]); + let pageId = 'safePeople'; + // Act + let jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'applicant', constants.userTypes.APPLICANT, 'incomplete', 'submitted'); + jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'principleInvestigator', constants.userTypes.APPLICANT, 'incomplete', 'submitted'); + // Assert + 
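// Both panels were injected as 'incomplete' against a submitted iteration in the applicant view,
// which constants.navigationFlags resolves to a DANGER status; injectNavigationAmendment also
// copies the flag onto the parent page unless that page already holds a DANGER or WARNING flag,
// so the shared safePeople page remains DANGER.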
expect(jsonSchema.pages.find(page => page.pageId === pageId)).toMatchObject({"flag": "DANGER"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'applicant')).toMatchObject({"flag": "DANGER"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'principleInvestigator')).toMatchObject({"flag": "DANGER"}); + }); + test('given a valid json schema, containing multiple amendments with entirely complete statuses, then the corresponding navigation panels are highlighted as success', () => { + // Arrange + let data = _.cloneDeep(dataRequest[0]); + let pageId = 'safePeople'; + // Act + let jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'applicant', constants.userTypes.APPLICANT, 'completed', 'submitted'); + jsonSchema = amendmentController.injectNavigationAmendment(data.jsonSchema, 'principleInvestigator', constants.userTypes.APPLICANT, 'completed', 'submitted'); + // Assert + expect(jsonSchema.pages.find(page => page.pageId === pageId)).toMatchObject({"flag": "SUCCESS"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'applicant')).toMatchObject({"flag": "SUCCESS"}); + expect(jsonSchema.questionPanels.find(panel => panel.panelId === 'principleInvestigator')).toMatchObject({"flag": "SUCCESS"}); + }); +}); diff --git a/src/resources/datarequest/amendment/amendment.controller.js b/src/resources/datarequest/amendment/amendment.controller.js index 91fd7506..c9ba0391 100644 --- a/src/resources/datarequest/amendment/amendment.controller.js +++ b/src/resources/datarequest/amendment/amendment.controller.js @@ -2,9 +2,12 @@ import { DataRequestModel } from '../datarequest.model'; import { AmendmentModel } from './amendment.model'; import constants from '../../utilities/constants.util'; import helperUtil from '../../utilities/helper.util'; -import _ from 'lodash'; +import datarequestUtil from '../utils/datarequest.util'; +import teamController from '../../team/team.controller'; +import notificationBuilder from '../../utilities/notificationBuilder'; +import emailGenerator from '../../utilities/emailGenerator.util'; -const teamController = require('../../team/team.controller'); +import _ from 'lodash'; //POST api/v1/data-access-request/:id/amendments const setAmendment = async (req, res) => { @@ -13,85 +16,209 @@ const setAmendment = async (req, res) => { const { params: { id }, } = req; - let { _id: userId } = req.user; - let { - questionId = '', - questionSetId = '', - mode = '', - reason = '', - answer = '', - } = req.body; + let { questionId, questionSetId, mode, reason, answer } = req.body; if (_.isEmpty(questionId) || _.isEmpty(questionSetId)) { return res.status(400).json({ success: false, - message: - 'You must supply the unique identifiers for the question requiring amendment', + message: 'You must supply the unique identifiers for the question requiring amendment', }); } // 2. Retrieve DAR from database - let accessRecord = await DataRequestModel.findOne({ _id: id }) - .select('publisher amendmentIterations') - .populate({ + let accessRecord = await DataRequestModel.findOne({ _id: id }).populate([ + { + path: 'datasets dataset', + }, + { path: 'publisherObj', - select: '_id', populate: { path: 'team', + populate: { + path: 'users', + }, }, + }, + ]); + if (!accessRecord) { + return res.status(404).json({ status: 'error', message: 'Application not found.' }); + } + // 3. 
If application is not in review or submitted, amendments cannot be made + if ( + accessRecord.applicationStatus !== constants.applicationStatuses.SUBMITTED && + accessRecord.applicationStatus !== constants.applicationStatuses.INREVIEW + ) { + return res.status(400).json({ + success: false, + message: 'This application is not within a reviewable state and amendments cannot be made or requested at this time.', + }); + } + // 4. Get the requesting users permission levels + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord.toObject(), req.user.id, req.user._id); + // 5. Get the current iteration amendment party + let validParty = false; + let activeParty = getAmendmentIterationParty(accessRecord); + // 6. Add/remove/revert amendment depending on mode + if (authorised) { + switch (mode) { + case constants.amendmentModes.ADDED: + authorised = userType === constants.userTypes.CUSTODIAN; + validParty = activeParty === constants.userTypes.CUSTODIAN; + if (!authorised || !validParty) { + break; + } + addAmendment(accessRecord, questionId, questionSetId, answer, reason, req.user, true); + break; + case constants.amendmentModes.REMOVED: + authorised = userType === constants.userTypes.CUSTODIAN; + validParty = activeParty === constants.userTypes.CUSTODIAN; + if (!authorised || !validParty) { + break; + } + removeAmendment(accessRecord, questionId); + break; + case constants.amendmentModes.REVERTED: + authorised = userType === constants.userTypes.APPLICANT; + validParty = activeParty === constants.userTypes.APPLICANT; + if (!authorised || !validParty) { + break; + } + revertAmendmentAnswer(accessRecord, questionId, req.user); + break; + } + } + // 7. Return unauthorised message if the user did not have sufficient access for action requested + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 8. Return bad request if the opposite party is editing the application + if (!validParty) { + return res.status(400).json({ + status: 'failure', + message: 'You cannot make or request amendments to this application as the opposite party are currently responsible for it.', }); + } + // 9. Save changes to database + await accessRecord.save(async err => { + if (err) { + console.error(err); + return res.status(500).json({ status: 'error', message: err }); + } else { + // 10. Update json schema and question answers with modifications since original submission + let accessRecordObj = accessRecord.toObject(); + accessRecordObj.questionAnswers = JSON.parse(accessRecordObj.questionAnswers); + accessRecordObj.jsonSchema = JSON.parse(accessRecordObj.jsonSchema); + accessRecordObj = injectAmendments(accessRecordObj, userType, req.user); + // 11. Append question actions depending on user type and application status + let userRole = activeParty === constants.userTypes.CUSTODIAN ? constants.roleTypes.MANAGER : ''; + accessRecordObj.jsonSchema = datarequestUtil.injectQuestionActions( + accessRecordObj.jsonSchema, + userType, + accessRecordObj.applicationStatus, + userRole + ); + // 12. 
Count the number of answered/unanswered amendments + const { answeredAmendments = 0, unansweredAmendments = 0 } = countUnsubmittedAmendments(accessRecord, userType); + return res.status(200).json({ + success: true, + accessRecord: { + amendmentIterations: accessRecordObj.amendmentIterations, + questionAnswers: accessRecordObj.questionAnswers, + jsonSchema: accessRecordObj.jsonSchema, + answeredAmendments, + unansweredAmendments, + }, + }); + } + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred updating the application amendment', + }); + } +}; + +//POST api/v1/data-access-request/:id/requestAmendments +const requestAmendments = async (req, res) => { + try { + // 1. Get the required request params + const { + params: { id }, + } = req; + // 2. Retrieve DAR from database + let accessRecord = await DataRequestModel.findOne({ _id: id }) + .select({ + _id: 1, + publisher: 1, + amendmentIterations: 1, + datasetIds: 1, + dataSetId: 1, + userId: 1, + authorIds: 1, + applicationStatus: 1, + aboutApplication: 1, + dateSubmitted: 1, + }) + .populate([ + { + path: 'datasets dataset mainApplicant authors', + }, + { + path: 'publisherObj', + select: '_id', + populate: { + path: 'team', + populate: { + path: 'users', + }, + }, + }, + ]); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. Check permissions of user is manager of associated team let authorised = false; - let { - publisherObj: { team }, - } = accessRecord; - authorised = teamController.checkTeamPermissions( - constants.roleTypes.REVIEWER, - team.toObject(), - userId - ); - // 4. Refuse access if not authorised + if (_.has(accessRecord.toObject(), 'publisherObj.team')) { + const { team } = accessRecord.publisherObj; + authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, team.toObject(), req.user._id); + } if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } - // 5. Ensure the current iteration is not being modified by applicants - if ( - getAmendmentIterationParty(accessRecord) === constants.userTypes.APPLICANT - ) { + // 4. Ensure single datasets are mapped correctly into array (backward compatibility for single dataset applications) + if (_.isEmpty(accessRecord.datasets)) { + accessRecord.datasets = [accessRecord.dataset]; + } + // 5. Get the current iteration amendment party and return bad request if the opposite party is editing the application + const activeParty = getAmendmentIterationParty(accessRecord); + if (activeParty !== constants.userTypes.CUSTODIAN) { return res.status(400).json({ status: 'failure', - message: - 'You cannot request amendments to this application as the applicant(s) are currently editing the submission.', + message: 'You cannot make or request amendments to this application as the applicant(s) are amending the current version.', }); } - // 6. Add or remove amendment depending on mode - switch (mode) { - case constants.amendmentModes.ADDED: - addAmendment( - accessRecord, - questionId, - questionSetId, - answer, - reason, - req.user, - true - ); - break; - case constants.amendmentModes.REMOVED: - removeAmendment(accessRecord, questionId); - break; + // 6. 
Check some amendments exist to be submitted to the applicant(s) + const { unansweredAmendments } = countUnsubmittedAmendments(accessRecord, constants.userTypes.CUSTODIAN); + if (unansweredAmendments === 0) { + return res.status(400).json({ + status: 'failure', + message: 'You cannot submit requested amendments as none have been requested in the current version', + }); } - // 7. Save changes to database - await accessRecord.save(async (err) => { + // 7. Find current amendment iteration index + const index = getLatestAmendmentIterationIndex(accessRecord); + // 8. Update amendment iteration status to returned, handing responsibility over to the applicant(s) + accessRecord.amendmentIterations[index].dateReturned = new Date(); + accessRecord.amendmentIterations[index].returnedBy = req.user._id; + // 9. Save changes to database + await accessRecord.save(async err => { if (err) { console.error(err); - res.status(500).json({ status: 'error', message: err }); + return res.status(500).json({ status: 'error', message: err }); } else { + // 10. Send update request notifications + createNotifications(constants.notificationTypes.RETURNED, accessRecord); return res.status(200).json({ success: true, }); @@ -101,20 +228,12 @@ const setAmendment = async (req, res) => { console.error(err.message); return res.status(500).json({ success: false, - message: 'An error occurred updating the application amendment', + message: 'An error occurred attempting to submit the requested updates', }); } }; -const addAmendment = ( - accessRecord, - questionId, - questionSetId, - answer, - reason, - user, - requested -) => { +const addAmendment = (accessRecord, questionId, questionSetId, answer, reason, user, requested) => { // 1. Create new amendment object with key representing the questionId let amendment = { [`${questionId}`]: new AmendmentModel({ @@ -122,12 +241,12 @@ const addAmendment = ( requested, reason, answer, - requestedBy: requested ? `${user.firstname} ${user.lastname}` : '', - requestedByUser: requested ? user._id : '', - dateRequested: requested ? Date.now() : '', - updatedBy: requested ? '' : `${user.firstname} ${user.lastname}`, - updatedByUser: requested ? '' : user._id, - dateUpdated: requested ? '' : Date.now(), + requestedBy: requested ? `${user.firstname} ${user.lastname}` : undefined, + requestedByUser: requested ? user._id : undefined, + dateRequested: requested ? Date.now() : undefined, + updatedBy: requested ? undefined : `${user.firstname} ${user.lastname}`, + updatedByUser: requested ? undefined : user._id, + dateUpdated: requested ? undefined : Date.now(), }), }; // 2. Find the index of the latest amendment iteration of the DAR @@ -141,16 +260,13 @@ const addAmendment = ( } else { // 4. If new iteration has been trigger by applicant given requested is false, then we automatically return the iteration let amendmentIteration = { - dateReturned: requested ? '' : Date.now(), - returnedBy: requested ? '' : user._id, + dateReturned: requested ? undefined : Date.now(), + returnedBy: requested ? undefined : user._id, dateCreated: Date.now(), createdBy: user._id, questionAnswers: { ...amendment }, }; - accessRecord.amendmentIterations = [ - ...accessRecord.amendmentIterations, - amendmentIteration, - ]; + accessRecord.amendmentIterations = [...accessRecord.amendmentIterations, amendmentIteration]; } }; @@ -158,40 +274,32 @@ const updateAmendment = (accessRecord, questionId, answer, user) => { // 1. 
Locate amendment in current iteration const currentIterationIndex = getLatestAmendmentIterationIndex(accessRecord); // 2. Return unmoodified record if invalid update - if ( - currentIterationIndex === -1 || - _.isNil( - accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[ - questionId - ] - ) - ) { + if (currentIterationIndex === -1 || _.isNil(accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[questionId])) { return accessRecord; } // 3. Check if the update amendment reflects a change since the last version of the answer if (currentIterationIndex > -1) { const latestAnswer = getLatestQuestionAnswer(accessRecord, questionId); + const requested = accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[questionId].requested || false; if (!_.isNil(latestAnswer)) { - if ( - answer === latestAnswer || - helperUtil.arraysEqual(answer, latestAnswer) - ) { - removeAmendment(accessRecord, questionId); + if (answer === latestAnswer || helperUtil.arraysEqual(answer, latestAnswer)) { + if (requested) { + // Retain the requested amendment but remove the answer + delete accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[questionId].answer; + } else { + removeAmendment(accessRecord, questionId); + } return accessRecord; } - } else if (_.isNil(latestAnswer) && _.isEmpty(answer)) { + } else if (_.isNil(latestAnswer) && _.isEmpty(answer) && !requested) { // Remove the amendment if there was no previous answer and the latest update is empty removeAmendment(accessRecord, questionId); return accessRecord; } } // 4. Find and update the question with the new answer - accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[ - questionId - ] = { - ...accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[ - questionId - ], + accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[questionId] = { + ...accessRecord.amendmentIterations[currentIterationIndex].questionAnswers[questionId], answer, updatedBy: `${user.firstname} ${user.lastname}`, updatedByUser: user._id, @@ -206,18 +314,17 @@ const removeAmendment = (accessRecord, questionId) => { let index = getLatestAmendmentIterationIndex(accessRecord); // 2. Remove the key and associated object from the current iteration if it exists if (index !== -1) { - accessRecord.amendmentIterations[index].questionAnswers = _.omit( - accessRecord.amendmentIterations[index].questionAnswers, - questionId - ); + accessRecord.amendmentIterations[index].questionAnswers = _.omit(accessRecord.amendmentIterations[index].questionAnswers, questionId); + // 3. If question answers is now empty, remove the iteration + _.remove(accessRecord.amendmentIterations, amendmentIteration => { + return _.isEmpty(amendmentIteration.questionAnswers); + }); } }; const doesAmendmentExist = (accessRecord, questionId) => { // 1. 
Get current amendment iteration - const latestIteration = getCurrentAmendmentIteration( - accessRecord.amendmentIterations - ); + const latestIteration = getCurrentAmendmentIteration(accessRecord.amendmentIterations); if (_.isNil(latestIteration) || _.isNil(latestIteration.questionAnswers)) { return false; } @@ -225,13 +332,7 @@ const doesAmendmentExist = (accessRecord, questionId) => { return latestIteration.questionAnswers.hasOwnProperty(questionId); }; -const handleApplicantAmendment = ( - accessRecord, - questionId, - questionSetId, - answer = '', - user -) => { +const handleApplicantAmendment = (accessRecord, questionId, questionSetId, answer = '', user) => { // 1. Check if an amendment already exists for the question let isExisting = doesAmendmentExist(accessRecord, questionId); // 2. Update existing @@ -245,38 +346,25 @@ const handleApplicantAmendment = ( if (_.isNil(latestAnswer)) { performAdd = true; // 5. If a previous answer exists, ensure it is different to the most recent answer before adding - } else if ( - answer !== latestAnswer || - !helperUtil.arraysEqual(answer, latestAnswer) - ) { + } else if (answer !== latestAnswer || !helperUtil.arraysEqual(answer, latestAnswer)) { performAdd = true; } if (performAdd) { // 6. Add new amendment otherwise - addAmendment( - accessRecord, - questionId, - questionSetId, - answer, - '', - user, - false - ); + addAmendment(accessRecord, questionId, questionSetId, answer, '', user, false); } } // 7. Update the amendment count - let { - unansweredAmendments = 0, - answeredAmendments = 0, - } = countUnsubmittedAmendments(accessRecord, constants.userTypes.APPLICANT); + let { unansweredAmendments = 0, answeredAmendments = 0 } = countUnsubmittedAmendments(accessRecord, constants.userTypes.APPLICANT); accessRecord.unansweredAmendments = unansweredAmendments; accessRecord.answeredAmendments = answeredAmendments; + accessRecord.dirtySchema = true; // 8. Return updated access record return accessRecord; }; -const getLatestAmendmentIterationIndex = (accessRecord) => { +const getLatestAmendmentIterationIndex = accessRecord => { // 1. Guard for incorrect type passed let { amendmentIterations = [] } = accessRecord; if (_.isEmpty(amendmentIterations)) { @@ -286,26 +374,20 @@ const getLatestAmendmentIterationIndex = (accessRecord) => { let mostRecentDate = new Date( Math.max.apply( null, - amendmentIterations.map((iteration) => - _.isUndefined(iteration.dateSubmitted) - ? new Date(iteration.dateCreated) - : '' - ) + amendmentIterations.map(iteration => (_.isUndefined(iteration.dateSubmitted) ? new Date(iteration.dateCreated) : '')) ) ); // 3. Pull out the related object using a filter to find the object with the latest date - return amendmentIterations.findIndex((iteration) => { + return amendmentIterations.findIndex(iteration => { let date = new Date(iteration.dateCreated); return date.getTime() == mostRecentDate.getTime(); }); }; -const getAmendmentIterationParty = (accessRecord) => { +const getAmendmentIterationParty = accessRecord => { // 1. Look for an amendment iteration that is in flight // An empty date submitted with populated date returned indicates that the current correction iteration is now with the applicants - let index = accessRecord.amendmentIterations.findIndex( - (v) => _.isUndefined(v.dateSubmitted) && !_.isUndefined(v.dateReturned) - ); + let index = accessRecord.amendmentIterations.findIndex(v => _.isUndefined(v.dateSubmitted) && !_.isUndefined(v.dateReturned)); // 2. 
Deduce the user type from the current iteration state if (index === -1) { return constants.userTypes.CUSTODIAN; @@ -319,75 +401,146 @@ const filterAmendments = (accessRecord = {}, userType) => { return {}; } let { amendmentIterations = [] } = accessRecord; - // 1. Extract all revelant iteration objects and answers based on the user type + // 1. Extract all relevant iteration objects and answers based on the user type // Applicant should only see requested amendments that have been returned by the custodian if (userType === constants.userTypes.APPLICANT) { - amendmentIterations = [...amendmentIterations].filter((iteration) => { + amendmentIterations = [...amendmentIterations].filter(iteration => { return !_.isUndefined(iteration.dateReturned); }); } else if (userType === constants.userTypes.CUSTODIAN) { // Custodian should only see amendment answers that have been submitted by the applicants - amendmentIterations = [...amendmentIterations].map((iteration) => { - if (_.isUndefined(iteration.dateSubmitted)) { + amendmentIterations = [...amendmentIterations].map(iteration => { + if (_.isUndefined(iteration.dateSubmitted) && !_.isNil(iteration.questionAnswers)) { iteration = removeIterationAnswers(accessRecord, iteration); } return iteration; }); } - // 2. Return relevant iteratiions + // 2. Return relevant iterations return amendmentIterations; }; -const injectAmendments = (accessRecord, userType) => { - // 1. Filter out amendments that have not yet been exposed to the opposite party +const injectAmendments = (accessRecord, userType, user) => { + // 1. Get latest iteration created by Custodian + if (accessRecord.amendmentIterations.length === 0) { + return accessRecord; + } + const lastIndex = _.findLastIndex(accessRecord.amendmentIterations); + let latestIteration = accessRecord.amendmentIterations[lastIndex]; + const { dateReturned } = latestIteration; + // 2. Applicants should see previous amendment iteration requests until current iteration has been returned with new requests + if ( + lastIndex > 0 && (userType === constants.userTypes.APPLICANT && _.isNil(dateReturned)) || + (userType === constants.userTypes.CUSTODIAN && _.isNil(latestIteration.questionAnswers)) + ) { + latestIteration = accessRecord.amendmentIterations[lastIndex - 1]; + } else if (lastIndex === 0 && userType === constants.userTypes.APPLICANT && _.isNil(dateReturned)) { + return accessRecord; + } + // 3. Update schema if there is a new iteration + const { publisher = 'Custodian' } = accessRecord; + if(!_.isNil(latestIteration)) { + accessRecord.jsonSchema = formatSchema(accessRecord.jsonSchema, latestIteration, userType, user, publisher); + } + // 4. Filter out amendments that have not yet been exposed to the opposite party let amendmentIterations = filterAmendments(accessRecord, userType); - // 2. Update the question answers to reflect all the changes that have been made in later iterations - accessRecord.questionAnswers = formatQuestionAnswers( - accessRecord.questionAnswers, - amendmentIterations - ); - // 3. Add amendment requests from latest iteration and append historic responses - //accessRecord.jsonSchema = formatSchema(JSON.parse(accessRecord.jsonSchema), amendmentIterations); - // 4. Return the updated access record + // 5. Update the question answers to reflect all the changes that have been made in later iterations + accessRecord.questionAnswers = formatQuestionAnswers(accessRecord.questionAnswers, amendmentIterations); + // 6. 
Return the updated access record return accessRecord; }; -//const formatSchema = (jsonSchema, amendmentIterations) => { -// 1. Add history for all questions in previous iterations -// TODO for versioning -// 2. Get latest iteration to add amendment requests -//const latestIteration = getCurrentAmendmentIteration(amendmentIterations); -// 3. Loop through each key in the iteration to append review indicator -// Version 2 placeholderr -//return jsonSchema; -//}; +const formatSchema = (jsonSchema, latestAmendmentIteration, userType, user, publisher) => { + const { questionAnswers = {}, dateSubmitted, dateReturned } = latestAmendmentIteration; + if(_.isEmpty(questionAnswers)) { + return jsonSchema; + } + // Loop through each amendment + for (let questionId in questionAnswers) { + const { questionSetId, answer } = questionAnswers[questionId]; + // 1. Update parent/child navigation with flags for amendments + const amendmentCompleted = _.isNil(answer) ? 'incomplete' : 'completed'; + const iterationStatus = !_.isNil(dateSubmitted) ? 'submitted' : !_.isNil(dateReturned) ? 'returned' : 'inProgress'; + jsonSchema = injectNavigationAmendment(jsonSchema, questionSetId, userType, amendmentCompleted, iterationStatus); + // 2. Update questions with alerts/actions + jsonSchema = injectQuestionAmendment( + jsonSchema, + questionId, + questionAnswers[questionId], + userType, + amendmentCompleted, + iterationStatus, + user, + publisher + ); + } + return jsonSchema; +}; + +const injectQuestionAmendment = (jsonSchema, questionId, amendment, userType, completed, iterationStatus, user, publisher) => { + const { questionSetId } = amendment; + // 1. Find question set containing question + const qsIndex = jsonSchema.questionSets.findIndex(qs => qs.questionSetId === questionSetId); + if (qsIndex === -1) { + return jsonSchema; + } + let { questions } = jsonSchema.questionSets[qsIndex]; + // 2. Find question object + let question = datarequestUtil.findQuestion(questions, questionId); + if (_.isEmpty(question) || _.isNil(question.input)) { + return jsonSchema; + } + // 3. Create question alert object to highlight amendment + const questionAlert = datarequestUtil.buildQuestionAlert(userType, iterationStatus, completed, amendment, user, publisher); + // 4. Update question to contain amendment state + const readOnly = userType === constants.userTypes.CUSTODIAN || iterationStatus === 'submitted'; + question = datarequestUtil.setQuestionState(question, questionAlert, readOnly); + // 5. Update jsonSchema with updated question + jsonSchema.questionSets[qsIndex].questions = datarequestUtil.updateQuestion(questions, question); + // 6. Return updated schema + return jsonSchema; +}; + +const injectNavigationAmendment = (jsonSchema, questionSetId, userType, completed, iterationStatus) => { + // 1. Find question in schema + const qpIndex = jsonSchema.questionPanels.findIndex(qp => qp.panelId === questionSetId); + if (qpIndex === -1) { + return jsonSchema; + } + const pageIndex = jsonSchema.pages.findIndex(page => page.pageId === jsonSchema.questionPanels[qpIndex].pageId); + if (pageIndex === -1) { + return jsonSchema; + } + // 2. Update child navigation item (panel) + jsonSchema.questionPanels[qpIndex].flag = constants.navigationFlags[userType][iterationStatus][completed].status; + // 3. 
Update parent navigation item (page) + const { flag: pageFlag = '' } = jsonSchema.pages[pageIndex]; + if (pageFlag !== 'DANGER' && pageFlag !== 'WARNING') { + jsonSchema.pages[pageIndex].flag = constants.navigationFlags[userType][iterationStatus][completed].status; + } + // 4. Return schema + return jsonSchema; +}; const getLatestQuestionAnswer = (accessRecord, questionId) => { // 1. Include original submission of question answer - let parsedQuestionAnwsers = {}; + let parsedQuestionAnswers = {}; if (typeof accessRecord.questionAnswers === 'string') { - parsedQuestionAnwsers = JSON.parse(accessRecord.questionAnswers); + parsedQuestionAnswers = JSON.parse(accessRecord.questionAnswers); } else { - parsedQuestionAnwsers = _.cloneDeep(accessRecord.questionAnswers); + parsedQuestionAnswers = _.cloneDeep(accessRecord.questionAnswers); } let initialSubmission = { questionAnswers: { [`${questionId}`]: { - answer: parsedQuestionAnwsers[questionId], + answer: parsedQuestionAnswers[questionId], dateUpdated: accessRecord.dateSubmitted, }, }, }; - let relevantVersions = [ - initialSubmission, - ...accessRecord.amendmentIterations, - ]; + let relevantVersions = [initialSubmission, ...accessRecord.amendmentIterations]; if (relevantVersions.length > 1) { - relevantVersions = _.slice( - relevantVersions, - 0, - relevantVersions.length - 1 - ); + relevantVersions = _.slice(relevantVersions, 0, relevantVersions.length - 1); } // 2. Reduce all versions to find latest instance of question answer const latestAnswers = relevantVersions.reduce((arr, version) => { @@ -396,17 +549,12 @@ const getLatestQuestionAnswer = (accessRecord, questionId) => { return arr; } let { answer, dateUpdated } = version.questionAnswers[questionId]; - let foundIndex = arr.findIndex( - (amendment) => amendment.questionId === questionId - ); + let foundIndex = arr.findIndex(amendment => amendment.questionId === questionId); // 4. If the amendment does not exist in our array of latest answers, add it if (foundIndex === -1) { arr.push({ questionId, answer, dateUpdated }); // 5. Otherwise update the amendment if this amendment was made more recently - } else if ( - new Date(dateUpdated).getTime() > - new Date(arr[foundIndex].dateUpdated).getTime() - ) { + } else if (new Date(dateUpdated).getTime() > new Date(arr[foundIndex].dateUpdated).getTime()) { arr[foundIndex] = { questionId, answer, dateUpdated }; } return arr; @@ -420,25 +568,23 @@ const getLatestQuestionAnswer = (accessRecord, questionId) => { }; const formatQuestionAnswers = (questionAnswers, amendmentIterations) => { + if (_.isNil(amendmentIterations) || _.isEmpty(amendmentIterations)) { + return questionAnswers; + } // 1. Reduce all amendment iterations to find latest answers const latestAnswers = amendmentIterations.reduce((arr, iteration) => { if (_.isNil(iteration.questionAnswers)) { return arr; } // 2. Loop through each amendment key per iteration - Object.keys(iteration.questionAnswers).forEach((questionId) => { + Object.keys(iteration.questionAnswers).forEach(questionId => { let { answer, dateUpdated } = iteration.questionAnswers[questionId]; - let foundIndex = arr.findIndex( - (amendment) => amendment.questionId === questionId - ); + let foundIndex = arr.findIndex(amendment => amendment.questionId === questionId); // 3. If the amendment does not exist in our array of latest answers, add it if (foundIndex === -1) { arr.push({ questionId, answer, dateUpdated }); // 4. 
Otherwise update the amendment if this amendment was made more recently - } else if ( - new Date(dateUpdated).getTime() > - new Date(arr[foundIndex].dateUpdated).getTime() - ) { + } else if (new Date(dateUpdated).getTime() > new Date(arr[foundIndex].dateUpdated).getTime()) { arr[foundIndex] = { questionId, answer, dateUpdated }; } }); @@ -446,35 +592,29 @@ const formatQuestionAnswers = (questionAnswers, amendmentIterations) => { }, []); // 5. Format data correctly for question answers const formattedLatestAnswers = [...latestAnswers].reduce((obj, item) => { - obj[item.questionId] = item.answer; + if (!_.isNil(item.answer)) { + obj[item.questionId] = item.answer; + } return obj; }, {}); // 6. Return combined object return { ...questionAnswers, ...formattedLatestAnswers }; }; -const getCurrentAmendmentIteration = (amendmentIterations) => { +const getCurrentAmendmentIteration = amendmentIterations => { // 1. Guard for incorrect type passed - if ( - _.isEmpty(amendmentIterations) || - _.isNull(amendmentIterations) || - _.isUndefined(amendmentIterations) - ) { + if (_.isEmpty(amendmentIterations) || _.isNull(amendmentIterations) || _.isUndefined(amendmentIterations)) { return undefined; } // 2. Find the latest unsubmitted date created in the amendment iterations array let mostRecentDate = new Date( Math.max.apply( null, - amendmentIterations.map((iteration) => - _.isUndefined(iteration.dateSubmitted) - ? new Date(iteration.dateCreated) - : '' - ) + amendmentIterations.map(iteration => (_.isUndefined(iteration.dateSubmitted) ? new Date(iteration.dateCreated) : '')) ) ); // 3. Pull out the related object using a filter to find the object with the latest date - let mostRecentObject = amendmentIterations.filter((iteration) => { + let mostRecentObject = amendmentIterations.filter(iteration => { let date = new Date(iteration.dateCreated); return date.getTime() == mostRecentDate.getTime(); })[0]; @@ -484,16 +624,13 @@ const getCurrentAmendmentIteration = (amendmentIterations) => { const removeIterationAnswers = (accessRecord = {}, iteration) => { // 1. Guard for invalid object passed - if (!iteration || !iteration.questionAnswers || _.isEmpty(accessRecord)) { + if (!iteration || _.isEmpty(accessRecord)) { return undefined; } // 2. Loop through each question answer by key (questionId) - Object.keys(iteration.questionAnswers).forEach((key) => { + Object.keys(iteration.questionAnswers).forEach(key => { // 3. Fetch the previous answer - iteration.questionAnswers[key]['answer'] = getLatestQuestionAnswer( - accessRecord, - key - ); + iteration.questionAnswers[key]['answer'] = getLatestQuestionAnswer(accessRecord, key); }); // 4. Return answer stripped iteration object return iteration; @@ -524,35 +661,165 @@ const countUnsubmittedAmendments = (accessRecord, userType) => { if ( index === -1 || _.isNil(accessRecord.amendmentIterations[index].questionAnswers) || - (_.isNil(accessRecord.amendmentIterations[index].dateSubmitted) && - userType === constants.userTypes.CUSTODIAN) + (_.isNil(accessRecord.amendmentIterations[index].dateReturned) && userType == constants.userTypes.APPLICANT) ) { return { unansweredAmendments: 0, answeredAmendments: 0 }; } // 2. 
Count answered and unanswered amendments in unsubmitted iteration - Object.keys(accessRecord.amendmentIterations[index].questionAnswers).forEach( - (questionId) => { - if ( - _.isNil( - accessRecord.amendmentIterations[index].questionAnswers[questionId] - .answer - ) - ) { - unansweredAmendments++; - } else { - answeredAmendments++; - } + Object.keys(accessRecord.amendmentIterations[index].questionAnswers).forEach(questionId => { + if (_.isNil(accessRecord.amendmentIterations[index].questionAnswers[questionId].answer)) { + unansweredAmendments++; + } else { + answeredAmendments++; } - ); + }); // 3. Return counts return { unansweredAmendments, answeredAmendments }; }; +const revertAmendmentAnswer = (accessRecord, questionId, user) => { + // 1. Locate the latest amendment iteration + let index = getLatestAmendmentIterationIndex(accessRecord); + // 2. Verify the amendment was previously requested and a new answer exists + let amendment = accessRecord.amendmentIterations[index].questionAnswers[questionId]; + if (_.isNil(amendment) || _.isNil(amendment.answer)) { + return; + } else { + // 3. Remove the updated answer + amendment = { + [`${questionId}`]: new AmendmentModel({ + ...amendment, + updatedBy: undefined, + updatedByUser: undefined, + dateUpdated: undefined, + answer: undefined, + }), + }; + accessRecord.amendmentIterations[index].questionAnswers = { ...accessRecord.amendmentIterations[index].questionAnswers, ...amendment }; + } +}; + +const createNotifications = async (type, accessRecord) => { + // Project details from about application + let { aboutApplication = {}, questionAnswers } = accessRecord; + if (typeof aboutApplication === 'string') { + aboutApplication = JSON.parse(accessRecord.aboutApplication); + } + if (typeof questionAnswers === 'string') { + questionAnswers = JSON.parse(accessRecord.questionAnswers); + } + let { projectName = 'No project name set' } = aboutApplication; + let { dateSubmitted = '' } = accessRecord; + // Publisher details from single dataset + let { + datasetfields: { publisher }, + } = accessRecord.datasets[0]; + // Dataset titles + let datasetTitles = accessRecord.datasets.map(dataset => dataset.name).join(', '); + // Main applicant (user obj) + let { firstname: appFirstName, lastname: appLastName } = accessRecord.mainApplicant; + // Instantiate default params + let emailRecipients = [], + options = {}, + html = '', + authors = []; + let applicants = datarequestUtil.extractApplicantNames(questionAnswers).join(', '); + // Fall back for single applicant + if (_.isEmpty(applicants)) { + applicants = `${appFirstName} ${appLastName}`; + } + // Get authors/contributors (user obj) + if (!_.isEmpty(accessRecord.authors)) { + authors = accessRecord.authors.map(author => { + let { firstname, lastname, email, id } = author; + return { firstname, lastname, email, id }; + }); + } + + switch (type) { + case constants.notificationTypes.RETURNED: + // 1. Create notifications + // Applicant notification + await notificationBuilder.triggerNotificationMessage( + [accessRecord.userId], + `Updates have been requested by ${publisher} for your Data Access Request application`, + 'data access request', + accessRecord._id + ); + + // Authors notification + if (!_.isEmpty(authors)) { + await notificationBuilder.triggerNotificationMessage( + authors.map(author => author.id), + `Updates have been requested by ${publisher} for a Data Access Request application you are contributing to`, + 'data access request', + accessRecord._id + ); + } + + // 2. 
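countUnsubmittedAmendments above now returns zero counts to applicants until the custodian has set dateReturned on the iteration; once past the guard, the forEach is a simple partition of the iteration's questionAnswers into answered and unanswered amendments. A standalone sketch of the counting step with invented data:

const _ = require('lodash');

// Hypothetical questionAnswers from an unsubmitted amendment iteration.
const iterationAnswers = {
	projectTitle: { answer: 'Updated title' },
	dataRetentionPeriod: { answer: undefined },
};

let unansweredAmendments = 0,
	answeredAmendments = 0;
Object.keys(iterationAnswers).forEach(questionId => {
	if (_.isNil(iterationAnswers[questionId].answer)) {
		unansweredAmendments++;
	} else {
		answeredAmendments++;
	}
});

console.log({ unansweredAmendments, answeredAmendments }); // { unansweredAmendments: 1, answeredAmendments: 1 }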
Send emails to relevant users + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; + // Create object to pass through email data + options = { + id: accessRecord._id, + publisher, + projectName, + datasetTitles, + dateSubmitted, + applicants, + }; + // Create email body content + html = emailGenerator.generateDARReturnedEmail(options); + // Send email + await emailGenerator.sendEmail( + emailRecipients, + constants.hdrukEmail, + `Updates have been requested by ${publisher} for your Data Access Request application`, + html, + false + ); + break; + } +}; + +const calculateAmendmentStatus = (accessRecord, userType) => { + let amendmentStatus = ''; + const lastAmendmentIteration = _.last(accessRecord.amendmentIterations); + const { applicationStatus } = accessRecord; + // 1. Amendment status is blank if no amendments have ever been created or the application has had a final decision + if ( + _.isNil(lastAmendmentIteration) || + applicationStatus === constants.applicationStatuses.APPROVED || + applicationStatus === constants.applicationStatuses.APPROVEDWITHCONDITIONS || + applicationStatus === constants.applicationStatuses.REJECTED + ) { + return ''; + } + const { dateSubmitted = '', dateReturned = '' } = lastAmendmentIteration; + // 2a. If the requesting user is the applicant + if (userType === constants.userTypes.APPLICANT) { + if (!_.isEmpty(dateSubmitted.toString())) { + amendmentStatus = constants.amendmentStatuses.UPDATESSUBMITTED; + } else if (!_.isEmpty(dateReturned.toString())) { + amendmentStatus = constants.amendmentStatuses.UPDATESREQUESTED; + } + // 2b. If the requester user is the custodian + } else if (userType === constants.userTypes.CUSTODIAN) { + if (!_.isEmpty(dateSubmitted.toString())) { + amendmentStatus = constants.amendmentStatuses.UPDATESRECEIVED; + } else if (!_.isEmpty(dateReturned.toString())) { + amendmentStatus = constants.amendmentStatuses.AWAITINGUPDATES; + } + } + return amendmentStatus; +}; + module.exports = { handleApplicantAmendment: handleApplicantAmendment, doesAmendmentExist: doesAmendmentExist, doResubmission: doResubmission, updateAmendment: updateAmendment, + revertAmendmentAnswer: revertAmendmentAnswer, setAmendment: setAmendment, addAmendment: addAmendment, removeAmendment: removeAmendment, @@ -565,4 +832,7 @@ module.exports = { formatQuestionAnswers: formatQuestionAnswers, countUnsubmittedAmendments: countUnsubmittedAmendments, getLatestQuestionAnswer: getLatestQuestionAnswer, + requestAmendments: requestAmendments, + calculateAmendmentStatus: calculateAmendmentStatus, + injectNavigationAmendment: injectNavigationAmendment, }; diff --git a/src/resources/datarequest/amendment/amendment.model.js b/src/resources/datarequest/amendment/amendment.model.js index 594167e3..2d41943c 100644 --- a/src/resources/datarequest/amendment/amendment.model.js +++ b/src/resources/datarequest/amendment/amendment.model.js @@ -1,16 +1,16 @@ import { model, Schema } from 'mongoose'; const AmendmentSchema = new Schema({ - questionSetId: String, - requested: Boolean, - reason: String, - requestedBy: String, - requestedByUser: { type : Schema.Types.ObjectId, ref: 'User' }, - dateRequested: Date, - answer: { type: Schema.Types.Mixed }, - updatedBy: String, - updatedByUser: { type : Schema.Types.ObjectId, ref: 'User' }, - dateUpdated: Date, + questionSetId: String, + requested: Boolean, + reason: String, + requestedBy: String, + requestedByUser: { type: Schema.Types.ObjectId, ref: 'User' }, + dateRequested: Date, + answer: { type: Schema.Types.Mixed }, + 
updatedBy: String, + updatedByUser: { type: Schema.Types.ObjectId, ref: 'User' }, + dateUpdated: Date, }); -export const AmendmentModel = model('amendment', AmendmentSchema) +export const AmendmentModel = model('amendment', AmendmentSchema); diff --git a/src/resources/datarequest/datarequest.controller.js b/src/resources/datarequest/datarequest.controller.js index 7048c1d1..b81c963a 100644 --- a/src/resources/datarequest/datarequest.controller.js +++ b/src/resources/datarequest/datarequest.controller.js @@ -6,15 +6,14 @@ import { UserModel } from '../user/user.model'; import teamController from '../team/team.controller'; import workflowController from '../workflow/workflow.controller'; +import datarequestUtil from './utils/datarequest.util'; +import notificationBuilder from '../utilities/notificationBuilder'; import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; +import dynamicForm from '../utilities/dynamicForms/dynamicForm.util'; import constants from '../utilities/constants.util'; -import { - processFile, - getFile, - fileStatus, -} from '../utilities/cloudStorage.util'; +import { processFile, getFile, fileStatus } from '../utilities/cloudStorage.util'; import _ from 'lodash'; import inputSanitizer from '../utilities/inputSanitizer'; @@ -23,8 +22,6 @@ import mongoose from 'mongoose'; const amendmentController = require('./amendment/amendment.controller'); const bpmController = require('../bpmnworkflow/bpmnworkflow.controller'); -const notificationBuilder = require('../utilities/notificationBuilder'); -const hdrukEmail = `enquiry@healthdatagateway.org`; module.exports = { //GET api/v1/data-access-request @@ -34,24 +31,19 @@ module.exports = { let { id: userId } = req.user; // 2. Find all data access request applications created with multi dataset version - let applications = await DataRequestModel.find({ - $or: [{ userId: parseInt(userId) }, { authorIds: userId }], - }).populate('datasets mainApplicant'); + let applications = await DataRequestModel.find({ $or: [{ userId: parseInt(userId) }, { authorIds: userId }] }).populate( + 'datasets mainApplicant' + ); // 3. Append project name and applicants let modifiedApplications = [...applications] - .map((app) => { - return module.exports.createApplicationDTO( - app.toObject(), - constants.userTypes.APPLICANT - ); + .map(app => { + return module.exports.createApplicationDTO(app.toObject(), constants.userTypes.APPLICANT); }) .sort((a, b) => b.updatedAt - a.updatedAt); // 4. Calculate average decision time across submitted applications - let avgDecisionTime = module.exports.calculateAvgDecisionTime( - applications - ); + let avgDecisionTime = module.exports.calculateAvgDecisionTime(applications); // 5. Return payload return res.status(200).json({ @@ -90,9 +82,7 @@ module.exports = { ]); // 3. If no matching application found, return 404 if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } else { accessRecord = accessRecord.toObject(); } @@ -101,77 +91,58 @@ module.exports = { accessRecord.datasets = [accessRecord.dataset]; } // 5. 
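The reformatted AmendmentSchema above is what revertAmendmentAnswer and the other amendment helpers instantiate per question. A rough sketch of creating a single requested amendment; the field names come from the schema, while the values and the questionSetId are invented:

import mongoose from 'mongoose';
import { AmendmentModel } from './amendment.model';

// Hypothetical amendment raised by a custodian against one question.
const amendment = new AmendmentModel({
	questionSetId: 'safepeople-primaryapplicant', // invented question set id
	requested: true,
	reason: 'Please clarify your role on the project',
	requestedBy: 'Jane Custodian',
	requestedByUser: new mongoose.Types.ObjectId(), // stand-in user ObjectId
	dateRequested: new Date(),
});
// answer, updatedBy, updatedByUser and dateUpdated stay unset until the applicant responds.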
Check if requesting user is custodian member or applicant/contributor - let { - authorised, - userType, - } = module.exports.getUserPermissionsForApplication( - accessRecord, - req.user.id, - req.user._id - ); + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); let readOnly = true; if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); - } - // 6. Set edit mode for applicants who have not yet submitted or are in the process of fixing amendments - let countUnsubmittedAmendments = amendmentController.countUnsubmittedAmendments( - accessRecord, - userType - ); - if ( - userType === constants.userTypes.APPLICANT && - (accessRecord.applicationStatus === - constants.applicationStatuses.INPROGRESS || - countUnsubmittedAmendments.unansweredAmendments > 0 || - countUnsubmittedAmendments.answeredAmendments > 0) - ) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 6. Set edit mode for applicants who have not yet submitted + if (userType === constants.userTypes.APPLICANT && accessRecord.applicationStatus === constants.applicationStatuses.INPROGRESS) { readOnly = false; } - // 7. Set the review mode if user is a custodian reviewing the current step - let { - inReviewMode, - reviewSections, - hasRecommended, - } = workflowController.getReviewStatus(accessRecord, req.user._id); - // 8. Get the workflow/voting status + // 7. Count unsubmitted amendments + let countUnsubmittedAmendments = amendmentController.countUnsubmittedAmendments(accessRecord, userType); + // 8. Set the review mode if user is a custodian reviewing the current step + let { inReviewMode, reviewSections, hasRecommended } = workflowController.getReviewStatus(accessRecord, req.user._id); + // 9. Get the workflow/voting status let workflow = workflowController.getWorkflowStatus(accessRecord); - // 9. Check if the current user can override the current step + let isManager = false; + // 10. Check if the current user can override the current step if (_.has(accessRecord.datasets[0], 'publisher.team')) { - let isManager = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - accessRecord.datasets[0].publisher.team, - req.user._id - ); + isManager = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, accessRecord.datasets[0].publisher.team, req.user._id); // Set the workflow override capability if there is an active step and user is a manager if (!_.isEmpty(workflow)) { workflow.canOverrideStep = !workflow.isCompleted && isManager; } } - // 10. Update json schema and question answers with modifications since original submission + // 11. Update json schema and question answers with modifications since original submission accessRecord.questionAnswers = JSON.parse(accessRecord.questionAnswers); accessRecord.jsonSchema = JSON.parse(accessRecord.jsonSchema); - accessRecord = amendmentController.injectAmendments( - accessRecord, - userType + accessRecord = amendmentController.injectAmendments(accessRecord, userType, req.user); + // 12. Determine the current active party handling the form + let activeParty = amendmentController.getAmendmentIterationParty(accessRecord); + // 13. Append question actions depending on user type and application status + let userRole = + userType === constants.userTypes.APPLICANT ? '' : isManager ? 
constants.roleTypes.MANAGER : constants.roleTypes.REVIEWER; + accessRecord.jsonSchema = datarequestUtil.injectQuestionActions( + accessRecord.jsonSchema, + userType, + accessRecord.applicationStatus, + userRole ); - // 11. Return application form + // 14. Return application form return res.status(200).json({ status: 'success', data: { ...accessRecord, aboutApplication: - typeof accessRecord.aboutApplication === 'string' - ? JSON.parse(accessRecord.aboutApplication) - : accessRecord.aboutApplication, + typeof accessRecord.aboutApplication === 'string' ? JSON.parse(accessRecord.aboutApplication) : accessRecord.aboutApplication, datasets: accessRecord.datasets, readOnly, ...countUnsubmittedAmendments, userType, - projectId: - accessRecord.projectId || - helper.generateFriendlyId(accessRecord._id), + activeParty, + projectId: accessRecord.projectId || helper.generateFriendlyId(accessRecord._id), inReviewMode, reviewSections, hasRecommended, @@ -207,20 +178,15 @@ module.exports = { select: 'firstname lastname -id -_id', }); // 4. Get dataset - dataset = await ToolModel.findOne({ datasetid: dataSetId }).populate( - 'publisher' - ); + dataset = await ToolModel.findOne({ datasetid: dataSetId }).populate('publisher'); // 5. If no record create it and pass back if (!accessRecord) { if (!dataset) { - return res - .status(500) - .json({ status: 'error', message: 'No dataset available.' }); + return res.status(500).json({ status: 'error', message: 'No dataset available.' }); } let { datasetfields: { publisher = '' }, } = dataset; - // 1. GET the template from the custodian const accessRequestTemplate = await DataRequestSchemaModel.findOne({ $or: [{ dataSetId }, { publisher }, { dataSetId: 'default' }], @@ -235,7 +201,6 @@ module.exports = { } // 2. Build up the accessModel for the user let { jsonSchema, version, _id: schemaId } = accessRequestTemplate; - // 3. create new DataRequestModel let record = new DataRequestModel({ version, @@ -252,9 +217,7 @@ module.exports = { }); // 4. save record const newApplication = await record.save(); - newApplication.projectId = helper.generateFriendlyId( - newApplication._id - ); + newApplication.projectId = helper.generateFriendlyId(newApplication._id); await newApplication.save(); // 5. return record @@ -265,20 +228,21 @@ module.exports = { } else { data = { ...accessRecord.toObject() }; } - + // 6. Parse json to allow us to modify schema + data.jsonSchema = JSON.parse(data.jsonSchema); + // 7. Append question actions depending on user type and application status + data.jsonSchema = datarequestUtil.injectQuestionActions(data.jsonSchema, constants.userTypes.APPLICANT, data.applicationStatus); + // 8. Return payload return res.status(200).json({ status: 'success', data: { ...data, - jsonSchema: JSON.parse(data.jsonSchema), questionAnswers: JSON.parse(data.questionAnswers), - aboutApplication: - typeof data.aboutApplication === 'string' - ? JSON.parse(data.aboutApplication) - : data.aboutApplication, + aboutApplication: typeof data.aboutApplication === 'string' ? JSON.parse(data.aboutApplication) : data.aboutApplication, dataset, projectId: data.projectId || helper.generateFriendlyId(data._id), userType: constants.userTypes.APPLICANT, + activeParty: constants.userTypes.APPLICANT, inReviewMode: false, reviewSections: [], files: data.files || [], @@ -321,12 +285,11 @@ module.exports = { datasets = await ToolModel.find({ datasetid: { $in: arrDatasetIds }, }).populate('publisher'); + const arrDatasetNames = datasets.map(dataset => dataset.name); // 5. 
If no record create it and pass back if (!accessRecord) { if (_.isEmpty(datasets)) { - return res - .status(500) - .json({ status: 'error', message: 'No datasets available.' }); + return res.status(500).json({ status: 'error', message: 'No datasets available.' }); } let { datasetfields: { publisher = '' }, @@ -351,6 +314,7 @@ module.exports = { version, userId, datasetIds: arrDatasetIds, + datasetTitles: arrDatasetNames, jsonSchema, schemaId, publisher, @@ -360,9 +324,7 @@ module.exports = { }); // 4. save record const newApplication = await record.save(); - newApplication.projectId = helper.generateFriendlyId( - newApplication._id - ); + newApplication.projectId = helper.generateFriendlyId(newApplication._id); await newApplication.save(); // 5. return record data = { @@ -372,20 +334,21 @@ module.exports = { } else { data = { ...accessRecord.toObject() }; } - + // 6. Parse json to allow us to modify schema + data.jsonSchema = JSON.parse(data.jsonSchema); + // 7. Append question actions depending on user type and application status + data.jsonSchema = datarequestUtil.injectQuestionActions(data.jsonSchema, constants.userTypes.APPLICANT, data.applicationStatus); + // 8. Return payload return res.status(200).json({ status: 'success', data: { ...data, - jsonSchema: JSON.parse(data.jsonSchema), questionAnswers: JSON.parse(data.questionAnswers), - aboutApplication: - typeof data.aboutApplication === 'string' - ? JSON.parse(data.aboutApplication) - : data.aboutApplication, + aboutApplication: typeof data.aboutApplication === 'string' ? JSON.parse(data.aboutApplication) : data.aboutApplication, datasets, projectId: data.projectId || helper.generateFriendlyId(data._id), userType: constants.userTypes.APPLICANT, + activeParty: constants.userTypes.APPLICANT, inReviewMode: false, reviewSections: [], files: data.files || [], @@ -416,48 +379,52 @@ module.exports = { }); // 4. Check access record if (!accessRequestRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Data Access Request not found.' }); + return res.status(404).json({ status: 'error', message: 'Data Access Request not found.' }); } // 5. Update record object - module.exports - .updateApplication(accessRequestRecord, updateObj) - .then((accessRequestRecord) => { - const { - unansweredAmendments, - answeredAmendments, - } = accessRequestRecord; - // 6. Return new data object - return res.status(200).json({ - status: 'success', - unansweredAmendments, - answeredAmendments, - }); - }); + module.exports.updateApplication(accessRequestRecord, updateObj).then(accessRequestRecord => { + const { unansweredAmendments = 0, answeredAmendments = 0, dirtySchema = false } = accessRequestRecord; + if (dirtySchema) { + accessRequestRecord.jsonSchema = JSON.parse(accessRequestRecord.jsonSchema); + accessRequestRecord = amendmentController.injectAmendments(accessRequestRecord, constants.userTypes.APPLICANT, req.user); + } + let data = { + status: 'success', + unansweredAmendments, + answeredAmendments, + }; + if (dirtySchema) { + data = { + ...data, + jsonSchema: accessRequestRecord.jsonSchema, + }; + } + // 6. 
Return new data object + return res.status(200).json(data); + }); } catch (err) { console.log(err.message); res.status(500).json({ status: 'error', message: err.message }); } }, - buildUpdateObject: (data) => { + buildUpdateObject: data => { let updateObj = {}; - let { - aboutApplication, - questionAnswers, - updatedQuestionId, - user, - jsonSchema = '', - } = data; + let { aboutApplication, questionAnswers, updatedQuestionId, user, jsonSchema = '' } = data; if (aboutApplication) { if (typeof aboutApplication === 'string') { aboutApplication = JSON.parse(aboutApplication); } - let updatedDatasetIds = aboutApplication.selectedDatasets.map( - (dataset) => dataset.datasetId + const { datasetIds, datasetTitles } = aboutApplication.selectedDatasets.reduce( + (newObj, dataset) => { + newObj.datasetIds = [...newObj.datasetIds, dataset.datasetId]; + newObj.datasetTitles = [...newObj.datasetTitles, dataset.name]; + return newObj; + }, + { datasetIds: [], datasetTitles: [] } ); - updateObj = { aboutApplication, datasetIds: updatedDatasetIds }; + + updateObj = { aboutApplication, datasetIds, datasetTitles }; } if (questionAnswers) { updateObj = { ...updateObj, questionAnswers, updatedQuestionId, user }; @@ -476,17 +443,12 @@ module.exports = { let { updatedQuestionId = '', user } = updateObj; // 2. If application is in progress, update initial question answers if (applicationStatus === constants.applicationStatuses.INPROGRESS) { - await DataRequestModel.findByIdAndUpdate( - _id, - updateObj, - { new: true }, - (err) => { - if (err) { - console.error(err); - throw err; - } + await DataRequestModel.findByIdAndUpdate(_id, updateObj, { new: true }, err => { + if (err) { + console.error(err); + throw err; } - ); + }); return accessRecord; // 3. Else if application has already been submitted make amendment } else if ( @@ -496,17 +458,9 @@ module.exports = { if (_.isNil(updateObj.questionAnswers)) { return accessRecord; } - let updatedAnswer = JSON.parse(updateObj.questionAnswers)[ - updatedQuestionId - ]; - accessRecord = amendmentController.handleApplicantAmendment( - accessRecord.toObject(), - updatedQuestionId, - '', - updatedAnswer, - user - ); - await DataRequestModel.replaceOne({ _id }, accessRecord, (err) => { + let updatedAnswer = JSON.parse(updateObj.questionAnswers)[updatedQuestionId]; + accessRecord = amendmentController.handleApplicantAmendment(accessRecord.toObject(), updatedQuestionId, '', updatedAnswer, user); + await DataRequestModel.replaceOne({ _id }, accessRecord, err => { if (err) { console.error(err); throw err; @@ -556,10 +510,9 @@ module.exports = { select: 'id email', }, ]); + if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 4. 
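buildUpdateObject above now derives both datasetIds and the newly persisted datasetTitles from aboutApplication.selectedDatasets in a single reduce. A standalone sketch of that transformation with invented selections:

// Hypothetical datasets chosen on the 'about application' panel.
const selectedDatasets = [
	{ datasetId: 'dataset-123', name: 'Hospital Episode Statistics' },
	{ datasetId: 'dataset-456', name: 'Primary Care Prescriptions' },
];

const { datasetIds, datasetTitles } = selectedDatasets.reduce(
	(newObj, dataset) => {
		newObj.datasetIds = [...newObj.datasetIds, dataset.datasetId];
		newObj.datasetTitles = [...newObj.datasetTitles, dataset.name];
		return newObj;
	},
	{ datasetIds: [], datasetTitles: [] }
);

console.log(datasetIds); // [ 'dataset-123', 'dataset-456' ]
console.log(datasetTitles); // [ 'Hospital Episode Statistics', 'Primary Care Prescriptions' ]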
Ensure single datasets are mapped correctly into array (backward compatibility for single dataset applications) if (_.isEmpty(accessRecord.datasets)) { @@ -570,14 +523,7 @@ module.exports = { let isDirty = false, statusChange = false, contributorChange = false; - let { - authorised, - userType, - } = module.exports.getUserPermissionsForApplication( - accessRecord.toObject(), - userId, - _id - ); + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord.toObject(), userId, _id); if (!authorised) { return res.status(401).json({ @@ -601,17 +547,11 @@ module.exports = { } if (!_.isEmpty(team)) { - authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - team, - _id - ); + authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, team, _id); } if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // Extract params from body ({ applicationStatus, applicationStatusDesc } = req.body); @@ -634,28 +574,24 @@ module.exports = { // Update any attached workflow in Mongo to show workflow is finished let { workflow = {} } = accessRecord; if (!_.isEmpty(workflow)) { - accessRecord.workflow.steps = accessRecord.workflow.steps.map( - (step) => { - let updatedStep = { - ...step.toObject(), - active: false, + accessRecord.workflow.steps = accessRecord.workflow.steps.map(step => { + let updatedStep = { + ...step.toObject(), + active: false, + }; + if (step.active) { + updatedStep = { + ...updatedStep, + endDateTime: new Date(), + completed: true, }; - if (step.active) { - updatedStep = { - ...updatedStep, - endDateTime: new Date(), - completed: true, - }; - } - return updatedStep; } - ); + return updatedStep; + }); } } if (applicationStatusDesc) { - accessRecord.applicationStatusDesc = inputSanitizer.removeNonBreakingSpaces( - applicationStatusDesc - ); + accessRecord.applicationStatusDesc = inputSanitizer.removeNonBreakingSpaces(applicationStatusDesc); isDirty = true; } // If applicant, allow update to contributors/authors @@ -674,7 +610,7 @@ module.exports = { } // 7. If a change has been made, notify custodian and main applicant if (isDirty) { - await accessRecord.save(async (err) => { + await accessRecord.save(async err => { if (err) { console.error(err); return res.status(500).json({ status: 'error', message: err }); @@ -698,9 +634,7 @@ module.exports = { req.user ); // Ensure Camunda ends workflow processes given that manager has made final decision - let { - name: dataRequestPublisher, - } = accessRecord.datasets[0].publisher; + let { name: dataRequestPublisher } = accessRecord.datasets[0].publisher; let bpmContext = { dataRequestStatus: applicationStatus, dataRequestManagerId: _id.toString(), @@ -739,24 +673,24 @@ module.exports = { if (_.isEmpty(workflowId)) { return res.status(400).json({ success: false, - message: - 'You must supply the unique identifier to assign a workflow to this application', + message: 'You must supply the unique identifier to assign a workflow to this application', }); } // 2. 
Retrieve DAR from database let accessRecord = await DataRequestModel.findOne({ _id: id }).populate({ path: 'datasets dataset mainApplicant authors', populate: { - path: 'publisher additionalInfo', + path: 'publisher', populate: { path: 'team', + populate: { + path: 'users', + }, }, }, }); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. Ensure single datasets are mapped correctly into array (backward compatibility for single dataset applications) if (_.isEmpty(accessRecord.datasets)) { @@ -768,23 +702,15 @@ module.exports = { let { publisher: { team }, } = accessRecord.datasets[0]; - authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - team.toObject(), - userId - ); + authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, team.toObject(), userId); } // 5. Refuse access if not authorised if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // 6. Check publisher allows workflows let workflowEnabled = false; - if ( - _.has(accessRecord.datasets[0].toObject(), 'publisher.workflowEnabled') - ) { + if (_.has(accessRecord.datasets[0].toObject(), 'publisher.workflowEnabled')) { ({ publisher: { workflowEnabled }, } = accessRecord.datasets[0]); @@ -808,8 +734,7 @@ module.exports = { if (applicationStatus !== constants.applicationStatuses.INREVIEW) { return res.status(400).json({ success: false, - message: - 'The application status must be set to in review to assign a workflow', + message: 'The application status must be set to in review to assign a workflow', }); } // 9. Retrieve workflow using ID from database @@ -827,7 +752,7 @@ module.exports = { } // 10. Set first workflow step active and ensure all others are false let workflowObj = workflow.toObject(); - workflowObj.steps = workflowObj.steps.map((step) => { + workflowObj.steps = workflowObj.steps.map(step => { return { ...step, active: false }; }); workflowObj.steps[0].active = true; @@ -845,37 +770,24 @@ module.exports = { }); } else { // 13. Contact Camunda to start workflow process - let { - name: dataRequestPublisher, - } = accessRecord.datasets[0].publisher; - let reviewerList = workflowObj.steps[0].reviewers.map((reviewer) => - reviewer._id.toString() - ); + let { name: dataRequestPublisher } = accessRecord.datasets[0].publisher; + let reviewerList = workflowObj.steps[0].reviewers.map(reviewer => reviewer._id.toString()); let bpmContext = { businessKey: id, dataRequestStatus: constants.applicationStatuses.INREVIEW, dataRequestUserId: userId.toString(), dataRequestPublisher, dataRequestStepName: workflowObj.steps[0].stepName, - notifyReviewerSLA: workflowController.calculateStepDeadlineReminderDate( - workflowObj.steps[0] - ), + notifyReviewerSLA: workflowController.calculateStepDeadlineReminderDate(workflowObj.steps[0]), reviewerList, }; bpmController.postStartStepReview(bpmContext); // 14. Gather context for notifications - const emailContext = workflowController.getWorkflowEmailContext( - accessRecord, - workflowObj, - 0 - ); + const emailContext = workflowController.getWorkflowEmailContext(accessRecord, workflowObj, 0); // 15. 
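Step 13 above assembles the Camunda context passed to bpmController.postStartStepReview when a workflow is assigned. The field names below are exactly those in the hunk; every value is an invented stand-in for what the handler pulls from the access record, the workflow and its first step:

const bpmContext = {
	businessKey: '5fc31a18d98e4f4cff7e9315', // the DAR application _id (stand-in)
	dataRequestStatus: 'In review', // constants.applicationStatuses.INREVIEW in the handler; literal shown for illustration
	dataRequestUserId: '9123456789', // requesting user id, stringified
	dataRequestPublisher: 'ALLIANCE > SAIL', // first dataset's publisher name (invented)
	dataRequestStepName: 'Data access committee review', // workflowObj.steps[0].stepName (invented)
	notifyReviewerSLA: '2020-11-20T00:00:00.000Z', // from workflowController.calculateStepDeadlineReminderDate(workflowObj.steps[0])
	reviewerList: ['5fb31a18d98e4f4cff7e9315'], // reviewer ObjectIds for the first step, as strings
};
// The handler then posts this to Camunda via bpmController.postStartStepReview(bpmContext).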
Create notifications to reviewers of the step that has been completed - module.exports.createNotifications( - constants.notificationTypes.REVIEWSTEPSTART, - emailContext, - accessRecord, - req.user - ); + module.exports.createNotifications(constants.notificationTypes.REVIEWSTEPSTART, emailContext, accessRecord, req.user); + // 16. Create our notifications to the custodian team managers if assigned a workflow to a DAR application + module.exports.createNotifications(constants.notificationTypes.WORKFLOWASSIGNED, emailContext, accessRecord, req.user); // 16. Return workflow payload return res.status(200).json({ success: true, @@ -907,40 +819,31 @@ module.exports = { }, }); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. Check permissions of user is reviewer of associated team let authorised = false; if (_.has(accessRecord.toObject(), 'publisherObj.team')) { let { team } = accessRecord.publisherObj; - authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - team.toObject(), - userId - ); + authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, team.toObject(), userId); } // 4. Refuse access if not authorised if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // 5. Check application is in submitted state let { applicationStatus } = accessRecord; if (applicationStatus !== constants.applicationStatuses.SUBMITTED) { return res.status(400).json({ success: false, - message: - 'The application status must be set to submitted to start a review', + message: 'The application status must be set to submitted to start a review', }); } // 6. Update application to 'in review' accessRecord.applicationStatus = constants.applicationStatuses.INREVIEW; accessRecord.dateReviewStart = new Date(); // 7. Save update to access record - await accessRecord.save(async (err) => { + await accessRecord.save(async err => { if (err) { console.error(err); res.status(500).json({ status: 'error', message: err }); @@ -988,12 +891,10 @@ module.exports = { // 4. get access record let accessRecord = await DataRequestModel.findOne({ _id: id }); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 5. Check if requesting user is custodian member or applicant/contributor - // let { authorised } = module.exports.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); + // let { authorised } = datarequestUtil.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); // 6. check authorisation // if (!authorised) { // return res @@ -1002,9 +903,7 @@ module.exports = { // } // 7. check files if (_.isEmpty(files)) { - return res - .status(400) - .json({ status: 'error', message: 'No files to upload' }); + return res.status(400).json({ status: 'error', message: 'No files to upload' }); } let fileArr = []; // check and see if descriptions and ids are an array @@ -1030,10 +929,7 @@ module.exports = { size: files[i].size, name: files[i].originalname, owner: req.user._id, - error: - status === fileStatus.ERROR - ? 'Could not upload. Unknown error. Please try again.' - : '', + error: status === fileStatus.ERROR ? 
'Could not upload. Unknown error. Please try again.' : '', }; // update local for post back to FE fileArr.push(newFile); @@ -1053,7 +949,7 @@ module.exports = { // 11. process access record into object let record = updatedRecord._doc; // 12. fet files - let mediaFiles = record.files.map((f) => { + let mediaFiles = record.files.map(f => { return f._doc; }); // 10. return response @@ -1075,15 +971,13 @@ module.exports = { // 2. get AccessRecord let accessRecord = await DataRequestModel.findOne({ _id: id }); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. process access record into object let record = accessRecord._doc; // 4. find the file in the files array from db let mediaFile = - record.files.find((f) => { + record.files.find(f => { let { fileId: dbFileId } = f._doc; return dbFileId === fileId; }) || {}; @@ -1099,9 +993,7 @@ module.exports = { // 7. get the file await getFile(name, dbFileId, id); // 8. send file back to user - return res - .status(200) - .sendFile(`${process.env.TMPDIR}${id}/${dbFileId}_${name}`); + return res.status(200).sendFile(`${process.env.TMPDIR}${id}/${dbFileId}_${name}`); } catch (err) { console.log(err); res.status(500).json({ status: 'error', message: err }); @@ -1146,33 +1038,24 @@ module.exports = { }, ]); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. Check permissions of user is reviewer of associated team let authorised = false; if (_.has(accessRecord.toObject(), 'publisherObj.team')) { let { team } = accessRecord.publisherObj; - authorised = teamController.checkTeamPermissions( - constants.roleTypes.REVIEWER, - team.toObject(), - userId - ); + authorised = teamController.checkTeamPermissions(constants.roleTypes.REVIEWER, team.toObject(), userId); } // 4. Refuse access if not authorised if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // 5. Check application is in-review let { applicationStatus } = accessRecord; if (applicationStatus !== constants.applicationStatuses.INREVIEW) { return res.status(400).json({ success: false, - message: - 'The application status must be set to in review to cast a vote', + message: 'The application status must be set to in review to cast a vote', }); } // 6. Ensure a workflow has been attached to this application @@ -1180,20 +1063,15 @@ module.exports = { if (!workflow) { return res.status(400).json({ success: false, - message: - 'There is no workflow attached to this application in order to cast a vote', + message: 'There is no workflow attached to this application in order to cast a vote', }); } // 7. 
Ensure the requesting user is expected to cast a vote let { steps } = workflow; - let activeStepIndex = steps.findIndex((step) => { + let activeStepIndex = steps.findIndex(step => { return step.active === true; }); - if ( - !steps[activeStepIndex].reviewers - .map((reviewer) => reviewer._id.toString()) - .includes(userId.toString()) - ) { + if (!steps[activeStepIndex].reviewers.map(reviewer => reviewer._id.toString()).includes(userId.toString())) { return res.status(400).json({ success: false, message: 'You have not been assigned to vote on this review phase', @@ -1202,7 +1080,7 @@ module.exports = { //8. Ensure the requesting user has not already voted let { recommendations = [] } = steps[activeStepIndex]; if (recommendations) { - let found = recommendations.some((rec) => { + let found = recommendations.some(rec => { return rec.reviewer.equals(userId); }); if (found) { @@ -1225,12 +1103,7 @@ module.exports = { newRecommendation, ]; // 11. Workflow management - construct Camunda payloads - let bpmContext = workflowController.buildNextStep( - userId, - accessRecord, - activeStepIndex, - false - ); + let bpmContext = workflowController.buildNextStep(userId, accessRecord, activeStepIndex, false); // 12. If step is now complete, update database record if (bpmContext.stepComplete) { accessRecord.workflow.steps[activeStepIndex].active = false; @@ -1240,12 +1113,10 @@ module.exports = { // 13. If it was not the final phase that was completed, move to next step in database if (!bpmContext.finalPhaseApproved) { accessRecord.workflow.steps[activeStepIndex + 1].active = true; - accessRecord.workflow.steps[ - activeStepIndex + 1 - ].startDateTime = new Date(); + accessRecord.workflow.steps[activeStepIndex + 1].startDateTime = new Date(); } // 14. Update MongoDb record for DAR - await accessRecord.save(async (err) => { + await accessRecord.save(async err => { if (err) { console.error(err); res.status(500).json({ status: 'error', message: err }); @@ -1256,36 +1127,23 @@ module.exports = { if (bpmContext.stepComplete && !bpmContext.finalPhaseApproved) { // Create notifications to reviewers of the next step that has been activated relevantStepIndex = activeStepIndex + 1; - relevantNotificationType = - constants.notificationTypes.REVIEWSTEPSTART; + relevantNotificationType = constants.notificationTypes.REVIEWSTEPSTART; } else if (bpmContext.stepComplete && bpmContext.finalPhaseApproved) { // Create notifications to managers that the application is awaiting final approval relevantStepIndex = activeStepIndex; - relevantNotificationType = - constants.notificationTypes.FINALDECISIONREQUIRED; + relevantNotificationType = constants.notificationTypes.FINALDECISIONREQUIRED; } // Continue only if notification required if (!_.isEmpty(relevantNotificationType)) { - const emailContext = workflowController.getWorkflowEmailContext( - accessRecord, - workflow, - relevantStepIndex - ); - module.exports.createNotifications( - relevantNotificationType, - emailContext, - accessRecord, - req.user - ); + const emailContext = workflowController.getWorkflowEmailContext(accessRecord, workflow, relevantStepIndex); + module.exports.createNotifications(relevantNotificationType, emailContext, accessRecord, req.user); } // 16. Call Camunda controller to update workflow process bpmController.postCompleteReview(bpmContext); } }); // 17. 
Return aplication and successful response - return res - .status(200) - .json({ status: 'success', data: accessRecord._doc }); + return res.status(200).json({ status: 'success', data: accessRecord._doc }); } catch (err) { console.log(err.message); res.status(500).json({ status: 'error', message: err }); @@ -1323,25 +1181,17 @@ module.exports = { }, ]); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } - // 3. Check permissions of user is reviewer of associated team + // 3. Check permissions of user is manager of associated team let authorised = false; if (_.has(accessRecord.toObject(), 'publisherObj.team')) { let { team } = accessRecord.publisherObj; - authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - team.toObject(), - userId - ); + authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, team.toObject(), userId); } // 4. Refuse access if not authorised if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // 5. Check application is in review state let { applicationStatus } = accessRecord; @@ -1361,7 +1211,7 @@ module.exports = { }); } // 7. Get the attached active workflow step - let activeStepIndex = steps.findIndex((step) => { + let activeStepIndex = steps.findIndex(step => { return step.active === true; }); if (activeStepIndex === -1) { @@ -1375,66 +1225,39 @@ module.exports = { accessRecord.workflow.steps[activeStepIndex].completed = true; accessRecord.workflow.steps[activeStepIndex].endDateTime = new Date(); // 9. Set up Camunda payload - let bpmContext = workflowController.buildNextStep( - userId, - accessRecord, - activeStepIndex, - true - ); + let bpmContext = workflowController.buildNextStep(userId, accessRecord, activeStepIndex, true); // 10. If it was not the final phase that was completed, move to next step if (!bpmContext.finalPhaseApproved) { accessRecord.workflow.steps[activeStepIndex + 1].active = true; - accessRecord.workflow.steps[ - activeStepIndex + 1 - ].startDateTime = new Date(); + accessRecord.workflow.steps[activeStepIndex + 1].startDateTime = new Date(); } // 11. Save changes to the DAR - await accessRecord.save(async (err) => { + await accessRecord.save(async err => { if (err) { console.error(err); res.status(500).json({ status: 'error', message: err }); } else { // 12. Gather context for notifications (active step) - let emailContext = workflowController.getWorkflowEmailContext( - accessRecord, - workflow, - activeStepIndex - ); + let emailContext = workflowController.getWorkflowEmailContext(accessRecord, workflow, activeStepIndex); // 13. Create notifications to reviewers of the step that has been completed - module.exports.createNotifications( - constants.notificationTypes.STEPOVERRIDE, - emailContext, - accessRecord, - req.user - ); + module.exports.createNotifications(constants.notificationTypes.STEPOVERRIDE, emailContext, accessRecord, req.user); // 14. 
Create emails and notifications let relevantStepIndex = 0, relevantNotificationType = ''; if (bpmContext.finalPhaseApproved) { // Create notifications to managers that the application is awaiting final approval relevantStepIndex = activeStepIndex; - relevantNotificationType = - constants.notificationTypes.FINALDECISIONREQUIRED; + relevantNotificationType = constants.notificationTypes.FINALDECISIONREQUIRED; } else { // Create notifications to reviewers of the next step that has been activated relevantStepIndex = activeStepIndex + 1; - relevantNotificationType = - constants.notificationTypes.REVIEWSTEPSTART; + relevantNotificationType = constants.notificationTypes.REVIEWSTEPSTART; } // Get the email context only if required if (relevantStepIndex !== activeStepIndex) { - emailContext = workflowController.getWorkflowEmailContext( - accessRecord, - workflow, - relevantStepIndex - ); + emailContext = workflowController.getWorkflowEmailContext(accessRecord, workflow, relevantStepIndex); } - module.exports.createNotifications( - relevantNotificationType, - emailContext, - accessRecord, - req.user - ); + module.exports.createNotifications(relevantNotificationType, emailContext, accessRecord, req.user); // 15. Call Camunda controller to start manager review process bpmController.postCompleteReview(bpmContext); } @@ -1447,6 +1270,59 @@ module.exports = { } }, + //PUT api/v1/data-access-request/:id/deletefile + updateAccessRequestDeleteFile: async (req, res) => { + try { + const { + params: { id }, + } = req; + + // 1. Id of the file to delete + let { fileId } = req.body; + + // 2. Find the relevant data request application + let accessRecord = await DataRequestModel.findOne({ _id: id }); + + if (!accessRecord) { + return res.status(404).json({ status: 'error', message: 'Application not found.' }); + } + + // 4. Ensure single datasets are mapped correctly into array + if (_.isEmpty(accessRecord.datasets)) { + accessRecord.datasets = [accessRecord.dataset]; + } + + // 5. If application is not in progress, actions cannot be performed + if (accessRecord.applicationStatus !== constants.applicationStatuses.INPROGRESS) { + return res.status(400).json({ + success: false, + message: 'This application is no longer in pre-submission status and therefore this action cannot be performed', + }); + } + + // 6. Get the requesting users permission levels + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord.toObject(), req.user.id, req.user._id); + // 7. Return unauthorised message if the requesting user is not an applicant + if (!authorised || userType !== constants.userTypes.APPLICANT) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + + // 8. Remove the file from the application + const newFileList = accessRecord.files.filter(file => file.fileId !== fileId); + + accessRecord.files = newFileList; + + // 9. write back into mongo + await accessRecord.save(); + + // 10. Return successful response + return res.status(200).json({ status: 'success' }); + } catch (err) { + console.log(err.message); + res.status(500).json({ status: 'error', message: err }); + } + }, + //POST api/v1/data-access-request/:id submitAccessRequestById: async (req, res) => { try { @@ -1482,121 +1358,84 @@ module.exports = { }, ]); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } // 3. 
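The core of the new deletefile endpoint above is a filter over accessRecord.files by fileId before the record is written back. A tiny illustration with invented file entries:

// Hypothetical files already attached to an in-progress application.
let files = [
	{ fileId: 'a1b2c3', name: 'protocol.pdf' },
	{ fileId: 'd4e5f6', name: 'consent-form.docx' },
];

// Deleting keeps every file except the one whose fileId was supplied in the request body.
const fileIdToDelete = 'a1b2c3';
files = files.filter(file => file.fileId !== fileIdToDelete);

console.log(files); // [ { fileId: 'd4e5f6', name: 'consent-form.docx' } ]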
Check user type and authentication to submit application - let { - authorised, - userType, - } = module.exports.getUserPermissionsForApplication( - accessRecord, - req.user.id, - req.user._id - ); + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord, req.user.id, req.user._id); if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); } // 4. Ensure single datasets are mapped correctly into array (backward compatibility for single dataset applications) if (_.isEmpty(accessRecord.datasets)) { accessRecord.datasets = [accessRecord.dataset]; } // 5. Perform either initial submission or resubmission depending on application status - if ( - accessRecord.applicationStatus === - constants.applicationStatuses.INPROGRESS - ) { + if (accessRecord.applicationStatus === constants.applicationStatuses.INPROGRESS) { accessRecord = module.exports.doInitialSubmission(accessRecord); } else if ( - accessRecord.applicationStatus === - constants.applicationStatuses.INREVIEW || - accessRecord.applicationStatus === - constants.applicationStatuses.SUBMITTED + accessRecord.applicationStatus === constants.applicationStatuses.INREVIEW || + accessRecord.applicationStatus === constants.applicationStatuses.SUBMITTED ) { - accessRecord = amendmentController.doResubmission( - accessRecord.toObject(), - req.user._id.toString() - ); + accessRecord = amendmentController.doResubmission(accessRecord.toObject(), req.user._id.toString()); } // 6. Ensure a valid submission is taking place if (_.isNil(accessRecord.submissionType)) { return res.status(400).json({ status: 'error', - message: - 'Application cannot be submitted as it has reached a final decision status.', + message: 'Application cannot be submitted as it has reached a final decision status.', }); } // 7. Save changes to db - await DataRequestModel.replaceOne( - { _id: id }, - accessRecord, - async (err) => { - if (err) { - console.error(err); - return res.status(500).json({ - status: 'error', - message: 'An error occurred saving the changes', - }); - } else { - // 8. Send notifications and emails with amendments - accessRecord.questionAnswers = JSON.parse( - accessRecord.questionAnswers - ); - accessRecord.jsonSchema = JSON.parse(accessRecord.jsonSchema); - accessRecord = amendmentController.injectAmendments( - accessRecord, - userType - ); - await module.exports.createNotifications( - accessRecord.submissionType === constants.submissionTypes.INITIAL - ? constants.notificationTypes.SUBMITTED - : constants.notificationTypes.RESUBMITTED, - {}, - accessRecord, - req.user - ); - // 8. Start workflow process in Camunda if publisher requires it and it is the first submission - if ( - accessRecord.workflowEnabled && - accessRecord.submissionType === constants.submissionTypes.INITIAL - ) { - let { - publisherObj: { name: publisher }, - dateSubmitted, - } = accessRecord; - let bpmContext = { - dateSubmitted, - applicationStatus: constants.applicationStatuses.SUBMITTED, - publisher, - businessKey: id, - }; - bpmController.postStartPreReview(bpmContext); - } + await DataRequestModel.replaceOne({ _id: id }, accessRecord, async err => { + if (err) { + console.error(err); + return res.status(500).json({ + status: 'error', + message: 'An error occurred saving the changes', + }); + } else { + // 8. 
Send notifications and emails with amendments + accessRecord.questionAnswers = JSON.parse(accessRecord.questionAnswers); + accessRecord.jsonSchema = JSON.parse(accessRecord.jsonSchema); + accessRecord = amendmentController.injectAmendments(accessRecord, userType, req.user); + await module.exports.createNotifications( + accessRecord.submissionType === constants.submissionTypes.INITIAL + ? constants.notificationTypes.SUBMITTED + : constants.notificationTypes.RESUBMITTED, + {}, + accessRecord, + req.user + ); + // 8. Start workflow process in Camunda if publisher requires it and it is the first submission + if (accessRecord.workflowEnabled && accessRecord.submissionType === constants.submissionTypes.INITIAL) { + let { + publisherObj: { name: publisher }, + dateSubmitted, + } = accessRecord; + let bpmContext = { + dateSubmitted, + applicationStatus: constants.applicationStatuses.SUBMITTED, + publisher, + businessKey: id, + }; + bpmController.postStartPreReview(bpmContext); } } - ); + }); // 9. Return aplication and successful response - return res - .status(200) - .json({ status: 'success', data: accessRecord._doc }); + return res.status(200).json({ status: 'success', data: accessRecord._doc }); } catch (err) { console.log(err.message); res.status(500).json({ status: 'error', message: err.message }); } }, - doInitialSubmission: (accessRecord) => { + doInitialSubmission: accessRecord => { // 1. Update application to submitted status accessRecord.submissionType = constants.submissionTypes.INITIAL; accessRecord.applicationStatus = constants.applicationStatuses.SUBMITTED; // 2. Check if workflow/5 Safes based application, set final status date if status will never change again - if ( - _.has(accessRecord.datasets[0].toObject(), 'publisher') && - !_.isNull(accessRecord.datasets[0].publisher) - ) { + if (_.has(accessRecord.datasets[0].toObject(), 'publisher') && !_.isNull(accessRecord.datasets[0].publisher)) { if (!accessRecord.datasets[0].publisher.workflowEnabled) { accessRecord.dateFinalStatus = new Date(); accessRecord.workflowEnabled = false; @@ -1639,9 +1478,7 @@ module.exports = { }, ]); if (!accessRecord) { - return res - .status(404) - .json({ status: 'error', message: 'Application not found.' }); + return res.status(404).json({ status: 'error', message: 'Application not found.' }); } let { workflow } = accessRecord; if (_.isEmpty(workflow)) { @@ -1650,34 +1487,141 @@ module.exports = { message: 'There is no workflow attached to this application.', }); } - let activeStepIndex = workflow.steps.findIndex((step) => { + let activeStepIndex = workflow.steps.findIndex(step => { return step.active === true; }); // 3. Determine email context if deadline has elapsed or is approaching - const emailContext = workflowController.getWorkflowEmailContext( - accessRecord, - workflow, - activeStepIndex - ); + const emailContext = workflowController.getWorkflowEmailContext(accessRecord, workflow, activeStepIndex); // 4. 
Send emails based on deadline elapsed or approaching if (emailContext.deadlineElapsed) { - module.exports.createNotifications( - constants.notificationTypes.DEADLINEPASSED, - emailContext, - accessRecord, - req.user - ); + module.exports.createNotifications(constants.notificationTypes.DEADLINEPASSED, emailContext, accessRecord, req.user); } else { - module.exports.createNotifications( - constants.notificationTypes.DEADLINEWARNING, - emailContext, - accessRecord, - req.user - ); + module.exports.createNotifications(constants.notificationTypes.DEADLINEWARNING, emailContext, accessRecord, req.user); } return res.status(200).json({ status: 'success' }); }, + //POST api/v1/data-access-request/:id/actions + performAction: async (req, res) => { + try { + // 1. Get the required request params + const { + params: { id }, + } = req; + let { questionId, questionSetId, questionIds = [], mode, separatorText = '' } = req.body; + if (_.isEmpty(questionId) || _.isEmpty(questionSetId)) { + return res.status(400).json({ + success: false, + message: 'You must supply the unique identifiers for the question to perform an action', + }); + } + // 2. Retrieve DAR from database + let accessRecord = await DataRequestModel.findOne({ _id: id }).populate([ + { + path: 'datasets dataset', + }, + { + path: 'publisherObj', + populate: { + path: 'team', + populate: { + path: 'users', + }, + }, + }, + ]); + if (!accessRecord) { + return res.status(404).json({ status: 'error', message: 'Application not found.' }); + } + // 3. If application is not in progress, actions cannot be performed + if (accessRecord.applicationStatus !== constants.applicationStatuses.INPROGRESS) { + return res.status(400).json({ + success: false, + message: 'This application is no longer in pre-submission status and therefore this action cannot be performed', + }); + } + // 4. Get the requesting users permission levels + let { authorised, userType } = datarequestUtil.getUserPermissionsForApplication(accessRecord.toObject(), req.user.id, req.user._id); + // 5. Return unauthorised message if the requesting user is not an applicant + if (!authorised || userType !== constants.userTypes.APPLICANT) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 6. Parse the json schema to be modified + let jsonSchema = JSON.parse(accessRecord.jsonSchema); + let questionAnswers = JSON.parse(accessRecord.questionAnswers); + // 7. 
Perform different action depending on mode passed + switch (mode) { + case constants.formActions.ADDREPEATABLESECTION: + let duplicateQuestionSet = dynamicForm.duplicateQuestionSet(questionSetId, jsonSchema); + jsonSchema = dynamicForm.insertQuestionSet(questionSetId, duplicateQuestionSet, jsonSchema); + break; + case constants.formActions.REMOVEREPEATABLESECTION: + jsonSchema = dynamicForm.removeQuestionSetReferences(questionSetId, questionId, jsonSchema); + questionAnswers = dynamicForm.removeQuestionSetAnswers(questionId, questionAnswers); + break; + case constants.formActions.ADDREPEATABLEQUESTIONS: + if (_.isEmpty(questionIds)) { + return res.status(400).json({ + success: false, + message: 'You must supply the question identifiers to duplicate when performing this action', + }); + } + let duplicateQuestions = dynamicForm.duplicateQuestions(questionSetId, questionIds, separatorText, jsonSchema); + jsonSchema = dynamicForm.insertQuestions(questionSetId, questionId, duplicateQuestions, jsonSchema); + break; + case constants.formActions.REMOVEREPEATABLEQUESTIONS: + if (_.isEmpty(questionIds)) { + return res.status(400).json({ + success: false, + message: 'You must supply the question identifiers to remove when performing this action', + }); + } + // Add clicked 'remove' button to questions to delete (questionId) + questionIds = [...questionIds, questionId]; + jsonSchema = dynamicForm.removeQuestionReferences(questionSetId, questionIds, jsonSchema); + questionAnswers = dynamicForm.removeQuestionAnswers(questionIds, questionAnswers); + break; + default: + return res.status(400).json({ + success: false, + message: 'You must supply a valid action to perform', + }); + } + // 8. Save changes to database + accessRecord.jsonSchema = JSON.stringify(jsonSchema); + accessRecord.questionAnswers = JSON.stringify(questionAnswers); + + await accessRecord.save(async err => { + if (err) { + console.error(err); + return res.status(500).json({ status: 'error', message: err }); + } else { + // 9. Append question actions for in progress applicant + jsonSchema = datarequestUtil.injectQuestionActions( + jsonSchema, + constants.userTypes.APPLICANT, // current user type + constants.applicationStatuses.INPROGRESS, + constants.userTypes.APPLICANT // active party + ); + // 10. 
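The switch above drives the new actions endpoint from the mode supplied in the request body. A hedged example of a client payload for duplicating repeatable questions, based only on the fields this handler destructures (mode, questionId, questionSetId, questionIds, separatorText); the identifier values are invented and the mode string is assumed to match constants.formActions.ADDREPEATABLEQUESTIONS:

// Example body for POST /api/v1/data-access-request/:id/actions (placeholder values).
const body = {
	mode: 'ADDREPEATABLEQUESTIONS', // assumed value of constants.formActions.ADDREPEATABLEQUESTIONS
	questionSetId: 'safepeople-otherindividuals', // invented question set id
	questionId: 'addApplicant', // the question whose action button was clicked
	questionIds: ['fullName', 'jobTitle', 'orcid'], // questions to duplicate within the set
	separatorText: 'Additional applicant details',
};
// On success the handler responds with the updated jsonSchema and questionAnswers for the form to re-render.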
Return necessary object to reflect schema update + return res.status(200).json({ + success: true, + accessRecord: { + jsonSchema, + questionAnswers, + }, + }); + } + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred updating the application amendment', + }); + } + }, + createNotifications: async (type, context, accessRecord, user) => { // Project details from about application if 5 Safes let { aboutApplication = {} } = accessRecord; @@ -1685,14 +1629,7 @@ module.exports = { aboutApplication = JSON.parse(accessRecord.aboutApplication); } let { projectName = 'No project name set' } = aboutApplication; - let { - projectId, - _id, - workflow = {}, - dateSubmitted = '', - jsonSchema, - questionAnswers, - } = accessRecord; + let { projectId, _id, workflow = {}, dateSubmitted = '', jsonSchema, questionAnswers } = accessRecord; if (_.isEmpty(projectId)) { projectId = _id; } @@ -1708,15 +1645,9 @@ module.exports = { let { datasetfields: { contactPoint, publisher }, } = accessRecord.datasets[0]; - let datasetTitles = accessRecord.datasets - .map((dataset) => dataset.name) - .join(', '); + let datasetTitles = accessRecord.datasets.map(dataset => dataset.name).join(', '); // Main applicant (user obj) - let { - firstname: appFirstName, - lastname: appLastName, - email: appEmail, - } = accessRecord.mainApplicant; + let { firstname: appFirstName, lastname: appLastName, email: appEmail } = accessRecord.mainApplicant; // Requesting user let { firstname, lastname } = user; // Instantiate default params @@ -1731,16 +1662,14 @@ module.exports = { jsonContent = {}, authors = [], attachments = []; - let applicants = module.exports - .extractApplicantNames(questionAnswers) - .join(', '); + let applicants = datarequestUtil.extractApplicantNames(questionAnswers).join(', '); // Fall back for single applicant on short application form if (_.isEmpty(applicants)) { applicants = `${appFirstName} ${appLastName}`; } // Get authors/contributors (user obj) if (!_.isEmpty(accessRecord.authors)) { - authors = accessRecord.authors.map((author) => { + authors = accessRecord.authors.map(author => { let { firstname, lastname, email, id } = author; return { firstname, lastname, email, id }; }); @@ -1748,6 +1677,7 @@ module.exports = { // Deconstruct workflow context if passed let { workflowName = '', + steps = [], stepName = '', reviewerNames = '', reviewSections = '', @@ -1764,21 +1694,13 @@ module.exports = { case constants.notificationTypes.STATUSCHANGE: // 1. 
Create notifications // Custodian manager and current step reviewer notifications - if ( - _.has(accessRecord.datasets[0].toObject(), 'publisher.team.users') - ) { + if (_.has(accessRecord.datasets[0].toObject(), 'publisher.team.users')) { // Retrieve all custodian manager user Ids and active step reviewers - custodianManagers = teamController.getTeamMembersByRole( - accessRecord.datasets[0].publisher.team, - constants.roleTypes.MANAGER - ); + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); let activeStep = workflowController.getActiveWorkflowStep(workflow); stepReviewers = workflowController.getStepReviewers(activeStep); // Create custodian notification - let statusChangeUserIds = [ - ...custodianManagers, - ...stepReviewers, - ].map((user) => user.id); + let statusChangeUserIds = [...custodianManagers, ...stepReviewers].map(user => user.id); await notificationBuilder.triggerNotificationMessage( statusChangeUserIds, `${appFirstName} ${appLastName}'s Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${firstname} ${lastname}`, @@ -1797,7 +1719,7 @@ module.exports = { // Create authors notification if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( - authors.map((author) => author.id), + authors.map(author => author.id), `A Data Access Request you are contributing to for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, 'data access request', accessRecord._id @@ -1806,12 +1728,7 @@ module.exports = { // 2. Send emails to relevant users // Aggregate objects for custodian and applicant - emailRecipients = [ - accessRecord.mainApplicant, - ...custodianManagers, - ...stepReviewers, - ...accessRecord.authors, - ]; + emailRecipients = [accessRecord.mainApplicant, ...custodianManagers, ...stepReviewers, ...accessRecord.authors]; if (!dateSubmitted) ({ updatedAt: dateSubmitted } = accessRecord); // Create object to pass through email data options = { @@ -1830,7 +1747,7 @@ module.exports = { // Send email await emailGenerator.sendEmail( emailRecipients, - hdrukEmail, + constants.hdrukEmail, `Data Access Request for ${datasetTitles} was ${context.applicationStatus} by ${publisher}`, html, false @@ -1839,15 +1756,10 @@ module.exports = { case constants.notificationTypes.SUBMITTED: // 1. 
Create notifications // Custodian notification - if ( - _.has(accessRecord.datasets[0].toObject(), 'publisher.team.users') - ) { + if (_.has(accessRecord.datasets[0].toObject(), 'publisher.team.users')) { // Retrieve all custodian user Ids to generate notifications - custodianManagers = teamController.getTeamMembersByRole( - accessRecord.datasets[0].publisher.team, - constants.roleTypes.MANAGER - ); - custodianUserIds = custodianManagers.map((user) => user.id); + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + custodianUserIds = custodianManagers.map(user => user.id); await notificationBuilder.triggerNotificationMessage( custodianUserIds, `A Data Access Request has been submitted to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, @@ -1855,8 +1767,7 @@ module.exports = { accessRecord._id ); } else { - const dataCustodianEmail = - process.env.DATA_CUSTODIAN_EMAIL || contactPoint; + const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || contactPoint; custodianManagers = [{ email: dataCustodianEmail }]; } // Applicant notification @@ -1869,7 +1780,7 @@ module.exports = { // Contributors/authors notification if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( - accessRecord.authors.map((author) => author.id), + accessRecord.authors.map(author => author.id), `A Data Access Request you are contributing to for ${datasetTitles} was successfully submitted to ${publisher} by ${firstname} ${lastname}`, 'data access request', accessRecord._id @@ -1905,31 +1816,18 @@ module.exports = { if (emailRecipientType === 'dataCustodian') { emailRecipients = [...custodianManagers]; // Generate json attachment for external system integration - attachmentContent = Buffer.from( - JSON.stringify({ id: accessRecord._id, ...jsonContent }) - ).toString('base64'); - filename = `${helper.generateFriendlyId( - accessRecord._id - )} ${moment().format().toString()}.json`; - attachments = [ - await emailGenerator.generateAttachment( - filename, - attachmentContent, - 'application/json' - ), - ]; + attachmentContent = Buffer.from(JSON.stringify({ id: accessRecord._id, ...jsonContent })).toString('base64'); + filename = `${helper.generateFriendlyId(accessRecord._id)} ${moment().format().toString()}.json`; + attachments = [await emailGenerator.generateAttachment(filename, attachmentContent, 'application/json')]; } else { // Send email to main applicant and contributors if they have opted in to email notifications - emailRecipients = [ - accessRecord.mainApplicant, - ...accessRecord.authors, - ]; + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; } // Send email if (!_.isEmpty(emailRecipients)) { await emailGenerator.sendEmail( emailRecipients, - hdrukEmail, + constants.hdrukEmail, `Data Access Request has been submitted to ${publisher} for ${datasetTitles}`, html, false, @@ -1943,11 +1841,8 @@ module.exports = { // Custodian notification if (_.has(accessRecord.datasets[0], 'publisher.team.users')) { // Retrieve all custodian user Ids to generate notifications - custodianManagers = teamController.getTeamMembersByRole( - accessRecord.datasets[0].publisher.team, - constants.roleTypes.MANAGER - ); - custodianUserIds = custodianManagers.map((user) => user.id); + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + custodianUserIds = custodianManagers.map(user => user.id); await 
notificationBuilder.triggerNotificationMessage( custodianUserIds, `A Data Access Request has been resubmitted with updates to ${publisher} for ${datasetTitles} by ${appFirstName} ${appLastName}`, @@ -1955,8 +1850,7 @@ module.exports = { accessRecord._id ); } else { - const dataCustodianEmail = - process.env.DATA_CUSTODIAN_EMAIL || contactPoint; + const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || contactPoint; custodianManagers = [{ email: dataCustodianEmail }]; } // Applicant notification @@ -1969,7 +1863,7 @@ module.exports = { // Contributors/authors notification if (!_.isEmpty(authors)) { await notificationBuilder.triggerNotificationMessage( - accessRecord.authors.map((author) => author.id), + accessRecord.authors.map(author => author.id), `A Data Access Request you are contributing to for ${datasetTitles} was successfully resubmitted with updates to ${publisher} by ${firstname} ${lastname}`, 'data access request', accessRecord._id @@ -2005,31 +1899,18 @@ module.exports = { if (emailRecipientType === 'dataCustodian') { emailRecipients = [...custodianManagers]; // Generate json attachment for external system integration - attachmentContent = Buffer.from( - JSON.stringify({ id: accessRecord._id, ...jsonContent }) - ).toString('base64'); - filename = `${helper.generateFriendlyId( - accessRecord._id - )} ${moment().format().toString()}.json`; - attachments = [ - await emailGenerator.generateAttachment( - filename, - attachmentContent, - 'application/json' - ), - ]; + attachmentContent = Buffer.from(JSON.stringify({ id: accessRecord._id, ...jsonContent })).toString('base64'); + filename = `${helper.generateFriendlyId(accessRecord._id)} ${moment().format().toString()}.json`; + attachments = [await emailGenerator.generateAttachment(filename, attachmentContent, 'application/json')]; } else { // Send email to main applicant and contributors if they have opted in to email notifications - emailRecipients = [ - accessRecord.mainApplicant, - ...accessRecord.authors, - ]; + emailRecipients = [accessRecord.mainApplicant, ...accessRecord.authors]; } // Send email if (!_.isEmpty(emailRecipients)) { await emailGenerator.sendEmail( emailRecipients, - hdrukEmail, + constants.hdrukEmail, `Data Access Request to ${publisher} for ${datasetTitles} has been updated with updates`, html, false, @@ -2042,13 +1923,9 @@ module.exports = { // 1. Deconstruct authors array from context to compare with existing Mongo authors const { newAuthors, currentAuthors } = context; // 2. Determine authors who have been removed - let addedAuthors = [...newAuthors].filter( - (author) => !currentAuthors.includes(author) - ); + let addedAuthors = [...newAuthors].filter(author => !currentAuthors.includes(author)); // 3. Determine authors who have been added - let removedAuthors = [...currentAuthors].filter( - (author) => !newAuthors.includes(author) - ); + let removedAuthors = [...currentAuthors].filter(author => !newAuthors.includes(author)); // 4. 
Create emails and notifications for added/removed contributors // Set required data for email generation options = { @@ -2070,14 +1947,14 @@ module.exports = { }).populate('additionalInfo'); await notificationBuilder.triggerNotificationMessage( - addedUsers.map((user) => user.id), + addedUsers.map(user => user.id), `You have been added as a contributor for a Data Access Request to ${publisher} by ${firstname} ${lastname}`, 'data access request', accessRecord._id ); await emailGenerator.sendEmail( addedUsers, - hdrukEmail, + constants.hdrukEmail, `You have been added as a contributor for a Data Access Request to ${publisher} by ${firstname} ${lastname}`, html, false @@ -2093,14 +1970,14 @@ module.exports = { }).populate('additionalInfo'); await notificationBuilder.triggerNotificationMessage( - removedUsers.map((user) => user.id), + removedUsers.map(user => user.id), `You have been removed as a contributor from a Data Access Request to ${publisher} by ${firstname} ${lastname}`, 'data access request unlinked', accessRecord._id ); await emailGenerator.sendEmail( removedUsers, - hdrukEmail, + constants.hdrukEmail, `You have been removed as a contributor from a Data Access Request to ${publisher} by ${firstname} ${lastname}`, html, false @@ -2130,7 +2007,7 @@ module.exports = { html = emailGenerator.generateStepOverrideEmail(options); emailGenerator.sendEmail( stepReviewers, - hdrukEmail, + constants.hdrukEmail, `${firstname} ${lastname} has approved a Data Access Request application phase that you were assigned to review`, html, false @@ -2140,9 +2017,9 @@ module.exports = { // 1. Create reviewer notifications notificationBuilder.triggerNotificationMessage( stepReviewerUserIds, - `You are required to review a new Data Access Request application for ${publisher} by ${moment( - currentDeadline - ).format('D MMM YYYY HH:mm')}`, + `You are required to review a new Data Access Request application for ${publisher} by ${moment(currentDeadline).format( + 'D MMM YYYY HH:mm' + )}`, 'data access request', accessRecord._id ); @@ -2161,21 +2038,18 @@ module.exports = { html = emailGenerator.generateNewReviewPhaseEmail(options); emailGenerator.sendEmail( stepReviewers, - hdrukEmail, - `You are required to review a new Data Access Request application for ${publisher} by ${moment( - currentDeadline - ).format('D MMM YYYY HH:mm')}`, + constants.hdrukEmail, + `You are required to review a new Data Access Request application for ${publisher} by ${moment(currentDeadline).format( + 'D MMM YYYY HH:mm' + )}`, html, false ); break; case constants.notificationTypes.FINALDECISIONREQUIRED: // 1. Get managers for publisher - custodianManagers = teamController.getTeamMembersByRole( - accessRecord.publisherObj.team, - constants.roleTypes.MANAGER - ); - managerUserIds = custodianManagers.map((user) => user.id); + custodianManagers = teamController.getTeamMembersByRole(accessRecord.publisherObj.team, constants.roleTypes.MANAGER); + managerUserIds = custodianManagers.map(user => user.id); // 2. 
Create manager notifications notificationBuilder.triggerNotificationMessage( @@ -2199,7 +2073,7 @@ module.exports = { html = emailGenerator.generateFinalDecisionRequiredEmail(options); emailGenerator.sendEmail( custodianManagers, - hdrukEmail, + constants.hdrukEmail, `Action is required as a Data Access Request application for ${publisher} is now awaiting a final decision`, html, false @@ -2232,7 +2106,7 @@ module.exports = { html = await emailGenerator.generateReviewDeadlineWarning(options); await emailGenerator.sendEmail( remainingReviewers, - hdrukEmail, + constants.hdrukEmail, `The deadline is approaching for a Data Access Request application you are reviewing`, html, false @@ -2240,16 +2114,10 @@ module.exports = { break; case constants.notificationTypes.DEADLINEPASSED: // 1. Get all managers - custodianManagers = teamController.getTeamMembersByRole( - accessRecord.publisherObj.team, - teamController.roleTypes.MANAGER - ); - managerUserIds = custodianManagers.map((user) => user.id); + custodianManagers = teamController.getTeamMembersByRole(accessRecord.publisherObj.team, constants.roleTypes.MANAGER); + managerUserIds = custodianManagers.map(user => user.id); // 2. Combine managers and reviewers remaining - let deadlinePassedUserIds = [ - ...remainingReviewerUserIds, - ...managerUserIds, - ]; + let deadlinePassedUserIds = [...remainingReviewerUserIds, ...managerUserIds]; let deadlinePassedUsers = [...remainingReviewers, ...custodianManagers]; // 3. Create notifications @@ -2278,84 +2146,51 @@ module.exports = { html = await emailGenerator.generateReviewDeadlinePassed(options); await emailGenerator.sendEmail( deadlinePassedUsers, - hdrukEmail, + constants.hdrukEmail, `The deadline for a Data Access Request review phase has now elapsed`, html, false ); break; - } - }, - - getUserPermissionsForApplication: (application, userId, _id) => { - try { - let authorised = false, - userType = ''; - // Return default unauthorised with no user type if incorrect params passed - if (!application || !userId || !_id) { - return { authorised, userType }; - } - // Check if the user is a custodian team member and assign permissions if so - if (_.has(application.datasets[0], 'publisher.team')) { - let isTeamMember = teamController.checkTeamPermissions( - '', - application.datasets[0].publisher.team, - _id + case constants.notificationTypes.WORKFLOWASSIGNED: + // 1. Get managers for publisher + custodianManagers = teamController.getTeamMembersByRole(accessRecord.datasets[0].publisher.team, constants.roleTypes.MANAGER); + // 2. Get managerIds for notifications + managerUserIds = custodianManagers.map(user => user.id); + // 3. deconstruct and set options for notifications and email + options = { + id: accessRecord._id, + steps, + projectId, + projectName, + applicants, + actioner: `${firstname} ${lastname}`, + workflowName, + dateSubmitted, + datasetTitles, + }; + // 4. 
Create notifications for the managers only + await notificationBuilder.triggerNotificationMessage( + managerUserIds, + `Workflow of ${workflowName} has been assigned to an application`, + 'data access request', + accessRecord._id ); - if (isTeamMember) { - userType = constants.userTypes.CUSTODIAN; - authorised = true; - } - } - // If user is not authenticated as a custodian, check if they are an author or the main applicant - if ( - application.applicationStatus === - constants.applicationStatuses.INPROGRESS || - _.isEmpty(userType) - ) { - if ( - application.authorIds.includes(userId) || - application.userId === userId - ) { - userType = constants.userTypes.APPLICANT; - authorised = true; - } - } - return { authorised, userType }; - } catch (error) { - console.error(error); - return { authorised: false, userType: '' }; - } - }, - - extractApplicantNames: (questionAnswers) => { - let fullnames = [], - autoCompleteLookups = { fullname: ['email'] }; - // spread questionAnswers to new var - let qa = { ...questionAnswers }; - // get object keys of questionAnswers - let keys = Object.keys(qa); - // loop questionAnswer keys - for (const key of keys) { - // get value of key - let value = qa[key]; - // split the key up for unique purposes - let [qId] = key.split('_'); - // check if key in lookup - let lookup = autoCompleteLookups[`${qId}`]; - // if key exists and it has an object do relevant data setting - if (typeof lookup !== 'undefined' && typeof value === 'object') { - switch (qId) { - case 'fullname': - fullnames.push(value.name); - break; - } - } + // 5. Generate the email + html = await emailGenerator.generateWorkflowAssigned(options); + // 6. Send email to custodian managers only within the team + await emailGenerator.sendEmail( + custodianManagers, + constants.hdrukEmail, + `A Workflow has been assigned to an application request`, + html, + false + ); + break; } - return fullnames; }, - createApplicationDTO: (app, userId = '') => { + createApplicationDTO: (app, userType, userId = '') => { let projectName = '', applicants = '', workflowName = '', @@ -2372,7 +2207,8 @@ module.exports = { deadlinePassed = '', reviewStatus = '', isReviewer = false, - reviewPanels = []; + reviewPanels = [], + amendmentStatus = ''; // Check if the application has a workflow assigned let { workflow = {}, applicationStatus } = app; @@ -2382,25 +2218,18 @@ module.exports = { team: { members, users }, }, } = app; - let managers = members.filter((mem) => { + let managers = members.filter(mem => { return mem.roles.includes('manager'); }); managerUsers = users - .filter((user) => - managers.some( - (manager) => manager.memberid.toString() === user._id.toString() - ) - ) - .map((user) => { + .filter(user => managers.some(manager => manager.memberid.toString() === user._id.toString())) + .map(user => { let isCurrentUser = user._id.toString() === userId.toString(); - return `${user.firstname} ${user.lastname}${ - isCurrentUser ? ` (you)` : `` - }`; + return `${user.firstname} ${user.lastname}${isCurrentUser ?
` (you)` : ``}`; }); if ( applicationStatus === constants.applicationStatuses.SUBMITTED || - (applicationStatus === constants.applicationStatuses.INREVIEW && - _.isEmpty(workflow)) + (applicationStatus === constants.applicationStatuses.INREVIEW && _.isEmpty(workflow)) ) { remainingActioners = managerUsers.join(', '); } @@ -2422,39 +2251,28 @@ module.exports = { decisionDate, isReviewer = false, reviewPanels = [], - } = workflowController.getActiveStepStatus( - activeStep, - users, - userId - )); - let activeStepIndex = workflow.steps.findIndex((step) => { + } = workflowController.getActiveStepStatus(activeStep, users, userId)); + let activeStepIndex = workflow.steps.findIndex(step => { return step.active === true; }); workflow.steps[activeStepIndex] = { ...workflow.steps[activeStepIndex], reviewStatus, }; - } else if ( - _.isUndefined(activeStep) && - applicationStatus === constants.applicationStatuses.INREVIEW - ) { + } else if (_.isUndefined(activeStep) && applicationStatus === constants.applicationStatuses.INREVIEW) { reviewStatus = 'Final decision required'; remainingActioners = managerUsers.join(', '); } // Get decision duration if completed let { dateFinalStatus, dateSubmitted } = app; if (dateFinalStatus) { - decisionDuration = parseInt( - moment(dateFinalStatus).diff(dateSubmitted, 'days') - ); + decisionDuration = parseInt(moment(dateFinalStatus).diff(dateSubmitted, 'days')); } // Set review section to display format let formattedSteps = [...workflow.steps].reduce((arr, item) => { let step = { ...item, - sections: [...item.sections].map( - (section) => constants.darPanelMapper[section] - ), + sections: [...item.sections].map(section => constants.darPanelMapper[section]), }; arr.push(step); return arr; @@ -2485,14 +2303,13 @@ module.exports = { } if (questionAnswers) { let questionAnswersObj = JSON.parse(questionAnswers); - applicants = module.exports - .extractApplicantNames(questionAnswersObj) - .join(', '); + applicants = datarequestUtil.extractApplicantNames(questionAnswersObj).join(', '); } if (_.isEmpty(applicants)) { let { firstname, lastname } = app.mainApplicant; applicants = `${firstname} ${lastname}`; } + amendmentStatus = amendmentController.calculateAmendmentStatus(app, userType); return { ...app, projectName, @@ -2512,17 +2329,15 @@ module.exports = { reviewStatus, isReviewer, reviewPanels, + amendmentStatus, }; }, - calculateAvgDecisionTime: (applications) => { + calculateAvgDecisionTime: applications => { // Extract dateSubmitted dateFinalStatus - let decidedApplications = applications.filter((app) => { + let decidedApplications = applications.filter(app => { let { dateSubmitted = '', dateFinalStatus = '' } = app; - return ( - !_.isEmpty(dateSubmitted.toString()) && - !_.isEmpty(dateFinalStatus.toString()) - ); + return !_.isEmpty(dateSubmitted.toString()) && !_.isEmpty(dateFinalStatus.toString()); }); // Find difference between dates in milliseconds if (!_.isEmpty(decidedApplications)) { @@ -2535,8 +2350,7 @@ module.exports = { return count; }, 0); // Divide by number of items - if (totalDecisionTime > 0) - return parseInt(totalDecisionTime / decidedApplications.length / 86400); + if (totalDecisionTime > 0) return parseInt(totalDecisionTime / decidedApplications.length / 86400); } return 0; }, diff --git a/src/resources/datarequest/datarequest.model.js b/src/resources/datarequest/datarequest.model.js index 05c6c99b..51cfc047 100644 --- a/src/resources/datarequest/datarequest.model.js +++ b/src/resources/datarequest/datarequest.model.js @@ -64,7 +64,7 @@ const 
DataRequestSchema = new Schema({ createdBy: { type : Schema.Types.ObjectId, ref: 'User' }, dateReturned: { type: Date }, returnedBy: { type : Schema.Types.ObjectId, ref: 'User' }, - dateSubmitted: { type: Date }, + dateSubmitted: { type: Date }, submittedBy: { type : Schema.Types.ObjectId, ref: 'User' }, questionAnswers: { type: Object, default: {} } }], diff --git a/src/resources/datarequest/datarequest.route.js b/src/resources/datarequest/datarequest.route.js index a9458b10..8b9bd659 100644 --- a/src/resources/datarequest/datarequest.route.js +++ b/src/resources/datarequest/datarequest.route.js @@ -74,9 +74,14 @@ router.put('/:id/startreview', passport.authenticate('jwt'), datarequestControll // @access Private - Custodian Manager router.put('/:id/stepoverride', passport.authenticate('jwt'), datarequestController.updateAccessRequestStepOverride); +// @route PUT api/v1/data-access-request/:id/deletefile +// @desc Update access request deleting a file by Id +// @access Private - Applicant (Gateway User) +router.put('/:id/deletefile', passport.authenticate('jwt'), datarequestController.updateAccessRequestDeleteFile); + // @route POST api/v1/data-access-request/:id/upload // @desc POST application files to scan bucket -// @access Private - Applicant (Gateway User / Custodian Manager) +// @access Private - Applicant (Gateway User / Custodian Manager) router.post('/:id/upload', passport.authenticate('jwt'), multerMid.array('assets'), datarequestController.uploadFiles); // @route POST api/v1/data-access-request/:id/amendments @@ -84,6 +89,16 @@ router.post('/:id/upload', passport.authenticate('jwt'), multerMid.array('assets // @access Private - Custodian Reviewer/Manager router.post('/:id/amendments', passport.authenticate('jwt'), amendmentController.setAmendment); +// @route POST api/v1/data-access-request/:id/requestAmendments +// @desc Submit a batch of requested amendments back to the form applicant(s) +// @access Private - Manager +router.post('/:id/requestAmendments', passport.authenticate('jwt'), amendmentController.requestAmendments); + +// @route POST api/v1/data-access-request/:id/actions +// @desc Perform an action on a presubmitted application form e.g. 
add/remove repeatable section +// @access Private - Applicant +router.post('/:id/actions', passport.authenticate('jwt'), datarequestController.performAction); + // @route POST api/v1/data-access-request/:id // @desc Submit request record // @access Private - Applicant (Gateway User) diff --git a/src/resources/datarequest/datarequest.schemas.model.js b/src/resources/datarequest/datarequest.schemas.model.js index a197ce14..c2da5c35 100644 --- a/src/resources/datarequest/datarequest.schemas.model.js +++ b/src/resources/datarequest/datarequest.schemas.model.js @@ -20,4 +20,4 @@ const DataRequestSchemas = new Schema({ timestamps: true }); -export const DataRequestSchemaModel = model('data_request_schemas', DataRequestSchemas); +export const DataRequestSchemaModel = model('data_request_schemas', DataRequestSchemas); diff --git a/src/resources/datarequest/datarequest.schemas.route.js b/src/resources/datarequest/datarequest.schemas.route.js index 4b2884a4..d99450af 100644 --- a/src/resources/datarequest/datarequest.schemas.route.js +++ b/src/resources/datarequest/datarequest.schemas.route.js @@ -1,8 +1,8 @@ import express from 'express'; import { DataRequestSchemaModel } from './datarequest.schemas.model'; -import passport from "passport"; -import { utils } from "../auth"; -import { ROLES } from '../user/user.roles' +import passport from 'passport'; +import { utils } from '../auth'; +import { ROLES } from '../user/user.roles'; const router = express.Router(); @@ -10,50 +10,48 @@ const router = express.Router(); // @desc Add a data request schema // @access Private router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { - const { version, status, dataSetId, jsonSchema, publisher } = req.body; - const dataRequestSchema = new DataRequestSchemaModel(); - dataRequestSchema.id = parseInt(Math.random().toString().replace('0.', '')); - dataRequestSchema.status = status; - dataRequestSchema.version = version; - dataRequestSchema.dataSetId = dataSetId; - dataRequestSchema.publisher = publisher; - dataRequestSchema.jsonSchema = JSON.stringify(jsonSchema); - - await dataRequestSchema.save(async (err) => { - if (err) return res.json({ success: false, error: err }); - - return res.json({ success: true, id: dataRequestSchema.id }); - }); - await archiveOtherVersions(dataRequestSchema.id, dataSetId, status); + const { version, status, dataSetId, jsonSchema, publisher } = req.body; + const dataRequestSchema = new DataRequestSchemaModel(); + dataRequestSchema.id = parseInt(Math.random().toString().replace('0.', '')); + dataRequestSchema.status = status; + dataRequestSchema.version = version; + dataRequestSchema.dataSetId = dataSetId; + dataRequestSchema.publisher = publisher; + dataRequestSchema.jsonSchema = JSON.stringify(jsonSchema); + + await dataRequestSchema.save(async err => { + if (err) return res.json({ success: false, error: err }); + + return res.json({ success: true, id: dataRequestSchema.id }); + }); + await archiveOtherVersions(dataRequestSchema.id, dataSetId, status); }); // @router GET /api/v1/data-access-request/schema // @desc Get a data request schema // @access Private router.get('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { - const { dataSetId } = req.body; - let dataRequestSchema = await DataRequestSchemaModel.findOne({ $and: [{ dataSetId: dataSetId }, { status: 'active' }] }); - return res.json({ jsonSchema: dataRequestSchema.jsonSchema }); + const { dataSetId } = req.body; 
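The new POST api/v1/data-access-request/:id/actions route registered above drives the repeatable-section handling in performAction: the caller supplies questionId, questionSetId and a mode (plus questionIds and separatorText when duplicating or removing individual questions), and receives the regenerated jsonSchema and questionAnswers back. A minimal client-side sketch follows; it is illustrative only — the base URL, the cookie-based JWT transport and the literal mode string standing in for constants.formActions.ADDREPEATABLESECTION are assumptions, not taken from this changeset.

const axios = require('axios');

// Hypothetical helper: duplicates a repeatable question set on an in-progress application.
// applicationId, questionSetId, questionId and authCookie are supplied by the caller;
// the mode string is assumed to mirror constants.formActions.ADDREPEATABLESECTION.
async function addRepeatableSection(applicationId, questionSetId, questionId, authCookie) {
	const { data } = await axios.post(
		`https://api.example.org/api/v1/data-access-request/${applicationId}/actions`,
		{
			questionSetId, // the question set to duplicate
			questionId, // the question that triggered the action
			mode: 'addRepeatableSection', // assumed value – see constants.formActions
		},
		{ headers: { Cookie: authCookie } } // JWT session transport is assumed to be cookie based
	);
	// The controller responds with the updated form definition and answers
	const { jsonSchema, questionAnswers } = data.accessRecord;
	return { jsonSchema, questionAnswers };
}

The same call shape covers the other modes; the add/remove repeatable question modes additionally require the questionIds array in the request body.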
+ let dataRequestSchema = await DataRequestSchemaModel.findOne({ $and: [{ dataSetId: dataSetId }, { status: 'active' }] }); + return res.json({ jsonSchema: dataRequestSchema.jsonSchema }); }); module.exports = router; - - async function archiveOtherVersions(id, dataSetId, status) { - try { - if ((status = 'active')) { - await DataRequestSchemaModel.updateMany( - { $and: [{ dataSetId: dataSetId }, { id: { $ne: id } }] }, - { $set: { status: 'archive' } }, - async (err) => { - if (err) return new Error({ success: false, error: err }); - - return { success: true }; - } - ); - } - } catch (err) { - console.log(err); - } + try { + if ((status = 'active')) { + await DataRequestSchemaModel.updateMany( + { $and: [{ dataSetId: dataSetId }, { id: { $ne: id } }] }, + { $set: { status: 'archive' } }, + async err => { + if (err) return new Error({ success: false, error: err }); + + return { success: true }; + } + ); + } + } catch (err) { + console.log(err); + } } diff --git a/src/resources/datarequest/utils/__tests__/datarequest.util.test.js b/src/resources/datarequest/utils/__tests__/datarequest.util.test.js new file mode 100644 index 00000000..cb57d81c --- /dev/null +++ b/src/resources/datarequest/utils/__tests__/datarequest.util.test.js @@ -0,0 +1,71 @@ +import constants from '../../../utilities/constants.util'; +import _ from 'lodash'; + +import datarequestUtil from '../datarequest.util'; +const dataRequest = require('../../__mocks__/datarequest'); + +describe('injectQuestionActions', () => { + // Arrange + const data = _.cloneDeep(dataRequest); + const guidance = { key: 'guidance', icon: 'far fa-question-circle', color: '#475da7', toolTip: 'Guidance', order: 1 }; + const requestAmendment = { + key: 'requestAmendment', + icon: 'fas fa-exclamation-circle', + color: '#F0BB24', + toolTip: 'Request applicant updates answer', + order: 2, + }; + const cases = [ + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.INPROGRESS, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.APPROVED, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.APPROVEDWITHCONDITIONS, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.INREVIEW, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.WITHDRAWN, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.APPLICANT, constants.applicationStatuses.SUBMITTED, '', [guidance]], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.APPROVED, constants.roleTypes.MANAGER, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.APPROVEDWITHCONDITIONS, + constants.roleTypes.MANAGER, + [guidance], + ], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.INREVIEW, + constants.roleTypes.MANAGER, + [guidance, requestAmendment], + ], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.WITHDRAWN, constants.roleTypes.MANAGER, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + constants.applicationStatuses.SUBMITTED, + constants.roleTypes.MANAGER, + [guidance, requestAmendment], + ], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.APPROVED, constants.roleTypes.REVIEWER, [guidance]], + [ + data[0].jsonSchema, + constants.userTypes.CUSTODIAN, + 
constants.applicationStatuses.APPROVEDWITHCONDITIONS, + constants.roleTypes.REVIEWER, + [guidance], + ], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.INREVIEW, constants.roleTypes.REVIEWER, [guidance]], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.WITHDRAWN, constants.roleTypes.REVIEWER, [guidance]], + [data[0].jsonSchema, constants.userTypes.CUSTODIAN, constants.applicationStatuses.SUBMITTED, constants.roleTypes.REVIEWER, [guidance]], + ]; + test.each(cases)( + 'given a jsonSchema object %p and the user is a/an %p, and the application status is %p, it returns the correct question actions', + (data, userType, applicationStatus, role, expectedResults) => { + // Act + const result = datarequestUtil.injectQuestionActions(data, userType, applicationStatus, role); + // Assert + expectedResults.forEach(expectedResult => { + expect(result.questionActions).toContainEqual(expectedResult); + }); + } + ); +}); diff --git a/src/resources/datarequest/utils/datarequest.util.js b/src/resources/datarequest/utils/datarequest.util.js new file mode 100644 index 00000000..699df02b --- /dev/null +++ b/src/resources/datarequest/utils/datarequest.util.js @@ -0,0 +1,216 @@ +import _ from 'lodash'; +import constants from '../../utilities/constants.util'; +import teamController from '../../team/team.controller'; +import moment from 'moment'; + +const injectQuestionActions = (jsonSchema, userType, applicationStatus, role = '') => { + let formattedSchema = {}; + if (userType === constants.userTypes.CUSTODIAN) { + formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][role][applicationStatus] }; + } else { + formattedSchema = { ...jsonSchema, questionActions: constants.userQuestionActions[userType][applicationStatus] }; + } + return formattedSchema; +}; + +const getUserPermissionsForApplication = (application, userId, _id) => { + try { + let authorised = false, + isTeamMember = false, + userType = ''; + // Return default unauthorised with no user type if incorrect params passed + if (!application || !userId || !_id) { + return { authorised, userType }; + } + // Check if the user is a custodian team member and assign permissions if so + if (_.has(application.datasets[0], 'publisher.team')) { + isTeamMember = teamController.checkTeamPermissions('', application.datasets[0].publisher.team, _id); + } else if (_.has(application, 'publisherObj.team')) { + isTeamMember = teamController.checkTeamPermissions('', application.publisherObj.team, _id); + } + if (isTeamMember) { + userType = constants.userTypes.CUSTODIAN; + authorised = true; + } + // If user is not authenticated as a custodian, check if they are an author or the main applicant + if (application.applicationStatus === constants.applicationStatuses.INPROGRESS || _.isEmpty(userType)) { + if (application.authorIds.includes(userId) || application.userId === userId) { + userType = constants.userTypes.APPLICANT; + authorised = true; + } + } + return { authorised, userType }; + } catch (error) { + console.error(error); + return { authorised: false, userType: '' }; + } +}; + +const extractApplicantNames = questionAnswers => { + let fullnames = [], + autoCompleteLookups = { fullname: ['email'] }; + // spread questionAnswers to new var + let qa = { ...questionAnswers }; + // get object keys of questionAnswers + let keys = Object.keys(qa); + // loop questionAnswer keys + for (const key of keys) { + // get value of key + let value = qa[key]; + // split the key up 
for unique purposes + let [qId] = key.split('_'); + // check if key in lookup + let lookup = autoCompleteLookups[`${qId}`]; + // if key exists and it has an object do relevant data setting + if (typeof lookup !== 'undefined' && typeof value === 'object') { + switch (qId) { + case 'fullname': + fullnames.push(value.name); + break; + } + } + } + return fullnames; +}; + +const findQuestion = (questionsArr, questionId) => { + // 1. Define child object to allow recursive calls + let child; + // 2. Exit from function if no children are present + if (!questionsArr) return {}; + // 3. Iterate through questions in the current level to locate question by Id + for (const questionObj of questionsArr) { + // 4. Return the question if it is located + if (questionObj.questionId === questionId) return questionObj; + // 5. Recursively call the find question function on child elements to find question Id + if (typeof questionObj.input === 'object' && typeof questionObj.input.options !== 'undefined') { + questionObj.input.options + .filter(option => { + return typeof option.conditionalQuestions !== 'undefined' && option.conditionalQuestions.length > 0; + }) + .forEach(option => { + if(!child) { + child = findQuestion(option.conditionalQuestions, questionId); + } + }); + } + // 6. Return the child question + if (child) return child; + } +}; + +const updateQuestion = (questionsArr, question) => { + // 1. Extract question Id + let { questionId } = question; + let found = false; + // 2. Recursive function to iterate through each level of questions + questionsArr.forEach(function iter(currentQuestion, index, currentArray) { + // 3. Prevent unnecessary computation by exiting loop if question was found + if (found) { + return; + } + // 4. If the current question matches the target question, replace with updated question + if (currentQuestion.questionId === questionId) { + currentArray[index] = { ...question }; + found = true; + return; + } + // 5. If target question has not been identified, recall function with child questions + if (_.has(currentQuestion, 'input.options')) { + currentQuestion.input.options.forEach(option => { + if (_.has(option, 'conditionalQuestions')) { + Array.isArray(option.conditionalQuestions) && option.conditionalQuestions.forEach(iter); + } + }); + } + }); + // 6. Return the updated question array + return questionsArr; +}; + +const setQuestionState = (question, questionAlert, readOnly) => { + // 1. Find input object for question + const { input = {} } = question; + // 2. Assemble question in readOnly true/false mode + question = { + ...question, + input: { + ...input, + questionAlert, + readOnly, + }, + }; + // 3. Recursively set readOnly mode for children + if (_.has(question, 'input.options')) { + question.input.options.forEach(function iter(currentQuestion) { + // 4. If current question contains an input, set readOnly mode + if (_.has(currentQuestion, 'input')) { + currentQuestion.input.readOnly = readOnly; + } + // 5. Recall the iteration with each child question + if (_.has(currentQuestion, 'conditionalQuestions')) { + currentQuestion.conditionalQuestions.forEach(option => { + if (_.has(option, 'input.options')) { + Array.isArray(option.input.options) && option.input.options.forEach(iter); + } else { + option.input.readOnly = readOnly; + } + }); + } + }); + } + return question; +}; + +const buildQuestionAlert = (userType, iterationStatus, completed, amendment, user, publisher) => { + // 1. 
Use a try catch to prevent conditions where the combination of params lead to no question alert required + try { + // 2. Static mapping allows us to determine correct flag to show based on scenario (params) + const questionAlert = { + ...constants.navigationFlags[userType][iterationStatus][completed], + }; + // 3. Extract data from amendment + let { requestedBy, updatedBy, dateRequested, dateUpdated } = amendment; + // 4. Update audit fields to 'you' if the action was performed by the current user + requestedBy = matchCurrentUser(user, requestedBy); + updatedBy = matchCurrentUser(user, updatedBy); + // 5. Update the generic question alerts to match the scenario + let relevantActioner = !_.isNil(updatedBy) ? updatedBy : userType === constants.userTypes.CUSTODIAN ? requestedBy : publisher; + questionAlert.text = questionAlert.text.replace( + '#NAME#', + relevantActioner + ); + questionAlert.text = questionAlert.text.replace( + '#DATE#', + userType === !_.isNil(dateUpdated) + ? moment(dateUpdated).format('Do MMM YYYY') + : moment(dateRequested).format('Do MMM YYYY') + ); + // 6. Return the built question alert + return questionAlert; + } catch (err) { + return {}; + } +}; + +const matchCurrentUser = (user, auditField) => { + // 1. Extract the name of the current user + const { firstname, lastname } = user; + // 2. Compare current user to audit field supplied e.g. 'updated by' + if (auditField === `${firstname} ${lastname}`) { + // 3. Update audit field value to 'you' if name matches current user + return 'You'; + } + // 4. Return updated audit field + return auditField; +}; + +export default { + injectQuestionActions: injectQuestionActions, + getUserPermissionsForApplication: getUserPermissionsForApplication, + extractApplicantNames: extractApplicantNames, + findQuestion: findQuestion, + updateQuestion: updateQuestion, + buildQuestionAlert: buildQuestionAlert, + setQuestionState: setQuestionState, +}; diff --git a/src/resources/dataset/dataset.route.js b/src/resources/dataset/dataset.route.js index ba3ef38d..ad54d1cf 100644 --- a/src/resources/dataset/dataset.route.js +++ b/src/resources/dataset/dataset.route.js @@ -1,125 +1,155 @@ -import express from 'express' -import { Data } from '../tool/data.model' +import express from 'express'; +import { Data } from '../tool/data.model'; import { loadDataset, loadDatasets } from './dataset.service'; -import { getToolsAdmin } from '../tool/data.repository'; +import { getAllTools } from '../tool/data.repository'; import _ from 'lodash'; import escape from 'escape-html'; +import { Course } from '../course/course.model'; const router = express.Router(); -const rateLimit = require("express-rate-limit"); +const rateLimit = require('express-rate-limit'); const datasetLimiter = rateLimit({ - windowMs: 60 * 60 * 1000, // 1 hour window - max: 10, // start blocking after 10 requests - message: 'Too many calls have been made to this api from this IP, please try again after an hour' + windowMs: 60 * 60 * 1000, // 1 hour window + max: 10, // start blocking after 10 requests + message: 'Too many calls have been made to this api from this IP, please try again after an hour', }); router.post('/', async (req, res) => { - //Check to see if header is in json format - var parsedBody = {}; - if (req.header('content-type') === 'application/json') { - parsedBody = req.body; - } else { - parsedBody = JSON.parse(req.body); - } - //Check for key - if (parsedBody.key !== process.env.cachingkey) { - return res.json({ success: false, error: 'Caching failed' }); - } - - 
loadDatasets(parsedBody.override || false); - return res.json({ success: true, message: 'Caching started' }); + //Check to see if header is in json format + var parsedBody = {}; + if (req.header('content-type') === 'application/json') { + parsedBody = req.body; + } else { + parsedBody = JSON.parse(req.body); + } + //Check for key + if (parsedBody.key !== process.env.cachingkey) { + return res.json({ success: false, error: 'Caching failed' }); + } + + loadDatasets(parsedBody.override || false); + return res.json({ success: true, message: 'Caching started' }); }); // @router GET /api/v1/datasets/pidList // @desc Returns List of PIDs with linked datasetIDs // @access Public router.get('/pidList/', datasetLimiter, async (req, res) => { - var q = Data.find({ type: 'dataset', pid: { $exists: true } }, { pid: 1, datasetid: 1 }).sort({ pid: 1 }); - - q.exec((err, data) => { - var listOfPIDs = []; - - data.forEach(item => { - if (listOfPIDs.find(x => x.pid === item.pid)) { - var index = listOfPIDs.findIndex(x => x.pid === item.pid); - listOfPIDs[index].datasetIds.push(item.datasetid); - } else { - listOfPIDs.push({ pid: item.pid, datasetIds: [item.datasetid] }); - } - }); - - return res.json({ success: true, data: listOfPIDs }); - }); + var q = Data.find({ type: 'dataset', pid: { $exists: true } }, { pid: 1, datasetid: 1 }).sort({ pid: 1 }); + + q.exec((err, data) => { + var listOfPIDs = []; + + data.forEach(item => { + if (listOfPIDs.find(x => x.pid === item.pid)) { + var index = listOfPIDs.findIndex(x => x.pid === item.pid); + listOfPIDs[index].datasetIds.push(item.datasetid); + } else { + listOfPIDs.push({ pid: item.pid, datasetIds: [item.datasetid] }); + } + }); + + return res.json({ success: true, data: listOfPIDs }); + }); }); // @router GET /api/v1/ // @desc Returns a dataset based on either datasetID or PID provided // @access Public router.get('/:datasetID', async (req, res) => { - let { datasetID = '' } = req.params; - if (_.isEmpty(datasetID)) { - return res.status(400).json({ success: false }); - } - - let isLatestVersion = true; - let isDatasetArchived = false; - - let dataVersion = await Data.findOne({ datasetid: datasetID }); - - if (!_.isNil(dataVersion)) { - datasetID = dataVersion.pid; - } - - let dataset = await Data.findOne({ pid: datasetID, activeflag: 'active' }); - - if (_.isNil(dataset)) { - dataset = await Data.findOne({ pid: datasetID, activeflag: 'archive' }).sort({ createdAt: -1 }); - if (_.isNil(dataset)) { - try { - dataset = await loadDataset(datasetID); - } catch (err) { - return res.status(404).send(`Dataset not found for Id: ${escape(datasetID)}`); - } - } - else { - isDatasetArchived = true; - } - isLatestVersion = dataset.activeflag === 'active'; - } - - let pid = dataset.pid; - - let relatedData = await Data.find({ - relatedObjects: { - $elemMatch: { - $or: [ - { - objectId: { $in: [datasetID] }, - }, - { - pid: { $in: [pid] }, - }, - ], - }, - }, - }); - - relatedData.forEach(dat => { - dat.relatedObjects.forEach(relatedObject => { - if ((relatedObject.objectId === datasetID && dat.id !== datasetID) || (relatedObject.pid === pid && dat.id !== pid)) { - if (typeof dataset.relatedObjects === 'undefined') dataset.relatedObjects = []; - dataset.relatedObjects.push({ - objectId: dat.id, - reason: relatedObject.reason, - objectType: dat.type, - user: relatedObject.user, - updated: relatedObject.updated, - }); - } - }); - }); - - return res.json({ success: true, isLatestVersion, isDatasetArchived, data: dataset }); + let { datasetID = '' } = req.params; + if 
(_.isEmpty(datasetID)) { + return res.status(400).json({ success: false }); + } + + let isLatestVersion = true; + let isDatasetArchived = false; + + // try to find the dataset using the datasetid + let dataVersion = await Data.findOne({ datasetid: datasetID }); + + // if found then set the datasetID to the pid of the found dataset + if (!_.isNil(dataVersion)) { + datasetID = dataVersion.pid; + } + + // find the active dataset using the pid + let dataset = await Data.findOne({ pid: datasetID, activeflag: 'active' }); + + if (_.isNil(dataset)) { + // if no active version found look for the next latest version using the pid and set the isDatasetArchived flag to true + dataset = await Data.findOne({ pid: datasetID, activeflag: 'archive' }).sort({ createdAt: -1 }); + if (_.isNil(dataset)) { + try { + // if still not found then look up the MDC for the dataset + dataset = await loadDataset(datasetID); + } catch (err) { + return res.status(404).send(`Dataset not found for Id: ${escape(datasetID)}`); + } + } else { + isDatasetArchived = true; + } + isLatestVersion = dataset.activeflag === 'active'; + } + + let pid = dataset.pid; + + // get a list of all the datasetids connected to a pid + let dataVersions = await Data.find({ pid }, { _id: 0, datasetid: 1 }); + let dataVersionsArray = dataVersions.map(a => a.datasetid); + dataVersionsArray.push(pid); + + // find the related resources using the pid or datasetids for legacy entries + let relatedData = await Data.find({ + relatedObjects: { + $elemMatch: { + $or: [ + { + objectId: { $in: dataVersionsArray }, + }, + { + pid: pid, + }, + ], + }, + }, + activeflag: 'active', + }); + + let relatedDataFromCourses = await Course.find({ + relatedObjects: { + $elemMatch: { + $or: [ + { + objectId: { $in: dataVersionsArray }, + }, + { + pid: pid, + }, + ], + }, + }, + activeflag: 'active', + }); + + relatedData = [...relatedData, ...relatedDataFromCourses]; + + relatedData.forEach(dat => { + dat.relatedObjects.forEach(relatedObject => { + if ((relatedObject.objectId === dataset.datasetid && dat.id !== dataset.datasetid) || (relatedObject.pid === pid && dat.id !== pid)) { + if (typeof dataset.relatedObjects === 'undefined') dataset.relatedObjects = []; + dataset.relatedObjects.push({ + objectId: dat.id, + reason: relatedObject.reason, + objectType: dat.type, + user: relatedObject.user, + updated: relatedObject.updated, + }); + } + }); + }); + + return res.json({ success: true, isLatestVersion, isDatasetArchived, data: dataset }); }); // @router GET /api/v1/ @@ -127,14 +157,14 @@ router.get('/:datasetID', async (req, res) => { // This unauthenticated route was created specifically for API-docs // @access Public router.get('/', async (req, res) => { - req.params.type = 'dataset'; - await getAllTools(req) - .then(data => { - return res.json({ success: true, data }); - }) - .catch(err => { - return res.json({ success: false, err }); - }); + req.params.type = 'dataset'; + await getAllTools(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); }); module.exports = router; diff --git a/src/resources/dataset/dataset.service.js b/src/resources/dataset/dataset.service.js index 1d542274..68e30b5d 100644 --- a/src/resources/dataset/dataset.service.js +++ b/src/resources/dataset/dataset.service.js @@ -1,770 +1,958 @@ -import { Data } from '../tool/data.model' -import { MetricsData } from '../stats/metrics.model' +import { Data } from '../tool/data.model'; +import { MetricsData } from 
'../stats/metrics.model'; import axios from 'axios'; import * as Sentry from '@sentry/node'; import { v4 as uuidv4 } from 'uuid'; export async function loadDataset(datasetID) { - var metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; - const datasetCall = axios.get(metadataCatalogueLink + '/api/facets/'+ datasetID +'/profile/uk.ac.hdrukgateway/HdrUkProfilePluginService', { timeout:5000 }).catch(err => { console.log('Unable to get dataset details '+err.message) }); - const metadataQualityCall = axios.get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/metadata_quality.json', { timeout:5000 }).catch(err => { console.log('Unable to get metadata quality value '+err.message) }); - const metadataSchemaCall = axios.get(metadataCatalogueLink + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/schema.org/'+ datasetID, { timeout:5000 }).catch(err => { console.log('Unable to get metadata schema '+err.message) }); - const dataClassCall = axios.get(metadataCatalogueLink + '/api/dataModels/'+datasetID+'/dataClasses?max=300', { timeout:5000 }).catch(err => { console.log('Unable to get dataclass '+err.message) }); - const versionLinksCall = axios.get(metadataCatalogueLink + '/api/catalogueItems/'+datasetID+'/semanticLinks', { timeout:5000 }).catch(err => { console.log('Unable to get version links '+err.message) }); - const phenotypesCall = await axios.get('https://raw.githubusercontent.com/spiros/hdr-caliber-phenome-portal/master/_data/dataset2phenotypes.json', { timeout:5000 }).catch(err => { console.log('Unable to get phenotypes '+err.message) }); - const dataUtilityCall = await axios.get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json', { timeout:5000 }).catch(err => { console.log('Unable to get data utility '+err.message) }); - const datasetV2Call = axios.get(metadataCatalogueLink + '/api/facets/'+datasetID+'/metadata?all=true', { timeout:5000 }).catch(err => { console.log('Unable to get dataset version 2 '+err.message) }); - const [dataset, metadataQualityList, metadataSchema, dataClass, versionLinks, phenotypesList, dataUtilityList, datasetV2] = await axios.all([datasetCall, metadataQualityCall, metadataSchemaCall, dataClassCall, versionLinksCall, phenotypesCall,dataUtilityCall, datasetV2Call]); - - var technicaldetails = []; - - await dataClass.data.items.reduce( - (p, dataclassMDC) => p.then( - () => (new Promise(resolve => { - setTimeout(async function () { - const dataClassElementCall = axios.get(metadataCatalogueLink + '/api/dataModels/'+datasetID+'/dataClasses/'+dataclassMDC.id+'/dataElements?all=true', { timeout:5000 }).catch(err => { console.log('Unable to get dataclass element '+err.message) }); - const [dataClassElement] = await axios.all([dataClassElementCall]); - var dataClassElementArray = [] - - dataClassElement.data.items.forEach((element) => { - dataClassElementArray.push( - { - "id": element.id, - "domainType": element.domainType, - "label": element.label, - "description": element.description, - "dataType": { - "id": element.dataType.id, - "domainType": element.dataType.domainType, - "label": element.dataType.label - } - } - ); - }); - - technicaldetails.push({ - "id": dataclassMDC.id, - "domainType": dataclassMDC.domainType, - "label": dataclassMDC.label, - "description": dataclassMDC.description, - "elements": dataClassElementArray - }) - - resolve(null) - }, 500) - })) - ), - Promise.resolve(null) - ); - - let datasetv2Object = populateV2datasetObject(datasetV2.data.items) - - 
let uuid = uuidv4(); - let listOfVersions =[]; - let pid = uuid; - let datasetVersion = "0.0.1"; - - if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { - versionLinks.data.items.forEach((item) => { - if (!listOfVersions.find(x => x.id === item.source.id)) { - listOfVersions.push({"id":item.source.id, "version":item.source.documentationVersion}); - } - if (!listOfVersions.find(x => x.id === item.target.id)) { - listOfVersions.push({"id":item.target.id, "version":item.target.documentationVersion}); - } - }) - - for (const item of listOfVersions) { - if (item.id !== dataset.data.id) { - let existingDataset = await Data.findOne({ datasetid: item.id }); - - if (existingDataset && existingDataset.pid) pid = existingDataset.pid; - else { - await Data.findOneAndUpdate({ datasetid: item.id }, - { pid: uuid, datasetVersion: item.version } - ) - } - } - else { - datasetVersion = item.version; - } - } - } - - var uniqueID=''; - while (uniqueID === '') { - uniqueID = parseInt(Math.random().toString().replace('0.', '')); - if (await Data.find({ id: uniqueID }).length === 0) { - uniqueID = ''; - } - } - - var keywordArray = splitString(dataset.data.keywords) - var physicalSampleAvailabilityArray = splitString(dataset.data.physicalSampleAvailability) - var geographicCoverageArray = splitString(dataset.data.geographicCoverage) - - const metadataQuality = metadataQualityList.data.find(x => x.id === datasetID); - const phenotypes = phenotypesList.data[datasetID] || []; - const dataUtility = dataUtilityList.data.find(x => x.id === datasetID); - - var data = new Data(); - data.pid = pid; - data.datasetVersion = datasetVersion; - data.id = uniqueID; - data.datasetid = dataset.data.id; - data.type = 'dataset'; - data.activeflag = 'archive'; - - data.name = dataset.data.title; - data.description = dataset.data.description; - data.license = dataset.data.license; - data.tags.features = keywordArray; - data.datasetfields.publisher = dataset.data.publisher; - data.datasetfields.geographicCoverage = geographicCoverageArray; - data.datasetfields.physicalSampleAvailability = physicalSampleAvailabilityArray; - data.datasetfields.abstract = dataset.data.abstract; - data.datasetfields.releaseDate = dataset.data.releaseDate; - data.datasetfields.accessRequestDuration = dataset.data.accessRequestDuration; - data.datasetfields.conformsTo = dataset.data.conformsTo; - data.datasetfields.accessRights = dataset.data.accessRights; - data.datasetfields.jurisdiction = dataset.data.jurisdiction; - data.datasetfields.datasetStartDate = dataset.data.datasetStartDate; - data.datasetfields.datasetEndDate = dataset.data.datasetEndDate; - data.datasetfields.statisticalPopulation = dataset.data.statisticalPopulation; - data.datasetfields.ageBand = dataset.data.ageBand; - data.datasetfields.contactPoint = dataset.data.contactPoint; - data.datasetfields.periodicity = dataset.data.periodicity; - - data.datasetfields.metadataquality = metadataQuality ? metadataQuality : {}; - data.datasetfields.metadataschema = metadataSchema && metadataSchema.data ? metadataSchema.data : {}; - data.datasetfields.technicaldetails = technicaldetails; - data.datasetfields.versionLinks = versionLinks && versionLinks.data && versionLinks.data.items ? versionLinks.data.items : []; - data.datasetfields.phenotypes = phenotypes; - data.datasetfields.datautility = dataUtility ? 
dataUtility : {}; - data.datasetv2 = datasetv2Object; - - return await data.save(); + var metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; + const datasetCall = axios + .get(metadataCatalogueLink + '/api/facets/' + datasetID + '/profile/uk.ac.hdrukgateway/HdrUkProfilePluginService', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get dataset details ' + err.message); + }); + const metadataQualityCall = axios + .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/metadata_quality.json', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get metadata quality value ' + err.message); + }); + const metadataSchemaCall = axios + .get(metadataCatalogueLink + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/schema.org/' + datasetID, { timeout: 5000 }) + .catch(err => { + console.log('Unable to get metadata schema ' + err.message); + }); + const dataClassCall = axios.get(metadataCatalogueLink + '/api/dataModels/' + datasetID + '/dataClasses', { timeout: 5000 }).catch(err => { + console.log('Unable to get dataclass ' + err.message); + }); + const versionLinksCall = axios + .get(metadataCatalogueLink + '/api/catalogueItems/' + datasetID + '/semanticLinks', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get version links ' + err.message); + }); + const phenotypesCall = await axios + .get('https://raw.githubusercontent.com/spiros/hdr-caliber-phenome-portal/master/_data/dataset2phenotypes.json', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get phenotypes ' + err.message); + }); + const dataUtilityCall = await axios + .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get data utility ' + err.message); + }); + const datasetV2Call = axios + .get(metadataCatalogueLink + '/api/facets/' + datasetID + '/metadata?all=true', { timeout: 5000 }) + .catch(err => { + console.log('Unable to get dataset version 2 ' + err.message); + }); + const [ + dataset, + metadataQualityList, + metadataSchema, + dataClass, + versionLinks, + phenotypesList, + dataUtilityList, + datasetV2, + ] = await axios.all([ + datasetCall, + metadataQualityCall, + metadataSchemaCall, + dataClassCall, + versionLinksCall, + phenotypesCall, + dataUtilityCall, + datasetV2Call, + ]); + + var technicaldetails = []; + + await dataClass.data.items.reduce( + (p, dataclassMDC) => + p.then( + () => + new Promise(resolve => { + setTimeout(async function () { + const dataClassElementCall = axios + .get(metadataCatalogueLink + '/api/dataModels/' + datasetID + '/dataClasses/' + dataclassMDC.id + '/dataElements', { + timeout: 5000, + }) + .catch(err => { + console.log('Unable to get dataclass element ' + err.message); + }); + const [dataClassElement] = await axios.all([dataClassElementCall]); + var dataClassElementArray = []; + + dataClassElement.data.items.forEach(element => { + dataClassElementArray.push({ + id: element.id, + domainType: element.domainType, + label: element.label, + description: element.description, + dataType: { + id: element.dataType.id, + domainType: element.dataType.domainType, + label: element.dataType.label, + }, + }); + }); + + technicaldetails.push({ + id: dataclassMDC.id, + domainType: dataclassMDC.domainType, + label: dataclassMDC.label, + description: dataclassMDC.description, + elements: dataClassElementArray, + }); + + resolve(null); + }, 500); + }) + ), + Promise.resolve(null) + ); + + let 
datasetv2Object = populateV2datasetObject(datasetV2.data.items); + + let uuid = uuidv4(); + let listOfVersions = []; + let pid = uuid; + let datasetVersion = '0.0.1'; + + if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { + versionLinks.data.items.forEach(item => { + if (!listOfVersions.find(x => x.id === item.source.id)) { + listOfVersions.push({ id: item.source.id, version: item.source.documentationVersion }); + } + if (!listOfVersions.find(x => x.id === item.target.id)) { + listOfVersions.push({ id: item.target.id, version: item.target.documentationVersion }); + } + }); + + for (const item of listOfVersions) { + if (item.id !== dataset.data.id) { + let existingDataset = await Data.findOne({ datasetid: item.id }); + if (existingDataset && existingDataset.pid) pid = existingDataset.pid; + else { + await Data.findOneAndUpdate({ datasetid: item.id }, { pid: uuid, datasetVersion: item.version }); + } + } else { + datasetVersion = item.version; + } + } + } + + var uniqueID = ''; + while (uniqueID === '') { + uniqueID = parseInt(Math.random().toString().replace('0.', '')); + if ((await Data.find({ id: uniqueID }).length) === 0) { + uniqueID = ''; + } + } + + var keywordArray = splitString(dataset.data.keywords); + var physicalSampleAvailabilityArray = splitString(dataset.data.physicalSampleAvailability); + var geographicCoverageArray = splitString(dataset.data.geographicCoverage); + + const metadataQuality = metadataQualityList.data.find(x => x.id === datasetID); + const phenotypes = phenotypesList.data[datasetID] || []; + const dataUtility = dataUtilityList.data.find(x => x.id === datasetID); + + var data = new Data(); + data.pid = pid; + data.datasetVersion = datasetVersion; + data.id = uniqueID; + data.datasetid = dataset.data.id; + data.type = 'dataset'; + data.activeflag = 'archive'; + + data.name = dataset.data.title; + data.description = dataset.data.description; + data.license = dataset.data.license; + data.tags.features = keywordArray; + data.datasetfields.publisher = dataset.data.publisher; + data.datasetfields.geographicCoverage = geographicCoverageArray; + data.datasetfields.physicalSampleAvailability = physicalSampleAvailabilityArray; + data.datasetfields.abstract = dataset.data.abstract; + data.datasetfields.releaseDate = dataset.data.releaseDate; + data.datasetfields.accessRequestDuration = dataset.data.accessRequestDuration; + data.datasetfields.conformsTo = dataset.data.conformsTo; + data.datasetfields.accessRights = dataset.data.accessRights; + data.datasetfields.jurisdiction = dataset.data.jurisdiction; + data.datasetfields.datasetStartDate = dataset.data.datasetStartDate; + data.datasetfields.datasetEndDate = dataset.data.datasetEndDate; + data.datasetfields.statisticalPopulation = dataset.data.statisticalPopulation; + data.datasetfields.ageBand = dataset.data.ageBand; + data.datasetfields.contactPoint = dataset.data.contactPoint; + data.datasetfields.periodicity = dataset.data.periodicity; + + data.datasetfields.metadataquality = metadataQuality ? metadataQuality : {}; + data.datasetfields.metadataschema = metadataSchema && metadataSchema.data ? metadataSchema.data : {}; + data.datasetfields.technicaldetails = technicaldetails; + data.datasetfields.versionLinks = versionLinks && versionLinks.data && versionLinks.data.items ? versionLinks.data.items : []; + data.datasetfields.phenotypes = phenotypes; + data.datasetfields.datautility = dataUtility ? 
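// Editor's note: loadDataset above draws a random integer id and is meant to keep it only if no
// existing document already uses it. A minimal sketch of that intent, written with Mongoose's
// standard countDocuments query helper, is below; the DataModel parameter stands in for the Data
// model imported at the top of this file and the helper name is illustrative only.
const generateUniqueId = async DataModel => {
	let uniqueId = 0;
	let exists = true;
	while (exists) {
		// Derive a random integer id and keep it only once no stored dataset uses it.
		uniqueId = parseInt(Math.random().toString().replace('0.', ''), 10);
		exists = (await DataModel.countDocuments({ id: uniqueId })) > 0;
	}
	return uniqueId;
};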
dataUtility : {}; + data.datasetv2 = datasetv2Object; + + return await data.save(); } export async function loadDatasets(override) { - console.log("Starting run at "+Date()) - var metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; - - var datasetsMDCCount = await new Promise(function (resolve, reject) { - axios.post(metadataCatalogueLink + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/customSearch?searchTerm=&domainType=DataModel&limit=1') - .then(function (response) { - resolve(response.data.count); - }) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'The caching run has failed because it was unable to get a count from the MDC', - level: Sentry.Severity.Fatal - }); - Sentry.captureException(err); - reject(err) - }) - }) - .catch(() => { - return 'Update failed'; - }); - - if (datasetsMDCCount === 'Update failed') return; - - //Compare counts from HDR and MDC, if greater drop of 10%+ then stop process and email support queue - var datasetsHDRCount = await Data.countDocuments({ type: 'dataset', activeflag: 'active' }); - - if ((datasetsMDCCount/datasetsHDRCount*100) < 90 && !override) { - Sentry.addBreadcrumb({ - category: 'Caching', - message: `The caching run has failed because the counts from the MDC (${datasetsMDCCount}) where ${100 - ((datasetsMDCCount/datasetsHDRCount)*100)}% lower than the number stored in the DB (${datasetsHDRCount})`, - level: Sentry.Severity.Fatal - }); - Sentry.captureException(); - return; - } - - //datasetsMDCCount = 10; //For testing to limit the number brought down - - var datasetsMDCList = await new Promise(function (resolve, reject) { - axios.post(metadataCatalogueLink + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/customSearch?searchTerm=&domainType=DataModel&limit=' + datasetsMDCCount) - .then(function (response) { - resolve(response.data); - }) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'The caching run has failed because it was unable to pull the datasets from the MDC', - level: Sentry.Severity.Fatal - }); - Sentry.captureException(err); - reject(err) - }) - }) - .catch(() => { - return 'Update failed'; - }); - - if (datasetsMDCList === 'Update failed') return; - - const metadataQualityList = await axios - .get( - "https://raw.githubusercontent.com/HDRUK/datasets/master/reports/metadata_quality.json", - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata quality value '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log("Unable to get metadata quality value " + err.message); //Uncomment for local testing - }); - - const phenotypesList = await axios - .get( - "https://raw.githubusercontent.com/spiros/hdr-caliber-phenome-portal/master/_data/dataset2phenotypes.json", - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata quality value '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log("Unable to get metadata quality value " + err.message); //Uncomment for local testing - }); - - const dataUtilityList = await axios - .get( - "https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json", - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get data utility '+ err.message, - level: Sentry.Severity.Error - }); - 
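// Editor's note: loadDatasets above aborts the refresh when the Metadata Catalogue reports at
// least 10% fewer datasets than are currently active in the gateway, unless an override flag is
// passed. A compact sketch of that guard with illustrative names is below; the zero-count check
// is an added safety assumption, not part of the change above.
function shouldAbortRefresh(mdcCount, hdrCount, override = false) {
	if (override || hdrCount === 0) return false;
	// e.g. mdcCount = 850, hdrCount = 1000 -> 85% of the stored total, so the run stops.
	const percentageOfStored = (mdcCount / hdrCount) * 100;
	return percentageOfStored < 90;
}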
Sentry.captureException(err); - //console.log("Unable to get data utility " + err.message); //Uncomment for local testing - }); - - var datasetsMDCIDs = [] - var counter = 0; - - await datasetsMDCList.results.reduce( - (p, datasetMDC) => p.then( - () => (new Promise(resolve => { - setTimeout(async function () { - try { - counter++; - var datasetHDR = await Data.findOne({ datasetid: datasetMDC.id }); - datasetsMDCIDs.push({ datasetid: datasetMDC.id }); - - const metadataQuality = metadataQualityList.data.find(x => x.id === datasetMDC.id); - const dataUtility = dataUtilityList.data.find(x => x.id === datasetMDC.id); - const phenotypes = phenotypesList.data[datasetMDC.id] || []; - - const metadataSchemaCall = axios - .get( - metadataCatalogueLink + - "/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/schema.org/" + - datasetMDC.id, - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get metadata schema '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log('Unable to get metadata schema ' + err.message); - }); - - const dataClassCall = axios - .get( - metadataCatalogueLink + - "/api/dataModels/" + - datasetMDC.id + - "/dataClasses?max=300", - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get dataclass '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log('Unable to get dataclass ' + err.message); - }); - - const versionLinksCall = axios - .get( - metadataCatalogueLink + - "/api/catalogueItems/" + - datasetMDC.id + - "/semanticLinks", - { timeout: 10000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get version links '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log('Unable to get version links ' + err.message); - }); - - const datasetV2Call = axios - .get( - metadataCatalogueLink + - "/api/facets/" + - datasetMDC.id + - "/metadata?all=true", - { timeout: 5000 } - ) - .catch(err => { - Sentry.addBreadcrumb({ - category: 'Caching', - message: 'Unable to get dataset version 2 '+ err.message, - level: Sentry.Severity.Error - }); - Sentry.captureException(err); - //console.log('Unable to get dataset version 2 ' + err.message); - }); - - const [ - metadataSchema, - dataClass, - versionLinks, - datasetV2 - ] = await axios.all([ - metadataSchemaCall, - dataClassCall, - versionLinksCall, - datasetV2Call - ]); - - var technicaldetails = []; - - await dataClass.data.items.reduce( - (p, dataclassMDC) => p.then( - () => (new Promise(resolve => { - setTimeout(async function () { - const dataClassElementCall = axios.get(metadataCatalogueLink + '/api/dataModels/'+datasetMDC.id+'/dataClasses/'+dataclassMDC.id+'/dataElements?max=300', { timeout:5000 }).catch(err => { console.log('Unable to get dataclass element '+err.message) }); - const [dataClassElement] = await axios.all([dataClassElementCall]); - var dataClassElementArray = [] - - dataClassElement.data.items.forEach((element) => { - dataClassElementArray.push( - { - "id": element.id, - "domainType": element.domainType, - "label": element.label, - "description": element.description, - "dataType": { - "id": element.dataType.id, - "domainType": element.dataType.domainType, - "label": element.dataType.label - } - } - ); - }); - - technicaldetails.push({ - "id": dataclassMDC.id, - "domainType": dataclassMDC.domainType, - "label": 
dataclassMDC.label, - "description": dataclassMDC.description, - "elements": dataClassElementArray - }) - - - resolve(null) - }, 500) - })) - ), - Promise.resolve(null) - ); - - let datasetv2Object = populateV2datasetObject(datasetV2.data.items) - - if (datasetHDR) { - //Edit - if (!datasetHDR.pid) { - let uuid = uuidv4(); - let listOfVersions =[]; - datasetHDR.pid = uuid; - datasetHDR.datasetVersion = "0.0.1"; - - if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { - versionLinks.data.items.forEach((item) => { - if (!listOfVersions.find(x => x.id === item.source.id)) { - listOfVersions.push({"id":item.source.id, "version":item.source.documentationVersion}); - } - if (!listOfVersions.find(x => x.id === item.target.id)) { - listOfVersions.push({"id":item.target.id, "version":item.target.documentationVersion}); - } - }) - - listOfVersions.forEach(async (item) => { - if (item.id !== datasetMDC.id) { - await Data.findOneAndUpdate({ datasetid: item.id }, - { pid: uuid, datasetVersion: item.version } - ) - } - else { - datasetHDR.pid = uuid; - datasetHDR.datasetVersion = item.version; - } - }) - } - } - - let keywordArray = splitString(datasetMDC.keywords) - let physicalSampleAvailabilityArray = splitString(datasetMDC.physicalSampleAvailability) - let geographicCoverageArray = splitString(datasetMDC.geographicCoverage) - - await Data.findOneAndUpdate({ datasetid: datasetMDC.id }, - { - pid: datasetHDR.pid, - datasetVersion: datasetHDR.datasetVersion, - name: datasetMDC.title, - description: datasetMDC.description, - activeflag: 'active', - license: datasetMDC.license, - tags: { - features: keywordArray - }, - datasetfields: { - publisher: datasetMDC.publisher, - geographicCoverage: geographicCoverageArray, - physicalSampleAvailability: physicalSampleAvailabilityArray, - abstract: datasetMDC.abstract, - releaseDate: datasetMDC.releaseDate, - accessRequestDuration: datasetMDC.accessRequestDuration, - conformsTo: datasetMDC.conformsTo, - accessRights: datasetMDC.accessRights, - jurisdiction: datasetMDC.jurisdiction, - datasetStartDate: datasetMDC.datasetStartDate, - datasetEndDate: datasetMDC.datasetEndDate, - statisticalPopulation: datasetMDC.statisticalPopulation, - ageBand: datasetMDC.ageBand, - contactPoint: datasetMDC.contactPoint, - periodicity: datasetMDC.periodicity, - - metadataquality: metadataQuality ? metadataQuality : {}, - datautility: dataUtility ? dataUtility : {}, - metadataschema: metadataSchema && metadataSchema.data ? metadataSchema.data : {}, - technicaldetails: technicaldetails, - versionLinks: versionLinks && versionLinks.data && versionLinks.data.items ? 
versionLinks.data.items : [], - phenotypes - }, - datasetv2: datasetv2Object - }, - ); - } - else { - //Add - let uuid = uuidv4(); - let listOfVersions =[]; - let pid = uuid; - let datasetVersion = "0.0.1"; - - if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { - versionLinks.data.items.forEach((item) => { - if (!listOfVersions.find(x => x.id === item.source.id)) { - listOfVersions.push({"id":item.source.id, "version":item.source.documentationVersion}); - } - if (!listOfVersions.find(x => x.id === item.target.id)) { - listOfVersions.push({"id":item.target.id, "version":item.target.documentationVersion}); - } - }) - - for (const item of listOfVersions) { - if (item.id !== datasetMDC.id) { - - var existingDataset = await Data.findOne({ datasetid: item.id }); - if (existingDataset && existingDataset.pid) pid = existingDataset.pid; - else { - await Data.findOneAndUpdate({ datasetid: item.id }, - { pid: uuid, datasetVersion: item.version } - ) - } - } - else { - datasetVersion = item.version; - } - } - } - - var uniqueID=''; - while (uniqueID === '') { - uniqueID = parseInt(Math.random().toString().replace('0.', '')); - if (await Data.find({ id: uniqueID }).length === 0) { - uniqueID = ''; - } - } - - var keywordArray = splitString(datasetMDC.keywords) - var physicalSampleAvailabilityArray = splitString(datasetMDC.physicalSampleAvailability) - var geographicCoverageArray = splitString(datasetMDC.geographicCoverage) - - var data = new Data(); - data.pid = pid; - data.datasetVersion = datasetVersion; - data.id = uniqueID; - data.datasetid = datasetMDC.id; - data.type = 'dataset'; - data.activeflag = 'active'; - - data.name = datasetMDC.title; - data.description = datasetMDC.description; - data.license = datasetMDC.license; - data.tags.features = keywordArray; - data.datasetfields.publisher = datasetMDC.publisher; - data.datasetfields.geographicCoverage = geographicCoverageArray; - data.datasetfields.physicalSampleAvailability = physicalSampleAvailabilityArray; - data.datasetfields.abstract = datasetMDC.abstract; - data.datasetfields.releaseDate = datasetMDC.releaseDate; - data.datasetfields.accessRequestDuration = datasetMDC.accessRequestDuration; - data.datasetfields.conformsTo = datasetMDC.conformsTo; - data.datasetfields.accessRights = datasetMDC.accessRights; - data.datasetfields.jurisdiction = datasetMDC.jurisdiction; - data.datasetfields.datasetStartDate = datasetMDC.datasetStartDate; - data.datasetfields.datasetEndDate = datasetMDC.datasetEndDate; - data.datasetfields.statisticalPopulation = datasetMDC.statisticalPopulation; - data.datasetfields.ageBand = datasetMDC.ageBand; - data.datasetfields.contactPoint = datasetMDC.contactPoint; - data.datasetfields.periodicity = datasetMDC.periodicity; - - data.datasetfields.metadataquality = metadataQuality ? metadataQuality : {}; - data.datasetfields.datautility = dataUtility ? dataUtility : {}; - data.datasetfields.metadataschema = metadataSchema && metadataSchema.data ? metadataSchema.data : {}; - data.datasetfields.technicaldetails = technicaldetails; - data.datasetfields.versionLinks = versionLinks && versionLinks.data && versionLinks.data.items ? 
versionLinks.data.items : []; - data.datasetfields.phenotypes = phenotypes; - data.datasetv2 = datasetv2Object; - await data.save(); - } - console.log(`Finished ${counter} of ${datasetsMDCCount} datasets (${datasetMDC.id})`); - resolve(null) - } - catch (err) { - Sentry.addBreadcrumb({ - category: 'Caching', - message: `Failed to add ${datasetMDC.id} to the DB with the error of ${err.message}`, - level: Sentry.Severity.Fatal - }); - Sentry.captureException(err); - //console.log(`Failed to add ${datasetMDC.id} to the DB with the error of ${err.message}`); //Uncomment for local testing - } - }, 500) - })) - ), - Promise.resolve(null) - ); - - var datasetsHDRIDs = await Data.aggregate([{ $match: { type: 'dataset' } },{ $project: { "_id": 0, "datasetid": 1 } }]); - - let datasetsNotFound = datasetsHDRIDs.filter(o1 => !datasetsMDCIDs.some(o2 => o1.datasetid === o2.datasetid)); - - await Promise.all( datasetsNotFound.map( async (dataset) => { - //Archive - await Data.findOneAndUpdate({ datasetid: dataset.datasetid }, - { - activeflag: 'archive', - } - ); - })) - - saveUptime(); - - console.log("Update Completed at "+Date()) - return; -}; - -function populateV2datasetObject (v2Data) { - let datasetV2List = v2Data.filter((item) => item.namespace === 'org.healthdatagateway') - - let datasetv2Object = {}; - if (datasetV2List.length > 0) { - datasetv2Object = { - identifier: datasetV2List.find(x => x.key === 'properties/identifier') ? datasetV2List.find(x => x.key === 'properties/identifier').value : '', - version: datasetV2List.find(x => x.key === 'properties/version') ? datasetV2List.find(x => x.key === 'properties/version').value : '', - issued: datasetV2List.find(x => x.key === 'properties/issued') ? datasetV2List.find(x => x.key === 'properties/issued').value : '', - modified: datasetV2List.find(x => x.key === 'properties/modified') ? datasetV2List.find(x => x.key === 'properties/modified').value : '', - revisions: [], - summary: { - title: datasetV2List.find(x => x.key === 'properties/summary/title') ? datasetV2List.find(x => x.key === 'properties/summary/title').value : '', - abstract: datasetV2List.find(x => x.key === 'properties/summary/abstract') ? datasetV2List.find(x => x.key === 'properties/summary/abstract').value : '', - publisher: { - identifier: datasetV2List.find(x => x.key === 'properties/summary/publisher/identifier') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/identifier').value : '', - name: datasetV2List.find(x => x.key === 'properties/summary/publisher/name') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/name').value : '', - logo: datasetV2List.find(x => x.key === 'properties/summary/publisher/logo') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/logo').value : '', - description: datasetV2List.find(x => x.key === 'properties/summary/publisher/description') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/description').value : '', - contactPoint: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/publisher/contactPoint') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/contactPoint').value : []), - memberOf: datasetV2List.find(x => x.key === 'properties/summary/publisher/memberOf') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/memberOf').value : '', - accessRights: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRights') ? 
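// Editor's note: populateV2datasetObject below repeats the same
// `datasetV2List.find(x => x.key === ...)` lookup for every field it maps. A small helper of the
// shape sketched here could express each lookup in one call; this is an illustrative sketch only
// (getMetadataValue is not a name used in this codebase), shown against the same key paths.
const getMetadataValue = (metadataList, key, fallback = '') => {
	const entry = metadataList.find(x => x.key === key);
	return entry ? entry.value : fallback;
};

// Example usage:
// identifier: getMetadataValue(datasetV2List, 'properties/identifier'),
// keywords: checkForArray(getMetadataValue(datasetV2List, 'properties/summary/keywords', [])),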
datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRights').value : []), - deliveryLeadTime: datasetV2List.find(x => x.key === 'properties/summary/publisher/deliveryLeadTime') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/deliveryLeadTime').value : '', - accessService: datasetV2List.find(x => x.key === 'properties/summary/publisher/accessService') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/accessService').value : '', - accessRequestCost: datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRequestCost') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRequestCost').value : '', - dataUseLimitation: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseLimitation') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseLimitation').value : []), - dataUseRequirements: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseRequirements') ? datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseRequirements').value : []) - }, - contactPoint: datasetV2List.find(x => x.key === 'properties/summary/contactPoint') ? datasetV2List.find(x => x.key === 'properties/summary/contactPoint').value : '', - keywords: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/keywords') ? datasetV2List.find(x => x.key === 'properties/summary/keywords').value : []), - alternateIdentifiers: checkForArray(datasetV2List.find(x => x.key === 'properties/summary/alternateIdentifiers') ? datasetV2List.find(x => x.key === 'properties/summary/alternateIdentifiers').value : []), - doiName: datasetV2List.find(x => x.key === 'properties/summary/doiName') ? datasetV2List.find(x => x.key === 'properties/summary/doiName').value : '' - }, - documentation: { - description: datasetV2List.find(x => x.key === 'properties/documentation/description') ? datasetV2List.find(x => x.key === 'properties/documentation/description').value : '', - associatedMedia: checkForArray(datasetV2List.find(x => x.key === 'properties/documentation/associatedMedia') ? datasetV2List.find(x => x.key === 'properties/documentation/associatedMedia').value : []), - isPartOf: checkForArray(datasetV2List.find(x => x.key === 'properties/documentation/isPartOf') ? datasetV2List.find(x => x.key === 'properties/documentation/isPartOf').value : []) - }, - coverage: { - spatial: datasetV2List.find(x => x.key === 'properties/coverage/spatial') ? datasetV2List.find(x => x.key === 'properties/coverage/spatial').value : '', - typicalAgeRange: datasetV2List.find(x => x.key === 'properties/coverage/typicalAgeRange') ? datasetV2List.find(x => x.key === 'properties/coverage/typicalAgeRange').value : '', - physicalSampleAvailability: checkForArray(datasetV2List.find(x => x.key === 'properties/coverage/physicalSampleAvailability') ? datasetV2List.find(x => x.key === 'properties/coverage/physicalSampleAvailability').value : []), - followup: datasetV2List.find(x => x.key === 'properties/coverage/followup') ? datasetV2List.find(x => x.key === 'properties/coverage/followup').value : '', - pathway: datasetV2List.find(x => x.key === 'properties/coverage/pathway') ? datasetV2List.find(x => x.key === 'properties/coverage/pathway').value : '' - }, - provenance: { - origin : { - purpose: checkForArray(datasetV2List.find(x => x.key === 'properties/provenance/origin/purpose') ? 
datasetV2List.find(x => x.key === 'properties/provenance/origin/purpose').value : []), - source: checkForArray(datasetV2List.find(x => x.key === 'properties/provenance/origin/source') ? datasetV2List.find(x => x.key === 'properties/provenance/origin/source').value : []), - collectionSituation: checkForArray(datasetV2List.find(x => x.key === 'properties/provenance/origin/collectionSituation') ? datasetV2List.find(x => x.key === 'properties/provenance/origin/collectionSituation').value : []) - }, - temporal:{ - accrualPeriodicity: datasetV2List.find(x => x.key === 'properties/provenance/temporal/accrualPeriodicity') ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/accrualPeriodicity').value : '', - distributionReleaseDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/distributionReleaseDate') ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/distributionReleaseDate').value : '', - startDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/startDate') ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/startDate').value : '', - endDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/endDate') ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/endDate').value : '', - timeLag: datasetV2List.find(x => x.key === 'properties/provenance/temporal/timeLag') ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/timeLag').value : '' - } - }, - accessibility : { - usage: { - dataUseLimitation: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseLimitation') ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseLimitation').value : []), - dataUseRequirements: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseRequirements') ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseRequirements').value : []), - resourceCreator: datasetV2List.find(x => x.key === 'properties/accessibility/usage/resourceCreator') ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/resourceCreator').value : '', - investigations: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/usage/investigations') ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/investigations').value : []), - isReferencedBy: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/usage/isReferencedBy') ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/isReferencedBy').value : []) - }, - access: { - accessRights: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRights') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRights').value : []), - accessService: datasetV2List.find(x => x.key === 'properties/accessibility/access/accessService') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/accessService').value : '', - accessRequestCost: datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRequestCost') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRequestCost').value : '', - deliveryLeadTime: datasetV2List.find(x => x.key === 'properties/accessibility/access/deliveryLeadTime') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/deliveryLeadTime').value : '', - jurisdiction: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/access/jurisdiction') ? 
datasetV2List.find(x => x.key === 'properties/accessibility/access/jurisdiction').value : []), - dataProcessor: datasetV2List.find(x => x.key === 'properties/accessibility/access/dataProcessor') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/dataProcessor').value : '', - dataController: datasetV2List.find(x => x.key === 'properties/accessibility/access/dataController') ? datasetV2List.find(x => x.key === 'properties/accessibility/access/dataController').value : '' - }, - formatAndStandards: { - vocabularyEncodingScheme: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/vocabularyEncodingScheme') ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/vocabularyEncodingScheme').value : []), - conformsTo: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/conformsTo') ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/conformsTo').value : []), - language: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/language') ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/language').value : []), - format: checkForArray(datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/format') ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/format').value : []) - } - - }, - enrichmentAndLinkage: { - qualifiedRelation: checkForArray(datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/qualifiedRelation') ? datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/qualifiedRelation').value : []), - derivation: checkForArray(datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/derivation') ? datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/derivation').value : []), - tools: checkForArray(datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/tools') ? 
datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/tools').value : []) - }, - observations: [] - } - } - - return datasetv2Object; + console.log('Starting run at ' + Date()); + var metadataCatalogueLink = process.env.metadataURL || 'https://metadata-catalogue.org/hdruk'; + + var datasetsMDCCount = await new Promise(function (resolve, reject) { + axios + .post( + metadataCatalogueLink + + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/customSearch?searchTerm=&domainType=DataModel&limit=1' + ) + .then(function (response) { + resolve(response.data.count); + }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'The caching run has failed because it was unable to get a count from the MDC', + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(err); + reject(err); + }); + }).catch(() => { + return 'Update failed'; + }); + + if (datasetsMDCCount === 'Update failed') return; + + //Compare counts from HDR and MDC, if greater drop of 10%+ then stop process and email support queue + var datasetsHDRCount = await Data.countDocuments({ type: 'dataset', activeflag: 'active' }); + + if ((datasetsMDCCount / datasetsHDRCount) * 100 < 90 && !override) { + Sentry.addBreadcrumb({ + category: 'Caching', + message: `The caching run has failed because the counts from the MDC (${datasetsMDCCount}) where ${ + 100 - (datasetsMDCCount / datasetsHDRCount) * 100 + }% lower than the number stored in the DB (${datasetsHDRCount})`, + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(); + return; + } + + //datasetsMDCCount = 10; //For testing to limit the number brought down + + var datasetsMDCList = await new Promise(function (resolve, reject) { + axios + .post( + metadataCatalogueLink + + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/customSearch?searchTerm=&domainType=DataModel&limit=' + + datasetsMDCCount + ) + .then(function (response) { + resolve(response.data); + }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'The caching run has failed because it was unable to pull the datasets from the MDC', + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(err); + reject(err); + }); + }).catch(() => { + return 'Update failed'; + }); + + if (datasetsMDCList === 'Update failed') return; + + const metadataQualityList = await axios + .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/metadata_quality.json', { timeout: 10000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata quality value ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log("Unable to get metadata quality value " + err.message); //Uncomment for local testing + }); + + const phenotypesList = await axios + .get('https://raw.githubusercontent.com/spiros/hdr-caliber-phenome-portal/master/_data/dataset2phenotypes.json', { timeout: 10000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata quality value ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log("Unable to get metadata quality value " + err.message); //Uncomment for local testing + }); + + const dataUtilityList = await axios + .get('https://raw.githubusercontent.com/HDRUK/datasets/master/reports/data_utility.json', { timeout: 10000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get data utility ' + err.message, + 
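// Editor's note: the catch handlers in loadDatasets above all follow the same shape — record a
// 'Caching' breadcrumb, then capture the exception in Sentry. A shared helper of that shape is
// sketched below with illustrative naming; it assumes the same @sentry/node import used at the
// top of this file and is not part of the change itself.
import * as Sentry from '@sentry/node';

const reportCachingError = (message, err, level = Sentry.Severity.Error) => {
	Sentry.addBreadcrumb({
		category: 'Caching',
		message: err ? `${message} ${err.message}` : message,
		level,
	});
	Sentry.captureException(err);
};

// e.g. .catch(err => reportCachingError('Unable to get metadata schema', err));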
level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log("Unable to get data utility " + err.message); //Uncomment for local testing + }); + + var datasetsMDCIDs = []; + var counter = 0; + + await datasetsMDCList.results.reduce( + (p, datasetMDC) => + p.then( + () => + new Promise(resolve => { + setTimeout(async function () { + try { + counter++; + var datasetHDR = await Data.findOne({ datasetid: datasetMDC.id }); + datasetsMDCIDs.push({ datasetid: datasetMDC.id }); + + const metadataQuality = metadataQualityList.data.find(x => x.id === datasetMDC.id); + const dataUtility = dataUtilityList.data.find(x => x.id === datasetMDC.id); + const phenotypes = phenotypesList.data[datasetMDC.id] || []; + + const metadataSchemaCall = axios + .get(metadataCatalogueLink + '/api/profiles/uk.ac.hdrukgateway/HdrUkProfilePluginService/schema.org/' + datasetMDC.id, { + timeout: 10000, + }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get metadata schema ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log('Unable to get metadata schema ' + err.message); + }); + + const dataClassCall = axios + .get(metadataCatalogueLink + '/api/dataModels/' + datasetMDC.id + '/dataClasses?max=300', { timeout: 10000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get dataclass ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log('Unable to get dataclass ' + err.message); + }); + + const versionLinksCall = axios + .get(metadataCatalogueLink + '/api/catalogueItems/' + datasetMDC.id + '/semanticLinks', { timeout: 10000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get version links ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log('Unable to get version links ' + err.message); + }); + + const datasetV2Call = axios + .get(metadataCatalogueLink + '/api/facets/' + datasetMDC.id + '/metadata?all=true', { timeout: 5000 }) + .catch(err => { + Sentry.addBreadcrumb({ + category: 'Caching', + message: 'Unable to get dataset version 2 ' + err.message, + level: Sentry.Severity.Error, + }); + Sentry.captureException(err); + //console.log('Unable to get dataset version 2 ' + err.message); + }); + + const [metadataSchema, dataClass, versionLinks, datasetV2] = await axios.all([ + metadataSchemaCall, + dataClassCall, + versionLinksCall, + datasetV2Call, + ]); + + var technicaldetails = []; + + await dataClass.data.items.reduce( + (p, dataclassMDC) => + p.then( + () => + new Promise(resolve => { + setTimeout(async function () { + const dataClassElementCall = axios + .get( + metadataCatalogueLink + + '/api/dataModels/' + + datasetMDC.id + + '/dataClasses/' + + dataclassMDC.id + + '/dataElements?max=300', + { timeout: 5000 } + ) + .catch(err => { + console.log('Unable to get dataclass element ' + err.message); + }); + const [dataClassElement] = await axios.all([dataClassElementCall]); + var dataClassElementArray = []; + + dataClassElement.data.items.forEach(element => { + dataClassElementArray.push({ + id: element.id, + domainType: element.domainType, + label: element.label, + description: element.description, + dataType: { + id: element.dataType.id, + domainType: element.dataType.domainType, + label: element.dataType.label, + }, + }); + }); + + technicaldetails.push({ + id: dataclassMDC.id, + domainType: dataclassMDC.domainType, + label: 
dataclassMDC.label, + description: dataclassMDC.description, + elements: dataClassElementArray, + }); + + resolve(null); + }, 500); + }) + ), + Promise.resolve(null) + ); + + let datasetv2Object = populateV2datasetObject(datasetV2.data.items); + + if (datasetHDR) { + //Edit + if (!datasetHDR.pid) { + let uuid = uuidv4(); + let listOfVersions = []; + datasetHDR.pid = uuid; + datasetHDR.datasetVersion = '0.0.1'; + + if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { + versionLinks.data.items.forEach(item => { + if (!listOfVersions.find(x => x.id === item.source.id)) { + listOfVersions.push({ id: item.source.id, version: item.source.documentationVersion }); + } + if (!listOfVersions.find(x => x.id === item.target.id)) { + listOfVersions.push({ id: item.target.id, version: item.target.documentationVersion }); + } + }); + + listOfVersions.forEach(async item => { + if (item.id !== datasetMDC.id) { + await Data.findOneAndUpdate({ datasetid: item.id }, { pid: uuid, datasetVersion: item.version }); + } else { + datasetHDR.pid = uuid; + datasetHDR.datasetVersion = item.version; + } + }); + } + } + + let keywordArray = splitString(datasetMDC.keywords); + let physicalSampleAvailabilityArray = splitString(datasetMDC.physicalSampleAvailability); + let geographicCoverageArray = splitString(datasetMDC.geographicCoverage); + + await Data.findOneAndUpdate( + { datasetid: datasetMDC.id }, + { + pid: datasetHDR.pid, + datasetVersion: datasetHDR.datasetVersion, + name: datasetMDC.title, + description: datasetMDC.description, + activeflag: 'active', + license: datasetMDC.license, + tags: { + features: keywordArray, + }, + datasetfields: { + publisher: datasetMDC.publisher, + geographicCoverage: geographicCoverageArray, + physicalSampleAvailability: physicalSampleAvailabilityArray, + abstract: datasetMDC.abstract, + releaseDate: datasetMDC.releaseDate, + accessRequestDuration: datasetMDC.accessRequestDuration, + conformsTo: datasetMDC.conformsTo, + accessRights: datasetMDC.accessRights, + jurisdiction: datasetMDC.jurisdiction, + datasetStartDate: datasetMDC.datasetStartDate, + datasetEndDate: datasetMDC.datasetEndDate, + statisticalPopulation: datasetMDC.statisticalPopulation, + ageBand: datasetMDC.ageBand, + contactPoint: datasetMDC.contactPoint, + periodicity: datasetMDC.periodicity, + + metadataquality: metadataQuality ? metadataQuality : {}, + datautility: dataUtility ? dataUtility : {}, + metadataschema: metadataSchema && metadataSchema.data ? metadataSchema.data : {}, + technicaldetails: technicaldetails, + versionLinks: versionLinks && versionLinks.data && versionLinks.data.items ? 
versionLinks.data.items : [], + phenotypes, + }, + datasetv2: datasetv2Object, + } + ); + } else { + //Add + let uuid = uuidv4(); + let listOfVersions = []; + let pid = uuid; + let datasetVersion = '0.0.1'; + + if (versionLinks && versionLinks.data && versionLinks.data.items && versionLinks.data.items.length > 0) { + versionLinks.data.items.forEach(item => { + if (!listOfVersions.find(x => x.id === item.source.id)) { + listOfVersions.push({ id: item.source.id, version: item.source.documentationVersion }); + } + if (!listOfVersions.find(x => x.id === item.target.id)) { + listOfVersions.push({ id: item.target.id, version: item.target.documentationVersion }); + } + }); + + for (const item of listOfVersions) { + if (item.id !== datasetMDC.id) { + var existingDataset = await Data.findOne({ datasetid: item.id }); + if (existingDataset && existingDataset.pid) pid = existingDataset.pid; + else { + await Data.findOneAndUpdate({ datasetid: item.id }, { pid: uuid, datasetVersion: item.version }); + } + } else { + datasetVersion = item.version; + } + } + } + + var uniqueID = ''; + while (uniqueID === '') { + uniqueID = parseInt(Math.random().toString().replace('0.', '')); + if ((await Data.find({ id: uniqueID }).length) === 0) { + uniqueID = ''; + } + } + + var keywordArray = splitString(datasetMDC.keywords); + var physicalSampleAvailabilityArray = splitString(datasetMDC.physicalSampleAvailability); + var geographicCoverageArray = splitString(datasetMDC.geographicCoverage); + + var data = new Data(); + data.pid = pid; + data.datasetVersion = datasetVersion; + data.id = uniqueID; + data.datasetid = datasetMDC.id; + data.type = 'dataset'; + data.activeflag = 'active'; + + data.name = datasetMDC.title; + data.description = datasetMDC.description; + data.license = datasetMDC.license; + data.tags.features = keywordArray; + data.datasetfields.publisher = datasetMDC.publisher; + data.datasetfields.geographicCoverage = geographicCoverageArray; + data.datasetfields.physicalSampleAvailability = physicalSampleAvailabilityArray; + data.datasetfields.abstract = datasetMDC.abstract; + data.datasetfields.releaseDate = datasetMDC.releaseDate; + data.datasetfields.accessRequestDuration = datasetMDC.accessRequestDuration; + data.datasetfields.conformsTo = datasetMDC.conformsTo; + data.datasetfields.accessRights = datasetMDC.accessRights; + data.datasetfields.jurisdiction = datasetMDC.jurisdiction; + data.datasetfields.datasetStartDate = datasetMDC.datasetStartDate; + data.datasetfields.datasetEndDate = datasetMDC.datasetEndDate; + data.datasetfields.statisticalPopulation = datasetMDC.statisticalPopulation; + data.datasetfields.ageBand = datasetMDC.ageBand; + data.datasetfields.contactPoint = datasetMDC.contactPoint; + data.datasetfields.periodicity = datasetMDC.periodicity; + + data.datasetfields.metadataquality = metadataQuality ? metadataQuality : {}; + data.datasetfields.datautility = dataUtility ? dataUtility : {}; + data.datasetfields.metadataschema = metadataSchema && metadataSchema.data ? metadataSchema.data : {}; + data.datasetfields.technicaldetails = technicaldetails; + data.datasetfields.versionLinks = + versionLinks && versionLinks.data && versionLinks.data.items ? 
versionLinks.data.items : []; + data.datasetfields.phenotypes = phenotypes; + data.datasetv2 = datasetv2Object; + await data.save(); + } + console.log(`Finished ${counter} of ${datasetsMDCCount} datasets (${datasetMDC.id})`); + resolve(null); + } catch (err) { + Sentry.addBreadcrumb({ + category: 'Caching', + message: `Failed to add ${datasetMDC.id} to the DB with the error of ${err.message}`, + level: Sentry.Severity.Fatal, + }); + Sentry.captureException(err); + //console.log(`Failed to add ${datasetMDC.id} to the DB with the error of ${err.message}`); //Uncomment for local testing + } + }, 500); + }) + ), + Promise.resolve(null) + ); + + var datasetsHDRIDs = await Data.aggregate([{ $match: { type: 'dataset' } }, { $project: { _id: 0, datasetid: 1 } }]); + + let datasetsNotFound = datasetsHDRIDs.filter(o1 => !datasetsMDCIDs.some(o2 => o1.datasetid === o2.datasetid)); + + await Promise.all( + datasetsNotFound.map(async dataset => { + //Archive + await Data.findOneAndUpdate( + { datasetid: dataset.datasetid }, + { + activeflag: 'archive', + } + ); + }) + ); + + saveUptime(); + + console.log('Update Completed at ' + Date()); + return; +} + +function populateV2datasetObject(v2Data) { + let datasetV2List = v2Data.filter(item => item.namespace === 'org.healthdatagateway'); + + let datasetv2Object = {}; + if (datasetV2List.length > 0) { + datasetv2Object = { + identifier: datasetV2List.find(x => x.key === 'properties/identifier') + ? datasetV2List.find(x => x.key === 'properties/identifier').value + : '', + version: datasetV2List.find(x => x.key === 'properties/version') ? datasetV2List.find(x => x.key === 'properties/version').value : '', + issued: datasetV2List.find(x => x.key === 'properties/issued') ? datasetV2List.find(x => x.key === 'properties/issued').value : '', + modified: datasetV2List.find(x => x.key === 'properties/modified') + ? datasetV2List.find(x => x.key === 'properties/modified').value + : '', + revisions: [], + summary: { + title: datasetV2List.find(x => x.key === 'properties/summary/title') + ? datasetV2List.find(x => x.key === 'properties/summary/title').value + : '', + abstract: datasetV2List.find(x => x.key === 'properties/summary/abstract') + ? datasetV2List.find(x => x.key === 'properties/summary/abstract').value + : '', + publisher: { + identifier: datasetV2List.find(x => x.key === 'properties/summary/publisher/identifier') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/identifier').value + : '', + name: datasetV2List.find(x => x.key === 'properties/summary/publisher/name') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/name').value + : '', + logo: datasetV2List.find(x => x.key === 'properties/summary/publisher/logo') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/logo').value + : '', + description: datasetV2List.find(x => x.key === 'properties/summary/publisher/description') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/description').value + : '', + contactPoint: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/publisher/contactPoint') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/contactPoint').value + : [] + ), + memberOf: datasetV2List.find(x => x.key === 'properties/summary/publisher/memberOf') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/memberOf').value + : '', + accessRights: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRights') + ? 
datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRights').value + : [] + ), + deliveryLeadTime: datasetV2List.find(x => x.key === 'properties/summary/publisher/deliveryLeadTime') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/deliveryLeadTime').value + : '', + accessService: datasetV2List.find(x => x.key === 'properties/summary/publisher/accessService') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/accessService').value + : '', + accessRequestCost: datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRequestCost') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/accessRequestCost').value + : '', + dataUseLimitation: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseLimitation') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseLimitation').value + : [] + ), + dataUseRequirements: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseRequirements') + ? datasetV2List.find(x => x.key === 'properties/summary/publisher/dataUseRequirements').value + : [] + ), + }, + contactPoint: datasetV2List.find(x => x.key === 'properties/summary/contactPoint') + ? datasetV2List.find(x => x.key === 'properties/summary/contactPoint').value + : '', + keywords: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/keywords') + ? datasetV2List.find(x => x.key === 'properties/summary/keywords').value + : [] + ), + alternateIdentifiers: checkForArray( + datasetV2List.find(x => x.key === 'properties/summary/alternateIdentifiers') + ? datasetV2List.find(x => x.key === 'properties/summary/alternateIdentifiers').value + : [] + ), + doiName: datasetV2List.find(x => x.key === 'properties/summary/doiName') + ? datasetV2List.find(x => x.key === 'properties/summary/doiName').value + : '', + }, + documentation: { + description: datasetV2List.find(x => x.key === 'properties/documentation/description') + ? datasetV2List.find(x => x.key === 'properties/documentation/description').value + : '', + associatedMedia: checkForArray( + datasetV2List.find(x => x.key === 'properties/documentation/associatedMedia') + ? datasetV2List.find(x => x.key === 'properties/documentation/associatedMedia').value + : [] + ), + isPartOf: checkForArray( + datasetV2List.find(x => x.key === 'properties/documentation/isPartOf') + ? datasetV2List.find(x => x.key === 'properties/documentation/isPartOf').value + : [] + ), + }, + coverage: { + spatial: datasetV2List.find(x => x.key === 'properties/coverage/spatial') + ? datasetV2List.find(x => x.key === 'properties/coverage/spatial').value + : '', + typicalAgeRange: datasetV2List.find(x => x.key === 'properties/coverage/typicalAgeRange') + ? datasetV2List.find(x => x.key === 'properties/coverage/typicalAgeRange').value + : '', + physicalSampleAvailability: checkForArray( + datasetV2List.find(x => x.key === 'properties/coverage/physicalSampleAvailability') + ? datasetV2List.find(x => x.key === 'properties/coverage/physicalSampleAvailability').value + : [] + ), + followup: datasetV2List.find(x => x.key === 'properties/coverage/followup') + ? datasetV2List.find(x => x.key === 'properties/coverage/followup').value + : '', + pathway: datasetV2List.find(x => x.key === 'properties/coverage/pathway') + ? 
datasetV2List.find(x => x.key === 'properties/coverage/pathway').value + : '', + }, + provenance: { + origin: { + purpose: checkForArray( + datasetV2List.find(x => x.key === 'properties/provenance/origin/purpose') + ? datasetV2List.find(x => x.key === 'properties/provenance/origin/purpose').value + : [] + ), + source: checkForArray( + datasetV2List.find(x => x.key === 'properties/provenance/origin/source') + ? datasetV2List.find(x => x.key === 'properties/provenance/origin/source').value + : [] + ), + collectionSituation: checkForArray( + datasetV2List.find(x => x.key === 'properties/provenance/origin/collectionSituation') + ? datasetV2List.find(x => x.key === 'properties/provenance/origin/collectionSituation').value + : [] + ), + }, + temporal: { + accrualPeriodicity: datasetV2List.find(x => x.key === 'properties/provenance/temporal/accrualPeriodicity') + ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/accrualPeriodicity').value + : '', + distributionReleaseDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/distributionReleaseDate') + ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/distributionReleaseDate').value + : '', + startDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/startDate') + ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/startDate').value + : '', + endDate: datasetV2List.find(x => x.key === 'properties/provenance/temporal/endDate') + ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/endDate').value + : '', + timeLag: datasetV2List.find(x => x.key === 'properties/provenance/temporal/timeLag') + ? datasetV2List.find(x => x.key === 'properties/provenance/temporal/timeLag').value + : '', + }, + }, + accessibility: { + usage: { + dataUseLimitation: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseLimitation') + ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseLimitation').value + : [] + ), + dataUseRequirements: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseRequirements') + ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/dataUseRequirements').value + : [] + ), + resourceCreator: datasetV2List.find(x => x.key === 'properties/accessibility/usage/resourceCreator') + ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/resourceCreator').value + : '', + investigations: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/usage/investigations') + ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/investigations').value + : [] + ), + isReferencedBy: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/usage/isReferencedBy') + ? datasetV2List.find(x => x.key === 'properties/accessibility/usage/isReferencedBy').value + : [] + ), + }, + access: { + accessRights: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRights') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRights').value + : [] + ), + accessService: datasetV2List.find(x => x.key === 'properties/accessibility/access/accessService') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/accessService').value + : '', + accessRequestCost: datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRequestCost') + ? 
datasetV2List.find(x => x.key === 'properties/accessibility/access/accessRequestCost').value + : '', + deliveryLeadTime: datasetV2List.find(x => x.key === 'properties/accessibility/access/deliveryLeadTime') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/deliveryLeadTime').value + : '', + jurisdiction: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/access/jurisdiction') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/jurisdiction').value + : [] + ), + dataProcessor: datasetV2List.find(x => x.key === 'properties/accessibility/access/dataProcessor') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/dataProcessor').value + : '', + dataController: datasetV2List.find(x => x.key === 'properties/accessibility/access/dataController') + ? datasetV2List.find(x => x.key === 'properties/accessibility/access/dataController').value + : '', + }, + formatAndStandards: { + vocabularyEncodingScheme: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/vocabularyEncodingScheme') + ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/vocabularyEncodingScheme').value + : [] + ), + conformsTo: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/conformsTo') + ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/conformsTo').value + : [] + ), + language: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/language') + ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/language').value + : [] + ), + format: checkForArray( + datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/format') + ? datasetV2List.find(x => x.key === 'properties/accessibility/formatAndStandards/format').value + : [] + ), + }, + }, + enrichmentAndLinkage: { + qualifiedRelation: checkForArray( + datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/qualifiedRelation') + ? datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/qualifiedRelation').value + : [] + ), + derivation: checkForArray( + datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/derivation') + ? datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/derivation').value + : [] + ), + tools: checkForArray( + datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/tools') + ? 
datasetV2List.find(x => x.key === 'properties/enrichmentAndLinkage/tools').value + : [] + ), + }, + observations: [], + }; + } + + return datasetv2Object; } function checkForArray(value) { - if (typeof value !== 'string') return value; - try { - const type = Object.prototype.toString.call(JSON.parse(value)); - if (type === '[object Object]' || type === '[object Array]') return JSON.parse(value) - } catch (err) { - return value; - } + if (typeof value !== 'string') return value; + try { + const type = Object.prototype.toString.call(JSON.parse(value)); + if (type === '[object Object]' || type === '[object Array]') return JSON.parse(value); + } catch (err) { + return value; + } } -function splitString (array) { - var returnArray = []; - if (array !== null && array !== '' && array !== 'undefined' && array !== undefined) { - if (array.indexOf(',') === -1) { - returnArray.push(array.trim()); - } - else { - array.split(',').forEach((term) => { - returnArray.push(term.trim()); - }); - } - } - return returnArray; +function splitString(array) { + var returnArray = []; + if (array !== null && array !== '' && array !== 'undefined' && array !== undefined) { + if (array.indexOf(',') === -1) { + returnArray.push(array.trim()); + } else { + array.split(',').forEach(term => { + returnArray.push(term.trim()); + }); + } + } + return returnArray; } async function saveUptime() { - const monitoring = require('@google-cloud/monitoring'); - const projectId = 'hdruk-gateway'; - const client = new monitoring.MetricServiceClient(); - - var selectedMonthStart = new Date(); - selectedMonthStart.setMonth(selectedMonthStart.getMonth()-1); - selectedMonthStart.setDate(1); - selectedMonthStart.setHours(0,0,0,0); - - var selectedMonthEnd = new Date(); - selectedMonthEnd.setDate(0); - selectedMonthEnd.setHours(23,59,59,999); - - const request = { - name: client.projectPath(projectId), - filter: 'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND metric.label."checker_location"="eur-belgium"', - - interval: { - startTime: { - seconds: selectedMonthStart.getTime() / 1000, - }, - endTime: { - seconds: selectedMonthEnd.getTime() / 1000, - }, - }, - aggregation: { - alignmentPeriod: { - seconds: '86400s', - }, - crossSeriesReducer: 'REDUCE_NONE', - groupByFields: [ - 'metric.label."checker_location"', - 'resource.label."instance_id"' - ], - perSeriesAligner: 'ALIGN_FRACTION_TRUE', - }, - }; - - // Writes time series data - const [timeSeries] = await client.listTimeSeries(request); - var dailyUptime = []; - var averageUptime; - - timeSeries.forEach(data => { - - data.points.forEach(data => { - dailyUptime.push(data.value.doubleValue) - }) - - averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; - }); - - var metricsData = new MetricsData(); - metricsData.uptime = averageUptime; - await metricsData.save(); -} \ No newline at end of file + const monitoring = require('@google-cloud/monitoring'); + const projectId = 'hdruk-gateway'; + const client = new monitoring.MetricServiceClient(); + + var selectedMonthStart = new Date(); + selectedMonthStart.setMonth(selectedMonthStart.getMonth() - 1); + selectedMonthStart.setDate(1); + selectedMonthStart.setHours(0, 0, 0, 0); + + var selectedMonthEnd = new Date(); + selectedMonthEnd.setDate(0); + selectedMonthEnd.setHours(23, 59, 59, 999); + + const request = { + name: client.projectPath(projectId), + filter: + 
'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND metric.label."checker_location"="eur-belgium"', + + interval: { + startTime: { + seconds: selectedMonthStart.getTime() / 1000, + }, + endTime: { + seconds: selectedMonthEnd.getTime() / 1000, + }, + }, + aggregation: { + alignmentPeriod: { + seconds: '86400s', + }, + crossSeriesReducer: 'REDUCE_NONE', + groupByFields: ['metric.label."checker_location"', 'resource.label."instance_id"'], + perSeriesAligner: 'ALIGN_FRACTION_TRUE', + }, + }; + + // Writes time series data + const [timeSeries] = await client.listTimeSeries(request); + var dailyUptime = []; + var averageUptime; + + timeSeries.forEach(data => { + data.points.forEach(data => { + dailyUptime.push(data.value.doubleValue); + }); + + averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; + }); + + var metricsData = new MetricsData(); + metricsData.uptime = averageUptime; + await metricsData.save(); +} diff --git a/src/resources/discourse/discourse.route.js b/src/resources/discourse/discourse.route.js index 7c22a8a0..087cb278 100644 --- a/src/resources/discourse/discourse.route.js +++ b/src/resources/discourse/discourse.route.js @@ -1,10 +1,16 @@ import express from 'express'; -import { createDiscourseTopic, getDiscourseTopic, deleteDiscoursePost, createDiscoursePost, updateDiscoursePost } from './discourse.service'; +import { + createDiscourseTopic, + getDiscourseTopic, + deleteDiscoursePost, + createDiscoursePost, + updateDiscoursePost, +} from './discourse.service'; import { Data } from '../tool/data.model'; import { Collections } from '../collections/collections.model'; -import { ROLES } from '../user/user.roles' -import passport from "passport"; -import { utils } from "../auth"; +import { ROLES } from '../user/user.roles'; +import passport from 'passport'; +import { utils } from '../auth'; import _ from 'lodash'; const inputSanitizer = require('../utilities/inputSanitizer'); @@ -15,223 +21,206 @@ const router = express.Router(); * @description This route retrieves all the data for a Discourse topic in the context of the system * @return This routes returns an object containing 'Topic' data - see Discourse docs */ -router.get( - '/topic/:topicId', - async (req, res) => { - try { - // 1. Pull topic Id from endpoint route value - const topicId = parseInt(req.params.topicId); - // 2. Get the Discourse topic using the Id - await getDiscourseTopic(topicId).then((topic) => { - // 3. If no topic could be found, return 404 - if(!topic) { - return res.status(404).json({ success: false, error: 'Topic not found.' }); - } - // 4. Return topic data - return res.json({success: true, topic }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error retrieving the topic, please try again later...' }); - } - } -); +router.get('/topic/:topicId', async (req, res) => { + try { + // 1. Pull topic Id from endpoint route value + const topicId = parseInt(req.params.topicId); + // 2. Get the Discourse topic using the Id + await getDiscourseTopic(topicId) + .then(topic => { + // 3. If no topic could be found, return 404 + if (!topic) { + return res.status(404).json({ success: false, error: 'Topic not found.' }); + } + // 4. 
Return topic data + return res.json({ success: true, topic }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error retrieving the topic, please try again later...' }); + } +}); /** * @route /api/v1/discourse/user/topic/:topicId * @description This route retrieves all the data for a Discourse topic in the context of a specific user * @return This routes returns an object containing 'Topic' data - see Discourse docs */ -router.get( - '/user/topic/:topicId', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Pull topic Id from endpoint route value - const topicId = parseInt(req.params.topicId); - // 2. Get the Discourse topic using the Id - await getDiscourseTopic(topicId, req.user).then((topic) => { - // 3. If no topic could be found, return 404 - if(!topic) { - return res.status(404).json({ success: false, error: 'Topic not found.' }); - } - // 4. Return topic data - return res.json({success: true, topic }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error retrieving the topic, please try again later...' }); - } - } -); +router.get('/user/topic/:topicId', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Pull topic Id from endpoint route value + const topicId = parseInt(req.params.topicId); + // 2. Get the Discourse topic using the Id + await getDiscourseTopic(topicId, req.user) + .then(topic => { + // 3. If no topic could be found, return 404 + if (!topic) { + return res.status(404).json({ success: false, error: 'Topic not found.' }); + } + // 4. Return topic data + return res.json({ success: true, topic }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error retrieving the topic, please try again later...' }); + } +}); /** * @route /api/v1/discourse/topic/tool/:toolId * @description This route creates a Discourse new topic if the tool exists and is active. * @return This routes returns an object { link: linkToDiscourseTopic, posts: Array of Discourse posts, (should be empty) } */ -router.put( - '/tool/:toolId', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Pull tool Id from endpoint route value - const toolId = parseInt(req.params.toolId); - // 2. Get the corresponding tool document from MongoDb - await Data.findOne({ id: toolId }).then( async (tool) => { - // 3. If no tool was found, return 404 - if (!tool) { - return res.status(404).json({ success: false, error: 'Tool not found.' }); - } - // 4. Create a new Discourse topic for the tool - const topicId = await createDiscourseTopic(tool); - // 5. Get the details of the new topic from Discourse - const topic = await getDiscourseTopic(topicId, req.user); - // 6. 
Return the topic data - return res.json({success: true, data: topic }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error creating the topic, please try again later...' }); - } - } -); +router.put('/tool/:toolId', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Pull tool Id from endpoint route value + const toolId = parseInt(req.params.toolId); + // 2. Get the corresponding tool document from MongoDb + await Data.findOne({ id: toolId }) + .then(async tool => { + // 3. If no tool was found, return 404 + if (!tool) { + return res.status(404).json({ success: false, error: 'Tool not found.' }); + } + // 4. Create a new Discourse topic for the tool + const topicId = await createDiscourseTopic(tool); + // 5. Get the details of the new topic from Discourse + const topic = await getDiscourseTopic(topicId, req.user); + // 6. Return the topic data + return res.json({ success: true, data: topic }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error creating the topic, please try again later...' }); + } +}); /** * @route /api/v1/discourse/user/posts/ * @description This route creates a Discourse new topic if the tool exists and is active. * @return This routes returns an object { link: linkToDiscourseTopic, posts: Array of Discourse posts (should have at least one) } */ -router.post( - '/user/posts', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - let { toolId, collectionId, topicId, comment } = req.body; - comment = inputSanitizer.removeNonBreakingSpaces(comment); - // 1. Check if the topicId has been passed, if it is 0, the topic needs to be created - if(!topicId) { - // 2. Check if comment is on a tool or collection - if(toolId){ - // 3. Get the tool details from MongoDb to create the new topic - await Data.findOne({ id: toolId }).then( async (tool) => { - // 4. If no tool was found, return 404 - if (!tool) { - return res.status(404).json({ success: false, error: 'Tool not found.' }); - } - // 5. Create a new Discourse topic for the tool - topicId = await createDiscourseTopic(tool); - // 6. Add the user's post to the new topic - await createDiscoursePost(topicId, comment, req.user); - // 7. Get topic for return - const topic = await getDiscourseTopic(topicId, req.user); - // 8. Return success with topic data - return res.json({success: true, topic }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } - else if(collectionId){ - // 3. Get the collection details from MongoDb to create the new topic - await Collections.findOne({ id: parseInt(collectionId) }).then( async (collection) => { - // 4. If no collection was found, return 404 - if (!collection) { - return res.status(404).json({ success: false, error: 'Collection not found.' }); - } - // 5. Create a new Discourse topic for the collection - collection.type = 'collection'; - topicId = await createDiscourseTopic(collection); - // 6. Add the user's post to the new topic - await createDiscoursePost(topicId, comment, req.user); - // 7. Get topic for return - const topic = await getDiscourseTopic(topicId, req.user); - // 8. 
Return success with topic data - return res.json({success: true, topic }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } - } else { - // 2. Add the user's post to the existing topic - await createDiscoursePost(topicId, comment, req.user); - // 3. Get the updated topic data - const topic = await getDiscourseTopic(topicId, req.user); - // 4. Return success - return res.json({success: true, topic }); - } - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error creating the topic, please try again later...' }); - } - } -); +router.post('/user/posts', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + let { toolId, collectionId, topicId, comment } = req.body; + comment = inputSanitizer.removeNonBreakingSpaces(comment); + // 1. Check if the topicId has been passed, if it is 0, the topic needs to be created + if (!topicId) { + // 2. Check if comment is on a tool or collection + if (toolId) { + // 3. Get the tool details from MongoDb to create the new topic + await Data.findOne({ id: toolId }) + .then(async tool => { + // 4. If no tool was found, return 404 + if (!tool) { + return res.status(404).json({ success: false, error: 'Tool not found.' }); + } + // 5. Create a new Discourse topic for the tool + topicId = await createDiscourseTopic(tool); + // 6. Add the user's post to the new topic + await createDiscoursePost(topicId, comment, req.user); + // 7. Get topic for return + const topic = await getDiscourseTopic(topicId, req.user); + // 8. Return success with topic data + return res.json({ success: true, topic }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } else if (collectionId) { + // 3. Get the collection details from MongoDb to create the new topic + await Collections.findOne({ id: parseInt(collectionId) }) + .then(async collection => { + // 4. If no collection was found, return 404 + if (!collection) { + return res.status(404).json({ success: false, error: 'Collection not found.' }); + } + // 5. Create a new Discourse topic for the collection + collection.type = 'collection'; + topicId = await createDiscourseTopic(collection); + // 6. Add the user's post to the new topic + await createDiscoursePost(topicId, comment, req.user); + // 7. Get topic for return + const topic = await getDiscourseTopic(topicId, req.user); + // 8. Return success with topic data + return res.json({ success: true, topic }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } + } else { + // 2. Add the user's post to the existing topic + await createDiscoursePost(topicId, comment, req.user); + // 3. Get the updated topic data + const topic = await getDiscourseTopic(topicId, req.user); + // 4. Return success + return res.json({ success: true, topic }); + } + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error creating the topic, please try again later...' }); + } +}); /** * @route /api/v1/discourse/user/posts/ * @description This route updates a Discourse post * @return This routes returns a success message */ -router.put( - '/user/posts/:postId', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Pull post Id from endpoint route value - const postId = parseInt(req.params.postId); - // 2. 
If valid post Id was not passed, return error - if (!postId) { - return res.status(500).json({ success: false, error: 'Post identifier was not specified' }); - } - // 2. Pull the new content from the request body - const { comment } = req.body; - // 3. Perform update of post in Discourse - const post = await updateDiscoursePost(postId, inputSanitizer.removeNonBreakingSpaces(comment), req.user); - // 4. Get the updated topic data - const topic = await getDiscourseTopic(post.topic_id, req.user); - // 5. Return the topic data - return res.json({success: true, topic }); - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error editing the post, please try again later...' }); - } - } -); +router.put('/user/posts/:postId', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Pull post Id from endpoint route value + const postId = parseInt(req.params.postId); + // 2. If valid post Id was not passed, return error + if (!postId) { + return res.status(500).json({ success: false, error: 'Post identifier was not specified' }); + } + // 2. Pull the new content from the request body + const { comment } = req.body; + // 3. Perform update of post in Discourse + const post = await updateDiscoursePost(postId, inputSanitizer.removeNonBreakingSpaces(comment), req.user); + // 4. Get the updated topic data + const topic = await getDiscourseTopic(post.topic_id, req.user); + // 5. Return the topic data + return res.json({ success: true, topic }); + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error editing the post, please try again later...' }); + } +}); /** * @route /api/v1/discourse/user/post/:postId * @description This route deletes a specific post and must be either owned by the requesting user or the user is an Admin of Discourse * @return This routes returns a message indicating success or failure in delete */ -router.delete( - '/user/posts/:postId', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Pull post Id from endpoint route value - const postId = parseInt(req.params.postId); - // 2. Call Discourse to delete the post - deleteDiscoursePost(postId, req.user).then(() => { - // 3. Return success message - return res.json({success: true }); - }).catch((error) => { - return res.status(500).json({ success: false, error: error.message }); - }); - } catch (err) { - console.error(err); - return res.status(500).json({ success: false, error: 'Error deleting the topic, please try again later...' }); - } - } -); +router.delete('/user/posts/:postId', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Pull post Id from endpoint route value + const postId = parseInt(req.params.postId); + // 2. Call Discourse to delete the post + deleteDiscoursePost(postId, req.user) + .then(() => { + // 3. Return success message + return res.json({ success: true }); + }) + .catch(error => { + return res.status(500).json({ success: false, error: error.message }); + }); + } catch (err) { + console.error(err); + return res.status(500).json({ success: false, error: 'Error deleting the topic, please try again later...' 
}); + } +}); module.exports = router; diff --git a/src/resources/discourse/discourse.service.js b/src/resources/discourse/discourse.service.js index 8d26f44c..39f1313f 100644 --- a/src/resources/discourse/discourse.service.js +++ b/src/resources/discourse/discourse.service.js @@ -3,7 +3,7 @@ import { UserModel } from '../user/user.model'; import axios from 'axios'; import { HmacSHA256 } from 'crypto-js'; -import base64url from "base64url"; +import base64url from 'base64url'; import _ from 'lodash'; import { Collections } from '../collections/collections.model'; @@ -15,38 +15,35 @@ import { Collections } from '../collections/collections.model'; * @return {object} Discourse topic posts and content link */ export async function getDiscourseTopic(topicId, user) { - // Guard clause for invalid identifier passed - if (!topicId) { - throw new Error("Topic can't be null"); - } - // 1. Get the requesting users credentials to retrieve the topic in their context - const config = await getCredentials(user, false); - try { - // 2. Issue GET request to Discourse to return the topic in response - const response = await axios.get( - `${process.env.DISCOURSE_URL}/t/${topicId}.json`, - config - ); - // 3. Remove the first post as it is system generated - let postsLength = response.data.post_stream.posts.length; - let posts = response.data.post_stream.posts.slice(1, postsLength); - // 4. Set the avatar size in each post and place each post into read mode by default - posts.map((post) => { - post.avatar_template = `${process.env.DISCOURSE_URL}${post.avatar_template.replace('{size}', '46')}`; - post.mode = "read"; - }); - // 5. Sort the post array by descending datetime created - posts.sort(function(a,b){ - return new Date(b.created_at) - new Date(a.created_at); - }); - // 6. Return the topic details - return { - link: `${process.env.DISCOURSE_URL}/t/${response.data.slug}/${topicId}`, - posts: posts, - }; - } catch (err) { - console.error(err); - } + // Guard clause for invalid identifier passed + if (!topicId) { + throw new Error("Topic can't be null"); + } + // 1. Get the requesting users credentials to retrieve the topic in their context + const config = await getCredentials(user, false); + try { + // 2. Issue GET request to Discourse to return the topic in response + const response = await axios.get(`${process.env.DISCOURSE_URL}/t/${topicId}.json`, config); + // 3. Remove the first post as it is system generated + let postsLength = response.data.post_stream.posts.length; + let posts = response.data.post_stream.posts.slice(1, postsLength); + // 4. Set the avatar size in each post and place each post into read mode by default + posts.map(post => { + post.avatar_template = `${process.env.DISCOURSE_URL}${post.avatar_template.replace('{size}', '46')}`; + post.mode = 'read'; + }); + // 5. Sort the post array by descending datetime created + posts.sort(function (a, b) { + return new Date(b.created_at) - new Date(a.created_at); + }); + // 6. Return the topic details + return { + link: `${process.env.DISCOURSE_URL}/t/${response.data.slug}/${topicId}`, + posts: posts, + }; + } catch (err) { + console.error(err); + } } /** @@ -56,79 +53,65 @@ export async function getDiscourseTopic(topicId, user) { * @return {int} The unique identifier for the new topic */ export async function createDiscourseTopic(tool) { - // 1. 
Establish system access config for Discourse as this is always used to create a topic - const config = { - headers: { - 'Api-Key': process.env.DISCOURSE_API_KEY, - 'Api-Username': 'system', - 'user-agent': 'node.js', - 'Content-Type': 'application/json', - }, - }; - // 2. Depending on tool type passed, generate initial post content based on tool description and original content link - var rawIs, categoryIs; - if (tool.type === 'tool') { - rawIs = `${tool.description}

Original content: ${process.env.homeURL}/tool/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_TOOLS_ID; - } - else if (tool.type === 'project') { - rawIs = `${tool.description}

Original content: ${process.env.homeURL}/project/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_PROJECTS_ID; - } - else if (tool.type === 'dataset') { - let { datasetfields : { abstract }} = tool; - rawIs = `${tool.description || abstract}

Original content: ${process.env.homeURL}/dataset/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_DATASETS_ID; - } - else if (tool.type === 'paper') { - rawIs = `${tool.description}

Original content: ${process.env.homeURL}/paper/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_PAPERS_ID; - } - else if (tool.type === 'course') { - rawIs = `${tool.description}

Original content: ${process.env.homeURL}/course/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_COURSES_ID; - } - else if (tool.type === 'collection') { - rawIs = `${tool.description}

Original content: ${process.env.homeURL}/collection/${tool.id}`; - categoryIs = process.env.DISCOURSE_CATEGORY_COLLECTIONS_ID; - } - // 3. Assemble payload for creating a topic in Discourse - let title = ''; - if (tool.type === 'course') tool.title - else tool.name - const payload = { - title: tool.name, - raw: rawIs, - category: categoryIs - }; - // 4. POST to Discourse to create the post - try { - const res = await axios.post( - `${process.env.DISCOURSE_URL}/posts.json`, - payload, - config - ); - // 5. If post was successful, update tool in MongoDb with topic identifier - if (res.data.topic_id) { - // 6. Check tool type and Return the topic identifier - if(tool.type === 'collection'){ - await Collections.findOneAndUpdate( - { id: tool.id }, - { $set: { discourseTopicId: res.data.topic_id } } - ); - return res.data.topic_id; - } - else{ - await Data.findOneAndUpdate( - { id: tool.id }, - { $set: { discourseTopicId: res.data.topic_id } } - ); - return res.data.topic_id; - } - } - } catch (err) { - console.error(err); - } + // 1. Establish system access config for Discourse as this is always used to create a topic + const config = { + headers: { + 'Api-Key': process.env.DISCOURSE_API_KEY, + 'Api-Username': 'system', + 'user-agent': 'node.js', + 'Content-Type': 'application/json', + }, + }; + // 2. Depending on tool type passed, generate initial post content based on tool description and original content link + var rawIs, categoryIs; + if (tool.type === 'tool') { + rawIs = `${tool.description}

Original content: ${process.env.homeURL}/tool/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_TOOLS_ID; + } else if (tool.type === 'project') { + rawIs = `${tool.description}

Original content: ${process.env.homeURL}/project/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_PROJECTS_ID; + } else if (tool.type === 'dataset') { + let { + datasetfields: { abstract }, + } = tool; + rawIs = `${tool.description || abstract}

Original content: ${process.env.homeURL}/dataset/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_DATASETS_ID; + } else if (tool.type === 'paper') { + rawIs = `${tool.description}

Original content: ${process.env.homeURL}/paper/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_PAPERS_ID; + } else if (tool.type === 'course') { + rawIs = `${tool.description}

Original content: ${process.env.homeURL}/course/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_COURSES_ID; + } else if (tool.type === 'collection') { + rawIs = `${tool.description}

Original content: ${process.env.homeURL}/collection/${tool.id}`; + categoryIs = process.env.DISCOURSE_CATEGORY_COLLECTIONS_ID; + } + // 3. Assemble payload for creating a topic in Discourse, using the course title where applicable + let title = ''; + if (tool.type === 'course') title = tool.title; + else title = tool.name; + const payload = { + title, + raw: rawIs, + category: categoryIs, + }; + // 4. POST to Discourse to create the post + try { + const res = await axios.post(`${process.env.DISCOURSE_URL}/posts.json`, payload, config); + // 5. If post was successful, update tool in MongoDb with topic identifier + if (res.data.topic_id) { + // 6. Check tool type and Return the topic identifier + if (tool.type === 'collection') { + await Collections.findOneAndUpdate({ id: tool.id }, { $set: { discourseTopicId: res.data.topic_id } }); + return res.data.topic_id; + } else { + await Data.findOneAndUpdate({ id: tool.id }, { $set: { discourseTopicId: res.data.topic_id } }); + return res.data.topic_id; + } + } + } catch (err) { + console.error(err); + } } /** @@ -138,32 +121,28 @@ export async function createDiscourseTopic(tool) { * @param {string} comment The text content for the post * @param {object} user The user object deserialised from the request cookie */ -export async function createDiscoursePost(topicId, comment, user){ - // Guard clause for invalid identifier passed - if (!topicId) { - return new Error("Topic can't be null"); - } - // Validation clause to ensure new post is at least 20 characters as per client side validation - if (comment.length < 20) { - return new Error("A Discourse post must be 20 characters or longer"); - } - // 1.
Get the Discourse user credentials based on the requesting user - const config = await getCredentials(user, true); - // 2. Assemble payload to create new post - const payload = { - raw: comment - }; - // 3. PUT to Discourse to update existing post in the context of the current user - try { - const response = await axios.put( - `${process.env.DISCOURSE_URL}/posts/${postId}.json`, - payload, - config - ); - const { data: { post }} = response; - // 4. Return the post data - return post; - } catch (err) { - console.error(err); - } +export async function updateDiscoursePost(postId, comment, user) { + // Guard clause for invalid identifier passed + if (!postId) { + return new Error("Post can't be null"); + } + // Validation clause to ensure new post is at least 20 characters as per client side validation + if (comment.length < 20) { + return new Error('A Discourse post must be 20 characters or longer'); + } + // 1. Get the Discourse user credentials based on the requesting user + const config = await getCredentials(user, true); + // 2. Assemble payload to create new post + const payload = { + raw: comment, + }; + // 3. PUT to Discourse to update existing post in the context of the current user + try { + const response = await axios.put(`${process.env.DISCOURSE_URL}/posts/${postId}.json`, payload, config); + const { + data: { post }, + } = response; + // 4. Return the post data + return post; + } catch (err) { + console.error(err); + } } /** @@ -211,8 +188,8 @@ export async function updateDiscoursePost(postId, comment, user){ * @return {object} Credentials for the new user to access Discourse APIs */ export async function registerDiscourseUser(user) { - // 1. Call internal function to generate Discourse SSO user for a gateway user - return await getCredentials(user, false); + // 1. Call internal function to generate Discourse SSO user for a gateway user + return await getCredentials(user, false); } /** @@ -222,21 +199,18 @@ export async function registerDiscourseUser(user) { * @param {object} user The user object deserialised from the request cookie */ export async function deleteDiscoursePost(postId, user) { - // Guard clause for invalid identifier passed - if (!postId) { - return new Error("Post can't be null"); - } - // 1. Get the Discourse user credentials based on the requesting user - const config = await getCredentials(user, true); - // 3. DELETE to Discourse to remove post in the context of the current user - try { - const response = await axios.delete( - `${process.env.DISCOURSE_URL}/posts/${postId}`, - config - ); - } catch (err) { - console.error(err); - } + // Guard clause for invalid identifier passed + if (!postId) { + return new Error("Post can't be null"); + } + // 1. Get the Discourse user credentials based on the requesting user + const config = await getCredentials(user, true); + // 3. DELETE to Discourse to remove post in the context of the current user + try { + const response = await axios.delete(`${process.env.DISCOURSE_URL}/posts/${postId}`, config); + } catch (err) { + console.error(err); + } } /** @@ -247,40 +221,36 @@ export async function deleteDiscoursePost(postId, user) { * @param {string} username The username for the new user based on {firstname.lastname} * @return {object} User object from Discourse */ -async function createUser({id, email, username}) { - // 1.
Establish system access config for Discourse as this is always used to create users - const config = { - headers: { - 'Api-Key': process.env.DISCOURSE_API_KEY, - 'Api-Username': 'system', - 'user-agent': 'node.js', - 'Content-Type': 'application/json', - }, - }; - const sso_secret = process.env.DISCOURSE_SSO_SECRET; - // 1. Create SSO payload using users details - const sso_params = `external_id=${id}&email=${email}&username=${username}`; - // 2. Base64 encode params to create expected payload - const sso_payload = base64url(sso_params); - // 3. Generate SSO signature from SSO payload - const sig = HmacSHA256(sso_payload, sso_secret).toString(); - // 4. Assemble Disource endpoint payload - const payload = { - sso: sso_payload, - sig - } - // 5. POST to Discourse sync SSO endpoint to create the new user - try { - const res = await axios.post( - `${process.env.DISCOURSE_URL}/admin/users/sync_sso`, - payload, - config - ); - // 6. Return the new user object from Discourse - return res.data; - } catch (err) { - console.error(err); - } +async function createUser({ id, email, username }) { + // 1. Establish system access config for Discourse as this is always used to create users + const config = { + headers: { + 'Api-Key': process.env.DISCOURSE_API_KEY, + 'Api-Username': 'system', + 'user-agent': 'node.js', + 'Content-Type': 'application/json', + }, + }; + const sso_secret = process.env.DISCOURSE_SSO_SECRET; + // 1. Create SSO payload using the user's details + const sso_params = `external_id=${id}&email=${email}&username=${username}`; + // 2. Base64 encode params to create expected payload + const sso_payload = base64url(sso_params); + // 3. Generate SSO signature from SSO payload + const sig = HmacSHA256(sso_payload, sso_secret).toString(); + // 4. Assemble Discourse endpoint payload + const payload = { + sso: sso_payload, + sig, + }; + // 5. POST to Discourse sync SSO endpoint to create the new user + try { + const res = await axios.post(`${process.env.DISCOURSE_URL}/admin/users/sync_sso`, payload, config); + // 6. Return the new user object from Discourse + return res.data; + } catch (err) { + console.error(err); + } } /** @@ -290,36 +260,36 @@ async function createUser({id, email, username}) { * @return {string} User API Key */ async function generateAPIKey(discourseUsername) { - // 1. Establish system access config for Discourse as this is always used to generate user API keys - const config = { - headers: { - 'Api-Key': process.env.DISCOURSE_API_KEY, - 'Api-Username': 'system', - 'user-agent': 'node.js', - 'Content-Type': 'application/json', - }, - }; - // 1. Assemble payload to create API key for user in Discourse - const payload = { - "key": { - "username": discourseUsername, - "description": "Auto generated API key by HDR-UK Innovation Gateway" - } - } - // 2. POST request to Discourse and expect API key in response - try { - const res = await axios.post( - `${process.env.DISCOURSE_URL}/admin/api/keys`, - payload, - config - ); - const { data: { key: { key } } } = res; - // 3. Return key - return key; - } catch (err) { - console.error(err); - return ''; - } + // 1. Establish system access config for Discourse as this is always used to generate user API keys + const config = { + headers: { + 'Api-Key': process.env.DISCOURSE_API_KEY, + 'Api-Username': 'system', + 'user-agent': 'node.js', + 'Content-Type': 'application/json', + }, + }; + // 1.
Assemble payload to create API key for user in Discourse + const payload = { + key: { + username: discourseUsername, + description: 'Auto generated API key by HDR-UK Innovation Gateway', + }, + }; + // 2. POST request to Discourse and expect API key in response + try { + const res = await axios.post(`${process.env.DISCOURSE_URL}/admin/api/keys`, payload, config); + const { + data: { + key: { key }, + }, + } = res; + // 3. Return key + return key; + } catch (err) { + console.error(err); + return ''; + } } /** @@ -330,54 +300,53 @@ async function generateAPIKey(discourseUsername) { * @return {object} Configuration object for subsequent Discourse API calls */ async function getCredentials(user, strict) { - // 1. Return default system credentials if no user provided and endpoint should allow system access - if(!user && !strict) { - return { - headers: { - 'Api-Key': process.env.DISCOURSE_API_KEY, - 'Api-Username': 'system', - 'user-agent': 'node.js', - 'Content-Type': 'application/json', - } - } - } else if(!user && strict) { - throw new Error("Unauthorised access attempted"); - } - // 2. Deconstruct user object deserialised from cookie in request - let { id, discourseUsername, discourseKey, firstname, lastname, email } = user; - // 3. If gateway user has no Discourse username then register and generate API key - if(_.isEmpty(discourseUsername)) { - try { - const username = `${firstname.toLowerCase()}.${lastname.toLowerCase()}`; - // 4. Create Discourse user - const discourseUser = await createUser({id, email, username}); - discourseUsername = discourseUser.username; - // 5. Generate Discourse API key for user - discourseKey = await generateAPIKey(discourseUser.username); - // 6. Update MongoDb to contain users Discourse credentials - await UserModel.findOneAndUpdate({ id }, { $set: { discourseUsername, discourseKey }}); - } catch (err) { - console.error(err); - } - // 3. If user has username but no API key, generate new one - } else if(_.isEmpty(discourseKey)) { - try { - // 4. Generate Discourse API key for user - discourseKey = await generateAPIKey(discourseUsername); - // 5. Update MongoDb to contain users Discourse credentials - await UserModel.findOneAndUpdate({ id }, { $set: { discourseUsername, discourseKey }}); - } catch (err) { - console.error(err); - } - } - // Return identification payload of registered Discourse user - return { - headers: { - 'Api-Key': discourseKey, - 'Api-Username': discourseUsername, - 'user-agent': 'node.js', - 'Content-Type': 'application/json', - }, - }; + // 1. Return default system credentials if no user provided and endpoint should allow system access + if (!user && !strict) { + return { + headers: { + 'Api-Key': process.env.DISCOURSE_API_KEY, + 'Api-Username': 'system', + 'user-agent': 'node.js', + 'Content-Type': 'application/json', + }, + }; + } else if (!user && strict) { + throw new Error('Unauthorised access attempted'); + } + // 2. Deconstruct user object deserialised from cookie in request + let { id, discourseUsername, discourseKey, firstname, lastname, email } = user; + // 3. If gateway user has no Discourse username then register and generate API key + if (_.isEmpty(discourseUsername)) { + try { + const username = `${firstname.toLowerCase()}.${lastname.toLowerCase()}`; + // 4. Create Discourse user + const discourseUser = await createUser({ id, email, username }); + discourseUsername = discourseUser.username; + // 5. Generate Discourse API key for user + discourseKey = await generateAPIKey(discourseUser.username); + // 6. 
Update MongoDb to contain users Discourse credentials + await UserModel.findOneAndUpdate({ id }, { $set: { discourseUsername, discourseKey } }); + } catch (err) { + console.error(err); + } + // 3. If user has username but no API key, generate new one + } else if (_.isEmpty(discourseKey)) { + try { + // 4. Generate Discourse API key for user + discourseKey = await generateAPIKey(discourseUsername); + // 5. Update MongoDb to contain users Discourse credentials + await UserModel.findOneAndUpdate({ id }, { $set: { discourseUsername, discourseKey } }); + } catch (err) { + console.error(err); + } + } + // Return identification payload of registered Discourse user + return { + headers: { + 'Api-Key': discourseKey, + 'Api-Username': discourseUsername, + 'user-agent': 'node.js', + 'Content-Type': 'application/json', + }, + }; } - diff --git a/src/resources/eventlog/eventlog.controller.js b/src/resources/eventlog/eventlog.controller.js index ea07fa23..25fe3579 100644 --- a/src/resources/eventlog/eventlog.controller.js +++ b/src/resources/eventlog/eventlog.controller.js @@ -2,5 +2,5 @@ import { EventLogModel } from './eventlog.model'; import _ from 'lodash'; module.exports = { - logEvent: async (event) => await EventLogModel.create({...event}) -} \ No newline at end of file + logEvent: async event => await EventLogModel.create({ ...event }), +}; diff --git a/src/resources/eventlog/eventlog.model.js b/src/resources/eventlog/eventlog.model.js index a147fe83..2ae8e7c0 100644 --- a/src/resources/eventlog/eventlog.model.js +++ b/src/resources/eventlog/eventlog.model.js @@ -2,10 +2,10 @@ import { model, Schema } from 'mongoose'; const EventLogSchema = new Schema( { - userId: String, + userId: Number, event: String, timestamp: Date, } ); -export const EventLogModel = model('eventlog', EventLogSchema) \ No newline at end of file +export const EventLogModel = model('eventlog', EventLogSchema) diff --git a/src/resources/googleanalytics/googleanalytics.router.js b/src/resources/googleanalytics/googleanalytics.router.js index 6f91a1fe..a6bcecc1 100644 --- a/src/resources/googleanalytics/googleanalytics.router.js +++ b/src/resources/googleanalytics/googleanalytics.router.js @@ -3,34 +3,31 @@ import axios from 'axios'; var WidgetAuth = require('../../../WidgetAuth'); -const router = express.Router() +const router = express.Router(); //returns the number of unique users within a set timeframe specified by the start date and end date params passed router.get('/userspermonth', async (req, res) => { - var startDate = req.query.startDate; - var endDate = req.query.endDate; + var startDate = req.query.startDate; + var endDate = req.query.endDate; - var getUsersGAPromise = WidgetAuth.getUsersGA(startDate, endDate); + var getUsersGAPromise = WidgetAuth.getUsersGA(startDate, endDate); - getUsersGAPromise - .then(function (result){ - JSON.stringify(result); + getUsersGAPromise.then(function (result) { + JSON.stringify(result); - return res.json({'success': true, 'data' : result.data}); - }) + return res.json({ success: true, data: result.data }); + }); }); //returns the total number of unique users -router.get('/totalusers', async (req, res) => { +router.get('/totalusers', async (req, res) => { + var getTotalUsersGAPromise = WidgetAuth.getTotalUsersGA(); - var getTotalUsersGAPromise = WidgetAuth.getTotalUsersGA(); + getTotalUsersGAPromise.then(function (result) { + JSON.stringify(result); - getTotalUsersGAPromise - .then(function (result){ - JSON.stringify(result); - - return res.json({'success': true, 'data' : 
result.data}); - }) + return res.json({ success: true, data: result.data }); + }); }); -module.exports = router \ No newline at end of file +module.exports = router; diff --git a/src/resources/help/help.model.js b/src/resources/help/help.model.js index 122522f6..01b52829 100644 --- a/src/resources/help/help.model.js +++ b/src/resources/help/help.model.js @@ -1,12 +1,10 @@ import { model, Schema } from 'mongoose'; -const HelpSchema = new Schema( - { - question: String, - answer: String, - category: String, - activeFlag: Boolean - } -); +const HelpSchema = new Schema({ + question: String, + answer: String, + category: String, + activeFlag: Boolean, +}); -export const Help = model('help_faq', HelpSchema) \ No newline at end of file +export const Help = model('help_faq', HelpSchema); diff --git a/src/resources/help/help.router.js b/src/resources/help/help.router.js index e7d089b8..a76d8d3b 100644 --- a/src/resources/help/help.router.js +++ b/src/resources/help/help.router.js @@ -1,5 +1,5 @@ -import express from "express"; -import { Help } from "./help.model"; +import express from 'express'; +import { Help } from './help.model'; import _ from 'lodash'; const router = express.Router(); @@ -7,26 +7,25 @@ const router = express.Router(); // @router GET /api/help/:category // @desc Get Help FAQ for a category // @access Public -router.get("/:category", async (req, res) => { - try { - // 1. Destructure category parameter with safe default - let { category = '' } = req.params; - // 2. Check if parameter is empty (if required throw error response) - if(_.isEmpty(category)) { - return res.status(400).json({ success: false, message: 'Category is required' }); - } - // 3. Find matching help items in MongoDb - let help = await Help.find({ $and: [{ active: true }, { category }] }); - // 4. Return help data in response - return res.status(200).json({ success: true, help }); - } - catch (err) { - console.error(err.message); - return res.status(500).json({ - success: false, - message: 'An error occurred searching for help data', - }); - } +router.get('/:category', async (req, res) => { + try { + // 1. Destructure category parameter with safe default + let { category = '' } = req.params; + // 2. Check if parameter is empty (if required throw error response) + if (_.isEmpty(category)) { + return res.status(400).json({ success: false, message: 'Category is required' }); + } + // 3. Find matching help items in MongoDb + let help = await Help.find({ $and: [{ active: true }, { category }] }); + // 4. 
Return help data in response + return res.status(200).json({ success: true, help }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred searching for help data', + }); + } }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/linkchecker/linkchecker.repository.js b/src/resources/linkchecker/linkchecker.repository.js index 890f075e..f28055dc 100644 --- a/src/resources/linkchecker/linkchecker.repository.js +++ b/src/resources/linkchecker/linkchecker.repository.js @@ -1,40 +1,35 @@ import { Data } from '../tool/data.model'; import _ from 'lodash'; - export function getObjectResult(searchAll, searchQuery) { +export function getObjectResult(searchAll, searchQuery) { + var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + newSearchQuery['$and'].push({ $or: [{ type: 'paper' }, { type: 'project' }, { type: 'tool' }] }); - var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - newSearchQuery["$and"].push({ "$or":[ - {"type":"paper"}, - {"type":"project"}, - {"type":"tool"} - ]}) + var queryObject = [ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { + $project: { + _id: 0, + id: 1, + name: 1, + type: 1, + description: 1, + resultsInsights: 1, + link: 1, + 'persons.id': 1, + }, + }, + ]; - var queryObject = [ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { - $project: { - "_id": 0, - "id": 1, - "name": 1, - "type": 1, - "description": 1, - "resultsInsights": 1, - "link": 1, - "persons.id": 1 - } - } - ]; + if (searchAll) queryObject.push({ $sort: { name: 1 } }); + else queryObject.push({ $sort: { score: { $meta: 'textScore' } } }); - if (searchAll) queryObject.push({ "$sort": { "name": 1 }}); - else queryObject.push({ "$sort": { score: { $meta: "textScore" }}}); - - var q = Data.aggregate(queryObject); - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === "undefined") resolve([]); - else resolve(data); - }) - }) -} \ No newline at end of file + var q = Data.aggregate(queryObject); + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); +} diff --git a/src/resources/linkchecker/linkchecker.router.js b/src/resources/linkchecker/linkchecker.router.js index da1b97f9..072bb895 100644 --- a/src/resources/linkchecker/linkchecker.router.js +++ b/src/resources/linkchecker/linkchecker.router.js @@ -1,7 +1,7 @@ -import express from 'express' +import express from 'express'; import { getObjectResult } from './linkchecker.repository'; import { getUserByUserId } from '../user/user.repository'; -import { Data } from '../tool/data.model' +import { Data } from '../tool/data.model'; import emailGenerator from '../utilities/emailGenerator.util'; import _ from 'lodash'; const sgMail = require('@sendgrid/mail'); @@ -9,157 +9,145 @@ const sgMail = require('@sendgrid/mail'); const hdrukEmail = `enquiry@healthdatagateway.org`; const axios = require('axios'); -const router = express.Router(); +const router = express.Router(); router.post('/', async (req, res) => { - - let parsedBody = {} - if (req.header('content-type') === 'application/json') { - parsedBody = req.body; - } else { - parsedBody = JSON.parse(req.body); - } - - //Check for key - if (parsedBody.key !== process.env.linkcheckerkey) 
{ - return res.json({ success: false, error: "Link checker failed" }); - } - - let results = []; - - const allowedKeys = ['link', 'description', 'resultsInsights']; - - results = await getObjectResult(true, {"$and":[{"activeflag":"active"}]}); - - const getAllUsers = (persons) => new Promise(async(resolve, reject) => { - - let users = [] - for (let p of persons) { - let user = await getUserByUserId(p.id); - if(!_.isEmpty(user)){ - users.push({ - _id: user._id, - id: user.id, - firstname: user.firstname, - lastname: user.lastname, - email: user.email - }); - } - } - // at end resolve the request - resolve(users); - - }) - - const getErrorLink = (link) => new Promise(async(resolve, reject) => { - try { - const response = await axios.get(link); - resolve(''); - } - catch (error) { - resolve('error'); - } - }) - - const checkLinks = (item, key) => new Promise(async (resolve, reject) => { - let errors = {}; - let linkErrors = []; - if(allowedKeys.includes(key)) { - // return [url, url]; - let links = item[key].match(/\bhttps?:\/\/\S+/gi); - // test links for errors - if(!_.isEmpty(links)) { - for(let link of links) { - // test our link is valid or not - let result = await getErrorLink(link) || ''; - // check to see if it contains a string with a value - if(!_.isEmpty(result)){ - - linkErrors.push(link); - } - } - - if (!_.isEmpty(linkErrors)) { - // we return errros: { link: [url, url, url]} - errors[key] = linkErrors; - } - } - } - //returns after processing our await via new promise - resolve(errors); - }); - - const sendEmailToUsers = async (users, errors, item) => { - - let footer; - sgMail.setApiKey(process.env.SENDGRID_API_KEY); - let resourceLink = process.env.homeURL + '/' + item.type + '/' + item.id; - - for(let user of users) { - - footer = emailGenerator.generateEmailFooter(user, "true" ) - - let checkUser = await Data.find({ - id: user.id - }) - - if(checkUser[0].emailNotifications === true){ - - let msg = { - to: user.email, - from: `${hdrukEmail}`, - subject: `Updates required for links in ${item.name}.` , - html: `${user.firstname} ${user.lastname},

+ let parsedBody = {}; + if (req.header('content-type') === 'application/json') { + parsedBody = req.body; + } else { + parsedBody = JSON.parse(req.body); + } + + //Check for key + if (parsedBody.key !== process.env.linkcheckerkey) { + return res.json({ success: false, error: 'Link checker failed' }); + } + + let results = []; + + const allowedKeys = ['link', 'description', 'resultsInsights']; + + results = await getObjectResult(true, { $and: [{ activeflag: 'active' }] }); + + const getAllUsers = persons => + new Promise(async (resolve, reject) => { + let users = []; + for (let p of persons) { + let user = await getUserByUserId(p.id); + if (!_.isEmpty(user)) { + users.push({ + _id: user._id, + id: user.id, + firstname: user.firstname, + lastname: user.lastname, + email: user.email, + }); + } + } + // at end resolve the request + resolve(users); + }); + + const getErrorLink = link => + new Promise(async (resolve, reject) => { + try { + const response = await axios.get(link); + resolve(''); + } catch (error) { + resolve('error'); + } + }); + + const checkLinks = (item, key) => + new Promise(async (resolve, reject) => { + let errors = {}; + let linkErrors = []; + if (allowedKeys.includes(key)) { + // return [url, url]; + let links = item[key].match(/\bhttps?:\/\/\S+/gi); + // test links for errors + if (!_.isEmpty(links)) { + for (let link of links) { + // test our link is valid or not + let result = (await getErrorLink(link)) || ''; + // check to see if it contains a string with a value + if (!_.isEmpty(result)) { + linkErrors.push(link); + } + } + + if (!_.isEmpty(linkErrors)) { + // we return errros: { link: [url, url, url]} + errors[key] = linkErrors; + } + } + } + //returns after processing our await via new promise + resolve(errors); + }); + + const sendEmailToUsers = async (users, errors, item) => { + let footer; + sgMail.setApiKey(process.env.SENDGRID_API_KEY); + let resourceLink = process.env.homeURL + '/' + item.type + '/' + item.id; + + for (let user of users) { + footer = emailGenerator.generateEmailFooter(user, 'true'); + + let checkUser = await Data.find({ + id: user.id, + }); + + if (checkUser[0].emailNotifications === true) { + let msg = { + to: user.email, + from: `${hdrukEmail}`, + subject: `Updates required for links in ${item.name}.`, + html: `${user.firstname} ${user.lastname},

Please review your ${item.type} "${item.name}" here: ${resourceLink}. This ${item.type} contains stale links which require updating. -

${footer}` - }; - - await sgMail.send(msg); - } - - } - - } - - let newResults = results.map(async (item) => { - - let errors = {}; - // 1. deconstruct the item and select persons [1,2,4,5,6] - let { persons } = {...item} - - let users = []; - // 1. users = [{id, firstname, lastname, email}, {}, {}]; - if(!_.isEmpty(persons)) { - users = await getAllUsers(persons); - } else { - users = [{email: 'support@healthdatagateway.org', firstname: 'HDRUK', lastname: 'Support'}] - } - - // loop over the item object and check each key meets link checking - for (let key in item) { - // error: {link: [url, url]} - let result = await checkLinks(item, key) || {}; - // link doing result.link - if(!_.isEmpty(result)) { - errors = { - ...errors, - [key]: result[key] - }; - - } - } - - // send email to all users - // loop over the users async await and send email here - if(!_.isEmpty(errors)) { - await sendEmailToUsers(users, errors, item) - .then(() =>{ - return res.json({ success: true }); - }) - } - - }); - +

${footer}`, + }; + + await sgMail.send(msg); + } + } + }; + + let newResults = results.map(async item => { + let errors = {}; + // 1. deconstruct the item and select persons [1,2,4,5,6] + let { persons } = { ...item }; + + let users = []; + // 2. users = [{id, firstname, lastname, email}, {}, {}]; + if (!_.isEmpty(persons)) { + users = await getAllUsers(persons); + } else { + users = [{ email: 'support@healthdatagateway.org', firstname: 'HDRUK', lastname: 'Support' }]; + } + + // loop over the item object and check each allowed key for broken links + for (let key in item) { + // result shape: { link: [url, url] } + let result = (await checkLinks(item, key)) || {}; + // merge any link errors found for this key into the errors object + if (!_.isEmpty(result)) { + errors = { + ...errors, + [key]: result[key], + }; + } + } + + // send email to all users + // loop over the users and await each send before responding + if (!_.isEmpty(errors)) { + await sendEmailToUsers(users, errors, item).then(() => { + return res.json({ success: true }); + }); + } + }); }); - module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/message/message.controller.js b/src/resources/message/message.controller.js index a62805f7..2d895982 100644 --- a/src/resources/message/message.controller.js +++ b/src/resources/message/message.controller.js @@ -5,6 +5,7 @@ import mongoose from 'mongoose'; import { UserModel } from '../user/user.model'; import emailGenerator from '../utilities/emailGenerator.util'; import { Data as ToolModel } from '../tool/data.model'; +import constants from '../utilities/constants.util'; const topicController = require('../topic/topic.controller'); @@ -74,11 +75,10 @@ module.exports = { let { additionalInfo: { emailNotifications }, _id} = user; return emailNotifications === true && _id.toString() !== createdBy.toString(); }); - const hdrukEmail = `enquiry@healthdatagateway.org`; // 9. Send email emailGenerator.sendEmail( optedInEmailRecipients, - `${hdrukEmail}`, + constants.hdrukEmail, `You have received a new message on the HDR UK Innovation Gateway`, `You have received a new message on the HDR UK Innovation Gateway.
Log in to view your messages here : HDR UK Innovation Gateway` ); diff --git a/src/resources/message/message.model.js b/src/resources/message/message.model.js index e6fc6f39..d34eddfd 100644 --- a/src/resources/message/message.model.js +++ b/src/resources/message/message.model.js @@ -1,59 +1,65 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -const MessageSchema = new Schema({ - messageID: Number, - messageTo: Number, - messageObjectID: Number, - messageDataRequestID: { - type: Schema.Types.ObjectId, - ref: 'data_request' - }, - messageDescription: String, - messageType: { - type: String, - enum: ['message', - 'add', - 'approved', - 'archive', - 'author', - 'rejected', - 'added collection', - 'review', - 'data access request', - 'data access request unlinked', - 'team', - 'team unlinked', - 'edit' - ] - }, - createdBy:{ - type: Schema.Types.ObjectId, - ref: 'User' - }, - createdDate: { - type: Date, - default: Date.now - }, - isRead: { - type: String, - enum : ['true','false'], - default: 'false' - }, - topic: { - type: Schema.Types.ObjectId, - ref: 'Topic' - }, - readBy: [{ - type: Schema.Types.ObjectId, - ref: 'User' - }], - createdByName: { - type: Object - } -},{ - toJSON: { virtuals: true}, - toObject: { virtuals: true} -}); +const MessageSchema = new Schema( + { + messageID: Number, + messageTo: Number, + messageObjectID: Number, + messageDataRequestID: { + type: Schema.Types.ObjectId, + ref: 'data_request', + }, + messageDescription: String, + messageType: { + type: String, + enum: [ + 'message', + 'add', + 'approved', + 'archive', + 'author', + 'rejected', + 'added collection', + 'review', + 'data access request', + 'data access request unlinked', + 'team', + 'team unlinked', + 'edit', + 'workflow' + ], + }, + createdBy: { + type: Schema.Types.ObjectId, + ref: 'User', + }, + createdDate: { + type: Date, + default: Date.now, + }, + isRead: { + type: String, + enum: ['true', 'false'], + default: 'false', + }, + topic: { + type: Schema.Types.ObjectId, + ref: 'Topic', + }, + readBy: [ + { + type: Schema.Types.ObjectId, + ref: 'User', + }, + ], + createdByName: { + type: Object, + }, + }, + { + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); - -export const MessagesModel = model('Messages', MessageSchema); \ No newline at end of file +export const MessagesModel = model('Messages', MessageSchema); diff --git a/src/resources/message/message.route.js b/src/resources/message/message.route.js index d3b00052..7a3e58c5 100644 --- a/src/resources/message/message.route.js +++ b/src/resources/message/message.route.js @@ -1,161 +1,139 @@ import express from 'express'; -import passport from "passport"; -import { utils } from "../auth"; -import { ROLES } from '../user/user.roles' -import { MessagesModel } from './message.model' +import passport from 'passport'; +import { utils } from '../auth'; +import { ROLES } from '../user/user.roles'; +import { MessagesModel } from './message.model'; const messageController = require('../message/message.controller'); // by default route has access to its own, allows access to parent param -const router = express.Router({ mergeParams: true}); - -router.get('/numberofunread/admin/:personID', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - - var idString = ""; - let countUnreadMessages = 0; - if (req.params.personID) { - idString = parseInt(req.params.personID); - } - - var m = MessagesModel.aggregate([ - { $match: { $and: [{ $or: [{ messageTo: idString }, { 
messageTo: 0 }] }] } }, - { $sort: { createdDate: -1 } }, - { $lookup: { from: "tools", localField: "messageObjectID", foreignField: "id", as: "tool" } } - ]).limit(50); - m.exec((err, data) => { - if (err) { - return res.json({ success: false, error: err }); - } else { - Array.prototype.forEach.call(data, element => { - if (element.isRead === 'false') { - countUnreadMessages++; - } - }); - return res.json({ countUnreadMessages }); - } - }) - }); - -router.get('/numberofunread/:personID', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Creator), - async (req, res) => { - - var idString = ""; - let countUnreadMessages = 0; - if (req.params.personID) { - idString = parseInt(req.params.personID); - } - - if (req.query.id) { - idString = parseInt(req.query.id); - } - var m = MessagesModel.aggregate([ - { $match: { $and: [{ messageTo: idString }] } }, - { $sort: { createdDate: -1 } }, - { $lookup: { from: "tools", localField: "messageObjectID", foreignField: "id", as: "tool" } } - ]).limit(50); - m.exec((err, data) => { - if (err) { - return res.json({ success: false, error: err }); - } else { - Array.prototype.forEach.call(data, element => { - if (element.isRead === 'false') { - countUnreadMessages++; - } - }); - return res.json({ countUnreadMessages }); - } - }) - }); - -router.get('/:personID', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Creator), - async (req, res) => { - var idString = ""; - - if (req.params.personID) { - idString = parseInt(req.params.personID); - } - var m = MessagesModel.aggregate([ - { $match: { $and: [{ messageTo: idString }] } }, - { $sort: { createdDate: -1 } }, - { $lookup: { from: "tools", localField: "messageObjectID", foreignField: "id", as: "tool" } }, - { $lookup: { from: "course", localField: "messageObjectID", foreignField: "id", as: "course" } } - ]).limit(50); - m.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, newData: data }); - }); - }); +const router = express.Router({ mergeParams: true }); + +router.get('/numberofunread/admin/:personID', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + var idString = ''; + let countUnreadMessages = 0; + if (req.params.personID) { + idString = parseInt(req.params.personID); + } + + var m = MessagesModel.aggregate([ + { $match: { $and: [{ $or: [{ messageTo: idString }, { messageTo: 0 }] }] } }, + { $sort: { createdDate: -1 } }, + { $lookup: { from: 'tools', localField: 'messageObjectID', foreignField: 'id', as: 'tool' } }, + ]).limit(50); + m.exec((err, data) => { + if (err) { + return res.json({ success: false, error: err }); + } else { + Array.prototype.forEach.call(data, element => { + if (element.isRead === 'false') { + countUnreadMessages++; + } + }); + return res.json({ countUnreadMessages }); + } + }); +}); + +router.get('/numberofunread/:personID', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Creator), async (req, res) => { + var idString = ''; + let countUnreadMessages = 0; + if (req.params.personID) { + idString = parseInt(req.params.personID); + } + + if (req.query.id) { + idString = parseInt(req.query.id); + } + var m = MessagesModel.aggregate([ + { $match: { $and: [{ messageTo: idString }] } }, + { $sort: { createdDate: -1 } }, + { $lookup: { from: 'tools', localField: 'messageObjectID', foreignField: 'id', as: 'tool' } }, + ]).limit(50); + m.exec((err, data) => { + if (err) { + return res.json({ success: false, error: err }); + } else { + 
Array.prototype.forEach.call(data, element => { + if (element.isRead === 'false') { + countUnreadMessages++; + } + }); + return res.json({ countUnreadMessages }); + } + }); +}); + +router.get('/:personID', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Creator), async (req, res) => { + var idString = ''; + + if (req.params.personID) { + idString = parseInt(req.params.personID); + } + var m = MessagesModel.aggregate([ + { $match: { $and: [{ messageTo: idString }] } }, + { $sort: { createdDate: -1 } }, + { $lookup: { from: 'tools', localField: 'messageObjectID', foreignField: 'id', as: 'tool' } }, + { $lookup: { from: 'course', localField: 'messageObjectID', foreignField: 'id', as: 'course' } }, + ]).limit(50); + m.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, newData: data }); + }); +}); /** * {get} /messages Messages - * + * * Return list of messages */ -router.get( - '/admin/:personID', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - var idString = ""; - - if (req.params.personID) { - idString = parseInt(req.params.personID); - } - - var m = MessagesModel.aggregate([ - { $match: { $and: [{ $or: [{ messageTo: idString }, { messageTo: 0 }] }] } }, - { $sort: { createdDate: -1 } }, - { $lookup: { from: "tools", localField: "messageObjectID", foreignField: "id", as: "tool" } }, - { $lookup: { from: "course", localField: "messageObjectID", foreignField: "id", as: "course" } } - ]).limit(50); - m.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, newData: data }); - }); - }); - -router.post( - '/markasread', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - console.log('in markAsRead'); - const messageIds = req.body; - - MessagesModel.updateMany( - { messageID: { $in: messageIds } }, - { isRead: true }, (err) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true }); - } - ) - }); - - // @route POST api/messages - // @desc POST A message - // @access Private - router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.createMessage); - - // @route DELETE api/messages/:id - // @desc DELETE Delete a message - // @access Private - router.delete('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.deleteMessage); - - // @route PUT api/messages - // @desc PUT Update a message - // @access Private - router.put('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.updateMessage); - - // @route GET api/messages/unread/count - // @desc GET the number of unread messages for a user - // @access Private - router.get('/unread/count', passport.authenticate('jwt'), messageController.getUnreadMessageCount); - - module.exports = router; \ No newline at end of file +router.get('/admin/:personID', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + var idString = ''; + + if (req.params.personID) { + idString = parseInt(req.params.personID); + } + + var m = MessagesModel.aggregate([ + { $match: { $and: [{ $or: [{ messageTo: idString }, { messageTo: 0 }] }] } }, + { $sort: { createdDate: -1 } }, + { $lookup: { from: 'tools', localField: 'messageObjectID', foreignField: 'id', as: 'tool' } }, + { $lookup: { from: 'course', localField: 
'messageObjectID', foreignField: 'id', as: 'course' } }, + ]).limit(50); + m.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, newData: data }); + }); +}); + +router.post('/markasread', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + console.log('in markAsRead'); + const messageIds = req.body; + + MessagesModel.updateMany({ messageID: { $in: messageIds } }, { isRead: true }, err => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true }); + }); +}); + +// @route POST api/messages +// @desc POST A message +// @access Private +router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.createMessage); + +// @route DELETE api/messages/:id +// @desc DELETE Delete a message +// @access Private +router.delete('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.deleteMessage); + +// @route PUT api/messages +// @desc PUT Update a message +// @access Private +router.put('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), messageController.updateMessage); + +// @route GET api/messages/unread/count +// @desc GET the number of unread messages for a user +// @access Private +router.get('/unread/count', passport.authenticate('jwt'), messageController.getUnreadMessageCount); + +module.exports = router; diff --git a/src/resources/paper/paper.route.js b/src/resources/paper/paper.route.js index 3dae0c81..a39e1100 100644 --- a/src/resources/paper/paper.route.js +++ b/src/resources/paper/paper.route.js @@ -1,9 +1,9 @@ -import express from 'express' -import { Data } from '../tool/data.model' -import { ROLES } from '../user/user.roles' -import passport from "passport"; -import { utils } from "../auth"; -import {addTool, editTool, setStatus, getTools, getToolsAdmin} from '../tool/data.repository'; +import express from 'express'; +import { Data } from '../tool/data.model'; +import { ROLES } from '../user/user.roles'; +import passport from 'passport'; +import { utils } from '../auth'; +import { addTool, editTool, setStatus, getTools, getToolsAdmin, getAllTools } from '../tool/data.repository'; import helper from '../utilities/helper.util'; import escape from 'escape-html'; const router = express.Router(); @@ -11,181 +11,161 @@ const router = express.Router(); // @router POST /api/v1/ // @desc Add paper user // @access Private -router.post('/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await addTool(req) - .then(response => { - return res.json({ success: true, response}); - }) - .catch(err => { - return res.json({ success: false, err}); - }) - } -); +router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await addTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router GET /api/v1/ // @desc Returns List of Paper Objects Authenticated // @access Private -router.get( - '/getList', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - req.params.type = 'paper'; - let role = req.user.role; +router.get('/getList', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + 
req.params.type = 'paper'; + let role = req.user.role; - if (role === ROLES.Admin) { - await getToolsAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } else if (role === ROLES.Creator) { - await getTools(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } - } -); + if (role === ROLES.Admin) { + await getToolsAdmin(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } else if (role === ROLES.Creator) { + await getTools(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } +}); // @router POST /api/v1/ // @desc Validates that a paper link does not exist on the gateway // @access Private -router.post( - '/validate', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Deconstruct message body which contains the link entered by the user against a paper - const { link } = req.body; - // 2. Front end validation should prevent this occurrence, but we return success if empty string or not param is passed - if(!link) { - return res.status(200).json({ success: true }); - } - // 3. Use MongoDb to perform a direct comparison on all paper links, trimming leading and trailing white space from the request body - const papers = await Data.find({ type: "paper", link: link.trim(), activeflag: {$ne: "rejected"} }).count(); - // 4. If any results are found, return error that the link exists on the Gateway already - if(papers > 0) - return res.status(200).json({ success: true, error: "This link is already associated to another paper on the HDR-UK Innovation Gateway" }); - // 5. Otherwise return valid - return res.status(200).json({ success: true }); - } catch (error) { - console.error(error); - return res.status(500).json({ success: false, error: 'Paper link validation failed' }); - } - } -); +router.post('/validate', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Deconstruct message body which contains the link entered by the user against a paper + const { link } = req.body; + // 2. Front end validation should prevent this occurrence, but we return success if an empty string or no param is passed + if (!link) { + return res.status(200).json({ success: true }); + } + // 3. Use MongoDb to perform a direct comparison on all paper links, trimming leading and trailing white space from the request body + const papers = await Data.find({ type: 'paper', link: link.trim(), activeflag: { $ne: 'rejected' } }).count(); + // 4. If any results are found, return an error that the link already exists on the Gateway + if (papers > 0) + return res + .status(200) + .json({ success: true, error: 'This link is already associated to another paper on the HDR-UK Innovation Gateway' }); + // 5. 
Otherwise return valid + return res.status(200).json({ success: true }); + } catch (error) { + console.error(error); + return res.status(500).json({ success: false, error: 'Paper link validation failed' }); + } +}); // @router GET /api/v1/ // @desc Returns List of Paper Objects No auth // This unauthenticated route was created specifically for API-docs // @access Public -router.get( - '/', - async (req, res) => { - req.params.type = 'paper'; - await getToolsAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.get('/', async (req, res) => { + req.params.type = 'paper'; + await getAllTools(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PATCH /api/v1/ // @desc Change status of the Paper object. // @access Private -router.patch('/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - await setStatus(req) - .then(response => { - return res.json({success: true, response}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.patch('/:id', passport.authenticate('jwt'), async (req, res) => { + await setStatus(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PUT /api/v1/ // @desc Returns edited Paper object. // @access Private -router.put('/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await editTool(req) - .then(response => { - return res.json({ success: true, response}); - }) - .catch(err => { - return res.json({ success: false, err}); - }) - } -); +router.put('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await editTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); /** * {get} /paper​/:paper​ID Paper - * + * * Return the details on the paper based on the tool ID. 
*/ router.get('/:paperID', async (req, res) => { - var q = Data.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.paperID) }, {type: 'paper'}] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $lookup: { from: "tools", localField: "uploader", foreignField: "id", as: "uploaderIs" } } - ]); - q.exec((err, data) => { - if (data.length > 0) { - var p = Data.aggregate([ - { $match: { $and: [{ "relatedObjects": { $elemMatch: { "objectId": req.params.paperID } } }] } }, - ]); - p.exec((err, relatedData) => { - relatedData.forEach((dat) => { - dat.relatedObjects.forEach((x) => { - if (x.objectId === req.params.paperID && dat.id !== req.params.paperID) { - if (typeof data[0].relatedObjects === "undefined") data[0].relatedObjects=[]; - data[0].relatedObjects.push({ objectId: dat.id, reason: x.reason, objectType: dat.type, user: x.user, updated: x.updated }) - } - }) - }); - if (err) return res.json({ success: false, error: err }); + var q = Data.aggregate([ + { $match: { $and: [{ id: parseInt(req.params.paperID) }, { type: 'paper' }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $lookup: { from: 'tools', localField: 'uploader', foreignField: 'id', as: 'uploaderIs' } }, + { + $addFields: { + uploader: { + $concat: [{ $arrayElemAt: ['$uploaderIs.firstname', 0] }, ' ', { $arrayElemAt: ['$uploaderIs.lastname', 0] }], + }, + }, + }, + ]); + q.exec((err, data) => { + if (data.length > 0) { + var p = Data.aggregate([{ $match: { $and: [{ relatedObjects: { $elemMatch: { objectId: req.params.paperID } } }] } }]); + p.exec((err, relatedData) => { + relatedData.forEach(dat => { + dat.relatedObjects.forEach(x => { + if (x.objectId === req.params.paperID && dat.id !== req.params.paperID) { + if (typeof data[0].relatedObjects === 'undefined') data[0].relatedObjects = []; + data[0].relatedObjects.push({ objectId: dat.id, reason: x.reason, objectType: dat.type, user: x.user, updated: x.updated }); + } + }); + }); + if (err) return res.json({ success: false, error: err }); - data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); - return res.json({ success: true, data: data }); - }); - } - else{ - return res.status(404).send(`Paper not found for Id: ${escape(req.params.paperID)}`); - } - }); + data[0].persons = helper.hidePrivateProfileDetails(data[0].persons); + return res.json({ success: true, data: data }); + }); + } else { + return res.status(404).send(`Paper not found for Id: ${escape(req.params.paperID)}`); + } + }); }); -router.get('/edit/:paperID', async (req, res) => { - var query = Data.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.paperID) }] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } } - ]); - query.exec((err, data) => { - if(data.length > 0){ - return res.json({ success: true, data: data }); - } - else { - return res.json({success: false, error: `Paper not found for paper id ${req.params.id}`}) - } - }); +router.get('/edit/:paperID', async (req, res) => { + var query = Data.aggregate([ + { $match: { $and: [{ id: parseInt(req.params.paperID) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]); + query.exec((err, data) => { + if (data.length > 0) { + return res.json({ success: true, data: data }); + } else { + return res.json({ success: false, error: `Paper not found for paper id ${req.params.id}` }); + } + }); }); -module.exports = router; \ No newline at end of 
file +module.exports = router; diff --git a/src/resources/person/person.route.js b/src/resources/person/person.route.js index a540f01b..fe0666be 100644 --- a/src/resources/person/person.route.js +++ b/src/resources/person/person.route.js @@ -1,189 +1,204 @@ -import express from 'express' -import { Data } from '../tool/data.model' -import { utils } from "../auth"; -import passport from "passport"; -import { ROLES } from '../user/user.roles' -import {addTool, editTool, deleteTool, setStatus, getTools, getToolsAdmin} from '../tool/data.repository'; -import emailGenerator from '../utilities/emailGenerator.util'; -import { UserModel } from '../user/user.model' -import helper from '../utilities/helper.util'; +import express from 'express'; +import { Data } from '../tool/data.model'; +import { utils } from '../auth'; +import passport from 'passport'; +import { ROLES } from '../user/user.roles'; +import { getAllTools } from '../tool/data.repository'; +import { UserModel } from '../user/user.model'; +import helper from '../utilities/helper.util'; +import _ from 'lodash'; const urlValidator = require('../utilities/urlValidator'); const inputSanitizer = require('../utilities/inputSanitizer'); -const router = express.Router() +const router = express.Router(); -router.post('/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - const { firstname, lastname, bio, emailNotifications, terms, sector, organisation, showOrganisation, tags} = req.body; - let link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)); - let orcid = req.body.orcid !== '' ? urlValidator.validateOrcidURL(inputSanitizer.removeNonBreakingSpaces(req.body.orcid)) : ''; - let data = Data(); - console.log(req.body) - data.id = parseInt(Math.random().toString().replace('0.', '')); - data.firstname = inputSanitizer.removeNonBreakingSpaces(firstname), - data.lastname = inputSanitizer.removeNonBreakingSpaces(lastname), - data.type = "person"; - data.bio = inputSanitizer.removeNonBreakingSpaces(bio); - data.link = link; - data.orcid = orcid; - data.emailNotifications = emailNotifications; - data.terms = terms; - data.sector = inputSanitizer.removeNonBreakingSpaces(sector); - data.organisation = inputSanitizer.removeNonBreakingSpaces(organisation); - data.showOrganisation = showOrganisation; - data.tags = inputSanitizer.removeNonBreakingSpaces(tags); - let newPersonObj = await data.save(); - if(!newPersonObj) - return res.json({ success: false, error: "Can't persist data to DB" }); - - return res.json({ success: true, data: newPersonObj}); - }); +router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + const { firstname, lastname, bio, emailNotifications, terms, sector, organisation, showOrganisation, tags } = req.body; + let link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)); + let orcid = req.body.orcid !== '' ? 
urlValidator.validateOrcidURL(inputSanitizer.removeNonBreakingSpaces(req.body.orcid)) : ''; + let data = Data(); + console.log(req.body); + data.id = parseInt(Math.random().toString().replace('0.', '')); + (data.firstname = inputSanitizer.removeNonBreakingSpaces(firstname)), + (data.lastname = inputSanitizer.removeNonBreakingSpaces(lastname)), + (data.type = 'person'); + data.bio = inputSanitizer.removeNonBreakingSpaces(bio); + data.link = link; + data.orcid = orcid; + data.emailNotifications = emailNotifications; + data.terms = terms; + data.sector = inputSanitizer.removeNonBreakingSpaces(sector); + data.organisation = inputSanitizer.removeNonBreakingSpaces(organisation); + data.showOrganisation = showOrganisation; + data.tags = inputSanitizer.removeNonBreakingSpaces(tags); + let newPersonObj = await data.save(); + if (!newPersonObj) return res.json({ success: false, error: "Can't persist data to DB" }); -router.put('/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - let { id, firstname, lastname, email, bio, showBio, showLink, showOrcid, emailNotifications, terms, sector, showSector, organisation, showOrganisation, tags, showDomain, profileComplete } = req.body; - const type = 'person'; - let link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)); - let orcid = req.body.orcid !== '' ? urlValidator.validateOrcidURL(inputSanitizer.removeNonBreakingSpaces(req.body.orcid)) : ''; - firstname = inputSanitizer.removeNonBreakingSpaces(firstname), - lastname = inputSanitizer.removeNonBreakingSpaces(lastname), - bio = inputSanitizer.removeNonBreakingSpaces(bio); - sector = inputSanitizer.removeNonBreakingSpaces(sector); - organisation = inputSanitizer.removeNonBreakingSpaces(organisation); - tags.topics = inputSanitizer.removeNonBreakingSpaces(tags.topics); - console.log(req.body) + return res.json({ success: true, data: newPersonObj }); +}); + +router.put('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + let { + id, + firstname, + lastname, + email, + bio, + showBio, + showLink, + showOrcid, + emailNotifications, + terms, + sector, + showSector, + organisation, + showOrganisation, + tags, + showDomain, + profileComplete, + } = req.body; + const type = 'person'; + let link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(req.body.link)); + let orcid = req.body.orcid !== '' ? 
urlValidator.validateOrcidURL(inputSanitizer.removeNonBreakingSpaces(req.body.orcid)) : ''; + (firstname = inputSanitizer.removeNonBreakingSpaces(firstname)), + (lastname = inputSanitizer.removeNonBreakingSpaces(lastname)), + (bio = inputSanitizer.removeNonBreakingSpaces(bio)); + sector = inputSanitizer.removeNonBreakingSpaces(sector); + organisation = inputSanitizer.removeNonBreakingSpaces(organisation); + tags.topics = inputSanitizer.removeNonBreakingSpaces(tags.topics); + console.log(req.body); - await Data.findOneAndUpdate({ id: id }, - { - firstname, - lastname, - type, - bio, - showBio, - link, - showLink, - orcid, - showOrcid, - emailNotifications, - terms, - sector, - showSector, - organisation, - showOrganisation, - tags, - showDomain, - profileComplete - }, - {new:true}); - await UserModel.findOneAndUpdate({ id: id }, - { $set: { firstname: firstname, lastname: lastname, email: email } }) - .then(person => { - return res.json({ success: true, data: person}); - }) - .catch(err =>{ - return res.json({ success: false, error: err }); - }) - }); + await Data.findOneAndUpdate( + { id: id }, + { + firstname, + lastname, + type, + bio, + showBio, + link, + showLink, + orcid, + showOrcid, + emailNotifications, + terms, + sector, + showSector, + organisation, + showOrganisation, + tags, + showDomain, + profileComplete, + }, + { new: true } + ); + await UserModel.findOneAndUpdate({ id: id }, { $set: { firstname: firstname, lastname: lastname, email: email } }) + .then(person => { + return res.json({ success: true, data: person }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); +}); // @router GET /api/v1/person/unsubscribe/:userObjectId // @desc Unsubscribe a single user from email notifications without challenging authentication // @access Public router.put('/unsubscribe/:userObjectId', async (req, res) => { - const userId = req.params.userObjectId; - // 1. Use _id param issued by MongoDb as unique reference to find user entry - await UserModel.findOne({ _id: userId }) - .then(async (user) => { - // 2. Find person entry using numeric id and update email notifications to false - await Data.findOneAndUpdate({ id: user.id }, - { - emailNotifications: false - }).then(() => { - // 3a. Return success message - return res.json({ success: true, msg: "You've been successfully unsubscribed from all emails. You can change this setting via your account." }); - }); - }) - .catch(() =>{ - // 3b. Return generic failure message in all cases without disclosing reason or data structure - return res.status(500).send({ success: false, msg: "A problem occurred unsubscribing from email notifications." }); - }) + const userId = req.params.userObjectId; + // 1. Use _id param issued by MongoDb as unique reference to find user entry + await UserModel.findOne({ _id: userId }) + .then(async user => { + // 2. Find person entry using numeric id and update email notifications to false + await Data.findOneAndUpdate( + { id: user.id }, + { + emailNotifications: false, + } + ).then(() => { + // 3a. Return success message + return res.json({ + success: true, + msg: "You've been successfully unsubscribed from all emails. You can change this setting via your account.", + }); + }); + }) + .catch(() => { + // 3b. Return generic failure message in all cases without disclosing reason or data structure + return res.status(500).send({ success: false, msg: 'A problem occurred unsubscribing from email notifications.' 
}); + }); }); // @router GET /api/v1/person/:id // @desc Get person info based on personID router.get('/:id', async (req, res) => { + if (req.params.id === 'null') { + return res.json({ data: null }); + } + let person = await Data.findOne({ id: parseInt(req.params.id) }) + .populate([{ path: 'tools' }, { path: 'reviews' }]) + .catch(err => { + return res.json({ success: false, error: err }); + }); - let person = Data.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.id) }] } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "tools" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "reviewerID", as: "reviews" } } - ]); - person.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - - data = helper.hidePrivateProfileDetails(data); - return res.json({ success: true, data: data }); - }); + if (_.isEmpty(person)) { + return res.status(404).send(`Person not found for Id: ${escape(req.params.id)}`); + } else { + person = helper.hidePrivateProfileDetails([person])[0]; + return res.json({ person }); + } }); // @router GET /api/v1/person/profile/:id // @desc Get person info for their account router.get('/profile/:id', async (req, res) => { - - let profileData = Data.aggregate([ - { $match: { $and: [{ id: parseInt(req.params.id) }] } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "tools" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "reviewerID", as: "reviews" } } - ]); - profileData.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); + let profileData = Data.aggregate([ + { $match: { $and: [{ id: parseInt(req.params.id) }] } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'authors', as: 'tools' } }, + { $lookup: { from: 'reviews', localField: 'id', foreignField: 'reviewerID', as: 'reviews' } }, + ]); + profileData.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); }); -// @router GET /api/v1/person +// @router GET /api/v1/person // @desc Get paper for an author // @access Private -router.get('/', - async (req, res) => { - let personArray = []; - req.params.type = "person"; - await getToolsAdmin(req) - .then(data => { - data.map((personObj) => { - personArray.push( - { - "id":personObj.id, - "type":personObj.type, - "firstname":personObj.firstname, - "lastname":personObj.lastname, - "bio":personObj.bio, - "sociallinks":personObj.sociallinks, - "company":personObj.company, - "link":personObj.link, - "orcid":personObj.orcid, - "activeflag":personObj.activeflag, - "createdAt":personObj.createdAt, - "updatedAt":personObj.updatedAt, - "__v":personObj.__v, - "emailNotifications":personObj.emailNotifications, - "terms":personObj.terms, - "counter":personObj.counter, - "sector":personObj.sector, - "organisation":personObj.organisation, - "showOrganisation":personObj.showOrganisation - } - ); - }) - return res.json({success: true, data: personArray}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); - -module.exports = router \ No newline at end of file +router.get('/', async (req, res) => { + let personArray = []; + req.params.type = 'person'; + await getAllTools(req) + .then(data => { + data.map(personObj => { + personArray.push({ + id: personObj.id, + type: personObj.type, + firstname: personObj.firstname, + lastname: personObj.lastname, + bio: 
personObj.bio, + sociallinks: personObj.sociallinks, + company: personObj.company, + link: personObj.link, + orcid: personObj.orcid, + activeflag: personObj.activeflag, + createdAt: personObj.createdAt, + updatedAt: personObj.updatedAt, + __v: personObj.__v, + emailNotifications: personObj.emailNotifications, + terms: personObj.terms, + counter: personObj.counter, + sector: personObj.sector, + organisation: personObj.organisation, + showOrganisation: personObj.showOrganisation, + }); + }); + return res.json({ success: true, data: personArray }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); + +module.exports = router; diff --git a/src/resources/person/person.service.js b/src/resources/person/person.service.js index bd5f9536..0e18b909 100644 --- a/src/resources/person/person.service.js +++ b/src/resources/person/person.service.js @@ -1,53 +1,53 @@ -import { Data } from '../tool/data.model' +import { Data } from '../tool/data.model'; export async function createPerson({ - id, - firstname, - lastname, - bio, - link, - orcid, - emailNotifications, - terms, - sector, - organisation, - showMyOrganisation, - tags, - showSector, - showOrganisation, - showBio, - showLink, - showOrcid, - showDomain, - profileComplete -}){ - var type = "person"; - var activeflag = "active"; - return new Promise(async (resolve, reject) => { - return resolve( - await Data.create({ - id, - type, - firstname, - lastname, - bio, - link, - orcid, - activeflag, - emailNotifications, - terms, - sector, - organisation, - showMyOrganisation, - tags, - showSector, - showOrganisation, - showBio, - showLink, - showOrcid, - showDomain, - profileComplete - }) - ) - }) -}; \ No newline at end of file + id, + firstname, + lastname, + bio, + link, + orcid, + emailNotifications, + terms, + sector, + organisation, + showMyOrganisation, + tags, + showSector, + showOrganisation, + showBio, + showLink, + showOrcid, + showDomain, + profileComplete, +}) { + var type = 'person'; + var activeflag = 'active'; + return new Promise(async (resolve, reject) => { + return resolve( + await Data.create({ + id, + type, + firstname, + lastname, + bio, + link, + orcid, + activeflag, + emailNotifications, + terms, + sector, + organisation, + showMyOrganisation, + tags, + showSector, + showOrganisation, + showBio, + showLink, + showOrcid, + showDomain, + profileComplete, + }) + ); + }); +} diff --git a/src/resources/project/project.route.js b/src/resources/project/project.route.js index b992c545..14def5fe 100644 --- a/src/resources/project/project.route.js +++ b/src/resources/project/project.route.js @@ -3,13 +3,7 @@ import { Data } from '../tool/data.model'; import { ROLES } from '../user/user.roles'; import passport from 'passport'; import { utils } from '../auth'; -import { - addTool, - editTool, - setStatus, - getTools, - getToolsAdmin, -} from '../tool/data.repository'; +import { addTool, editTool, setStatus, getTools, getToolsAdmin, getAllTools } from '../tool/data.repository'; import helper from '../utilities/helper.util'; import escape from 'escape-html'; @@ -18,51 +12,41 @@ const router = express.Router(); // @router POST /api/v1/ // @desc Add project user // @access Private -router.post( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await addTool(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.post('/', 
passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await addTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router GET /api/v1/ // @desc Returns List of Project Objects Authenticated // @access Private -router.get( - '/getList', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - req.params.type = 'project'; - let role = req.user.role; +router.get('/getList', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + req.params.type = 'project'; + let role = req.user.role; - if (role === ROLES.Admin) { - await getToolsAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } else if (role === ROLES.Creator) { - await getTools(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } + if (role === ROLES.Admin) { + await getToolsAdmin(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } else if (role === ROLES.Creator) { + await getTools(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); } -); +}); // @router GET /api/v1/ // @desc Returns List of Project Objects No auth @@ -70,11 +54,11 @@ router.get( // @access Public router.get('/', async (req, res) => { req.params.type = 'project'; - await getToolsAdmin(req) - .then((data) => { + await getAllTools(req) + .then(data => { return res.json({ success: true, data }); }) - .catch((err) => { + .catch(err => { return res.json({ success: false, err }); }); }); @@ -97,6 +81,14 @@ router.get('/:projectID', async (req, res) => { as: 'persons', }, }, + { $lookup: { from: 'tools', localField: 'uploader', foreignField: 'id', as: 'uploaderIs' } }, + { + $addFields: { + uploader: { + $concat: [{ $arrayElemAt: ['$uploaderIs.firstname', 0] }, ' ', { $arrayElemAt: ['$uploaderIs.lastname', 0] }], + }, + }, + }, ]); q.exec((err, data) => { if (data.length > 0) { @@ -116,14 +108,10 @@ router.get('/:projectID', async (req, res) => { ]); p.exec(async (err, relatedData) => { - relatedData.forEach((dat) => { - dat.relatedObjects.forEach((x) => { - if ( - x.objectId === req.params.projectID && - dat.id !== req.params.projectID - ) { - if (typeof data[0].relatedObjects === 'undefined') - data[0].relatedObjects = []; + relatedData.forEach(dat => { + dat.relatedObjects.forEach(x => { + if (x.objectId === req.params.projectID && dat.id !== req.params.projectID) { + if (typeof data[0].relatedObjects === 'undefined') data[0].relatedObjects = []; data[0].relatedObjects.push({ objectId: dat.id, reason: x.reason, @@ -139,9 +127,7 @@ router.get('/:projectID', async (req, res) => { return res.json({ success: true, data: data }); }); } else { - return res - .status(404) - .send(`Project not found for Id: ${escape(req.params.projectID)}`); + return res.status(404).send(`Project not found for Id: ${escape(req.params.projectID)}`); } }); }); @@ -149,37 +135,27 @@ router.get('/:projectID', async (req, res) => { // @router PATCH /api/v1/status // @desc Set project status // @access Private -router.patch( - '/:id', - passport.authenticate('jwt'), - 
utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - await setStatus(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.patch('/:id', passport.authenticate('jwt'), async (req, res) => { + await setStatus(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PUT /api/v1/ // @desc Edit project // @access Private -router.put( - '/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await editTool(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.put('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await editTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); module.exports = router; diff --git a/src/resources/publisher/publisher.controller.js b/src/resources/publisher/publisher.controller.js index 9ff7e7af..d48a3bc2 100644 --- a/src/resources/publisher/publisher.controller.js +++ b/src/resources/publisher/publisher.controller.js @@ -34,6 +34,7 @@ module.exports = { // 1. Get the datasets for the publisher from the database let datasets = await Data.find({ type: 'dataset', + activeflag: 'active', 'datasetfields.publisher': req.params.id, }) .populate('publisher') @@ -181,6 +182,7 @@ module.exports = { .map((app) => { return datarequestController.createApplicationDTO( app.toObject(), + constants.userTypes.CUSTODIAN, _id.toString() ); }) @@ -264,7 +266,7 @@ module.exports = { }, []); applications = applications.map((app) => { - let { aboutApplication, _id } = app; + let { aboutApplication = {}, _id } = app; if(typeof aboutApplication === 'string') { aboutApplication = JSON.parse(aboutApplication) || {}; } diff --git a/src/resources/publisher/publisher.model.js b/src/resources/publisher/publisher.model.js index 50385cbe..bfc2dc2e 100644 --- a/src/resources/publisher/publisher.model.js +++ b/src/resources/publisher/publisher.model.js @@ -1,39 +1,42 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -const PublisherSchema = new Schema({ - id: { - type: Number, - unique: true - }, - name: String, - active: { - type: Boolean, - default: true - }, - imageURL: String, - allowsMessaging: { - type: Boolean, - default: false - }, - dataRequestModalContent: { - header: String, - body: String, - footer: String - }, - workflowEnabled: { - type: Boolean, - default: false - } -}, { - toJSON: { virtuals: true }, - toObject: { virtuals: true } -}); +const PublisherSchema = new Schema( + { + id: { + type: Number, + unique: true, + }, + name: String, + active: { + type: Boolean, + default: true, + }, + imageURL: String, + allowsMessaging: { + type: Boolean, + default: false, + }, + dataRequestModalContent: { + header: String, + body: String, + footer: String, + }, + workflowEnabled: { + type: Boolean, + default: false, + }, + }, + { + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); PublisherSchema.virtual('team', { - ref: 'Team', - foreignField: '_id', - localField: '_id', - justOne: true + ref: 'Team', + foreignField: '_id', + localField: '_id', + justOne: true, }); -export const 
PublisherModel = model('Publisher', PublisherSchema) \ No newline at end of file +export const PublisherModel = model('Publisher', PublisherSchema); diff --git a/src/resources/publisher/publisher.route.js b/src/resources/publisher/publisher.route.js index cbbde1ad..19bd0ac5 100644 --- a/src/resources/publisher/publisher.route.js +++ b/src/resources/publisher/publisher.route.js @@ -25,4 +25,4 @@ router.get('/:id/dataaccessrequests', passport.authenticate('jwt'), publisherCon // @access Private router.get('/:id/workflows', passport.authenticate('jwt'), publisherController.getPublisherWorkflows); -module.exports = router +module.exports = router; diff --git a/src/resources/relatedobjects/relatedobjects.route.js b/src/resources/relatedobjects/relatedobjects.route.js index 2cfefde5..31b4e4ad 100644 --- a/src/resources/relatedobjects/relatedobjects.route.js +++ b/src/resources/relatedobjects/relatedobjects.route.js @@ -7,58 +7,57 @@ const router = express.Router(); /** * {get} /relatedobjects/:id - * + * * Return the details on the relatedobject based on the ID. */ router.get('/:id', async (req, res) => { - console.log(`in relatedobjects.route`); - let id = req.params.id; - if (!isNaN(id)) { - let q = Data.aggregate([ - { $match: { $and: [{ id: parseInt(id) }] } }, - { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, - ]); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - } else { - try { - // Get related dataset - let dataVersion = await Data.findOne({ datasetid: id }); - - if (!_.isNil(dataVersion)) { - id = dataVersion.pid; - } - - let data = await Data.findOne({ pid: id, activeflag: 'active' }); - - if (_.isNil(data)) { - data = await Data.findOne({ pid: id, activeflag: 'archive' }).sort({ createdAt: -1 }); - if (_.isNil(data)) { - data = dataVersion; - } - } - - return res.json({ success: true, data: [data] }); - } catch (err) { - return res.json({ success: false, error: err }); - } - } + console.log(`in relatedobjects.route`); + let id = req.params.id; + if (!isNaN(id)) { + let q = Data.aggregate([ + { $match: { $and: [{ id: parseInt(id) }] } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + ]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + } else { + try { + // Get related dataset + let dataVersion = await Data.findOne({ datasetid: id }); + + if (!_.isNil(dataVersion)) { + id = dataVersion.pid; + } + + let data = await Data.findOne({ pid: id, activeflag: 'active' }); + + if (_.isNil(data)) { + data = await Data.findOne({ pid: id, activeflag: 'archive' }).sort({ createdAt: -1 }); + if (_.isNil(data)) { + data = dataVersion; + } + } + + return res.json({ success: true, data: [data] }); + } catch (err) { + return res.json({ success: false, error: err }); + } + } }); router.get('/course/:id', async (req, res) => { - var id = req.params.id; - - var q = Course.aggregate([ - { $match: { $and: [{ id: parseInt(id) }] } }, - // { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } } - ]); - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - + var id = req.params.id; + + var q = Course.aggregate([ + { $match: { $and: [{ id: parseInt(id) }] } }, + // { $lookup: { from: "tools", localField: "authors", 
foreignField: "id", as: "persons" } } + ]); + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/search/filter.route.js b/src/resources/search/filter.route.js index a32d9642..532c298d 100644 --- a/src/resources/search/filter.route.js +++ b/src/resources/search/filter.route.js @@ -1,383 +1,368 @@ -import express from 'express' +import express from 'express'; import { getObjectFilters, getFilter } from './search.repository'; const router = express.Router(); - // @route GET api/v1/search/filter // @desc GET Get filters // @access Public router.get('/', async (req, res) => { - var searchString = req.query.search || ""; //If blank then return all - var tab = req.query.tab || ""; //If blank then return all - if (tab === '') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - - await Promise.all([ - getFilter(searchString, 'dataset', 'license', false, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'tags.features', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.publisher', false, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, getObjectFilters(searchQuery, req, 'dataset')), + var searchString = req.query.search || ''; //If blank then return all + var tab = req.query.tab || ''; //If blank then return all + if (tab === '') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); - getFilter(searchString, 'tool', 'tags.topic', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'tags.features', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'tool', 'categories.category', false, getObjectFilters(searchQuery, req, 'tool')), + await Promise.all([ + getFilter(searchString, 'dataset', 'license', false, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'tags.features', true, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'datasetfields.publisher', false, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, getObjectFilters(searchQuery, req, 'dataset')), + getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, getObjectFilters(searchQuery, req, 'dataset')), - getFilter(searchString, 
'project', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'project', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'project', 'categories.category', false, getObjectFilters(searchQuery, req, 'project')), + getFilter(searchString, 'tool', 'tags.topic', true, getObjectFilters(searchQuery, req, 'tool')), + getFilter(searchString, 'tool', 'tags.features', true, getObjectFilters(searchQuery, req, 'tool')), + getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, getObjectFilters(searchQuery, req, 'tool')), + getFilter(searchString, 'tool', 'categories.category', false, getObjectFilters(searchQuery, req, 'tool')), - getFilter(searchString, 'paper', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), - getFilter(searchString, 'paper', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - licenseFilter: values[0][0], - sampleFilter: values[1][0], - datasetFeatureFilter: values[2][0], - publisherFilter: values[3][0], - ageBandFilter: values[4][0], - geographicCoverageFilter: values[5][0], - phenotypesFilter: values[6][0], + getFilter(searchString, 'project', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), + getFilter(searchString, 'project', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')), + getFilter(searchString, 'project', 'categories.category', false, getObjectFilters(searchQuery, req, 'project')), - toolTopicFilter: values[6][0], - toolFeatureFilter: values[7][0], - toolLanguageFilter: values[8][0], - toolCategoryFilter: values[9][0], + getFilter(searchString, 'paper', 'tags.topics', true, getObjectFilters(searchQuery, req, 'project')), + getFilter(searchString, 'paper', 'tags.features', true, getObjectFilters(searchQuery, req, 'project')), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + licenseFilter: values[0][0], + sampleFilter: values[1][0], + datasetFeatureFilter: values[2][0], + publisherFilter: values[3][0], + ageBandFilter: values[4][0], + geographicCoverageFilter: values[5][0], + phenotypesFilter: values[6][0], - projectTopicFilter: values[10][0], - projectFeatureFilter: values[11][0], - projectCategoryFilter: values[12][0], + toolTopicFilter: values[6][0], + toolFeatureFilter: values[7][0], + toolLanguageFilter: values[8][0], + toolCategoryFilter: values[9][0], - paperTopicFilter: values[13][0], - paperFeatureFilter: values[14][0] - }, - filterOptions: { - licenseFilterOptions: values[0][1], - sampleFilterOptions: values[1][1], - datasetFeaturesFilterOptions: values[2][1], - publisherFilterOptions: values[3][1], - ageBandFilterOptions: values[4][1], - geographicCoverageFilterOptions: values[5][1], - phenotypesOptions: values[6][1], + projectTopicFilter: values[10][0], + projectFeatureFilter: values[11][0], + projectCategoryFilter: values[12][0], - toolTopicsFilterOptions: values[7][1], - featuresFilterOptions: values[8][1], - programmingLanguageFilterOptions: values[9][1], - toolCategoriesFilterOptions: values[10][1], + paperTopicFilter: values[13][0], + paperFeatureFilter: values[14][0], + }, + filterOptions: { + licenseFilterOptions: values[0][1], + sampleFilterOptions: values[1][1], + datasetFeaturesFilterOptions: values[2][1], + publisherFilterOptions: values[3][1], + ageBandFilterOptions: values[4][1], + geographicCoverageFilterOptions: values[5][1], + phenotypesOptions: 
values[6][1], - projectTopicsFilterOptions: values[11][1], - projectFeaturesFilterOptions: values[12][1], - projectCategoriesFilterOptions: values[13][1], + toolTopicsFilterOptions: values[7][1], + featuresFilterOptions: values[8][1], + programmingLanguageFilterOptions: values[9][1], + toolCategoriesFilterOptions: values[10][1], - paperTopicsFilterOptions: values[14][1], - paperFeaturesFilterOptions: values[15][1] - } - }); - }); - } - else if (tab === 'Datasets') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'dataset') - - await Promise.all([ - getFilter(searchString, 'dataset', 'license', false, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'tags.features', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.publisher', false, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, activeFiltersQuery), - getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, activeFiltersQuery) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - licenseFilter: values[0][0], - sampleFilter: values[1][0], - datasetFeatureFilter: values[2][0], - publisherFilter: values[3][0], - ageBandFilter: values[4][0], - geographicCoverageFilter: values[5][0], - phenotypesFilter: values[6][0] - }, - filterOptions: { - licenseFilterOptions: values[0][1], - sampleFilterOptions: values[1][1], - datasetFeaturesFilterOptions: values[2][1], - publisherFilterOptions: values[3][1], - ageBandFilterOptions: values[4][1], - geographicCoverageFilterOptions: values[5][1], - phenotypesOptions: values[6][1] - } - }); - }); - } - else if (tab === 'Tools') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'tool') - - await Promise.all([ - getFilter(searchString, 'tool', 'tags.topics', true, activeFiltersQuery), - getFilter(searchString, 'tool', 'tags.features', true, activeFiltersQuery), - getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, activeFiltersQuery), - getFilter(searchString, 'tool', 'categories.category', false, activeFiltersQuery) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - toolTopicFilter: values[0][0], - toolFeatureFilter: values[1][0], - toolLanguageFilter: values[2][0], - toolCategoryFilter: values[3][0] - }, - filterOptions: { - toolTopicsFilterOptions: values[0][1], - featuresFilterOptions: values[1][1], - programmingLanguageFilterOptions: values[2][1], - toolCategoriesFilterOptions: values[3][1] - } - }); - }); - } - else if (tab === 'Projects') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'project') - - await Promise.all([ - getFilter(searchString, 'project', 'tags.topics', true, activeFiltersQuery), - getFilter(searchString, 'project', 'tags.features', true, activeFiltersQuery), - getFilter(searchString, 'project', 'categories.category', 
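Note: each entry in the values array produced by Promise.all above is the two-element array that getFilter resolves with, i.e. [all distinct values, values still available under the currently active filters] (see getFilter in search.repository.js later in this diff); values[n][0] feeds allFilters and values[n][1] feeds filterOptions. One detail worth double-checking, carried over unchanged from the pre-format code: for the tool/project/paper entries allFilters reads values[6]–values[14] while filterOptions reads values[7]–values[15], and only one of those offsets can line up with the order of the Promise.all array. A hedged usage sketch of a single entry, imported the same way this route file does:

import { getFilter, getObjectFilters } from './search.repository';

// Sketch only: resolve the dataset licence filter for an Express request.
async function datasetLicenceFilters(req) {
    const searchString = req.query.search || '';
    const searchQuery = { $and: [{ activeflag: 'active' }] };
    if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } });

    // getFilter resolves with [combinedResults, activeCombinedResults]:
    // every distinct licence, and the licences still selectable under the active filters.
    const [licenseFilter, licenseFilterOptions] = await getFilter(
        searchString,
        'dataset',
        'license',
        false,
        getObjectFilters(searchQuery, req, 'dataset')
    );
    return { licenseFilter, licenseFilterOptions };
}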
false, activeFiltersQuery) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - projectTopicFilter: values[0][0], - projectFeatureFilter: values[1][0], - projectCategoryFilter: values[2][0], - }, - filterOptions: { - projectTopicsFilterOptions: values[0][1], - projectFeaturesFilterOptions: values[1][1], - projectCategoriesFilterOptions: values[2][1] - } - }); - }); - } - else if (tab === 'Papers') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'paper') - - await Promise.all([ - getFilter(searchString, 'paper', 'tags.topics', true, activeFiltersQuery), - getFilter(searchString, 'paper', 'tags.features', true, activeFiltersQuery) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - paperTopicFilter: values[0][0], - paperFeatureFilter: values[1][0] - }, - filterOptions: { - paperTopicsFilterOptions: values[0][1], - paperFeaturesFilterOptions: values[1][1] - } - }); - }); - } - else if (tab === 'Courses') { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - if (searchString.length > 0) searchQuery["$and"].push({ $text: { $search: searchString } }); - var activeFiltersQuery = getObjectFilters(searchQuery, req, 'course') - - await Promise.all([ - getFilter(searchString, 'course', 'courseOptions.startDate', true, activeFiltersQuery), - getFilter(searchString, 'course', 'provider', true, activeFiltersQuery), - getFilter(searchString, 'course', 'location', true, activeFiltersQuery), - getFilter(searchString, 'course', 'courseOptions.studyMode', true, activeFiltersQuery), - getFilter(searchString, 'course', 'award', true, activeFiltersQuery), - getFilter(searchString, 'course', 'entries.level', true, activeFiltersQuery), - getFilter(searchString, 'course', 'domains', true, activeFiltersQuery), - getFilter(searchString, 'course', 'keywords', true, activeFiltersQuery), - getFilter(searchString, 'course', 'competencyFramework', true, activeFiltersQuery), - getFilter(searchString, 'course', 'nationalPriority', true, activeFiltersQuery) - ]).then((values) => { - return res.json({ - success: true, - allFilters: { - courseStartDatesFilter: values[0][0], - courseProviderFilter: values[1][0], - courseLocationFilter: values[2][0], - courseStudyModeFilter: values[3][0], - courseAwardFilter: values[4][0], - courseEntryLevelFilter: values[5][0], - courseDomainsFilter: values[6][0], - courseKeywordsFilter: values[7][0], - courseFrameworkFilter: values[8][0], - coursePriorityFilter: values[9][0] - }, - filterOptions: { - courseStartDatesFilterOptions: values[0][1], - courseProviderFilterOptions: values[1][1], - courseLocationFilterOptions: values[2][1], - courseStudyModeFilterOptions: values[3][1], - courseAwardFilterOptions: values[4][1], - courseEntryLevelFilterOptions: values[5][1], - courseDomainsFilterOptions: values[6][1], - courseKeywordsFilterOptions: values[7][1], - courseFrameworkFilterOptions: values[8][1], - coursePriorityFilterOptions: values[9][1] - } - }); - }); - } -}); + projectTopicsFilterOptions: values[11][1], + projectFeaturesFilterOptions: values[12][1], + projectCategoriesFilterOptions: values[13][1], + + paperTopicsFilterOptions: values[14][1], + paperFeaturesFilterOptions: values[15][1], + }, + }); + }); + } else if (tab === 'Datasets') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { 
$search: searchString } }); + var activeFiltersQuery = getObjectFilters(searchQuery, req, 'dataset'); + + await Promise.all([ + getFilter(searchString, 'dataset', 'license', false, activeFiltersQuery), + getFilter(searchString, 'dataset', 'datasetfields.physicalSampleAvailability', true, activeFiltersQuery), + getFilter(searchString, 'dataset', 'tags.features', true, activeFiltersQuery), + getFilter(searchString, 'dataset', 'datasetfields.publisher', false, activeFiltersQuery), + getFilter(searchString, 'dataset', 'datasetfields.ageBand', true, activeFiltersQuery), + getFilter(searchString, 'dataset', 'datasetfields.geographicCoverage', true, activeFiltersQuery), + getFilter(searchString, 'dataset', 'datasetfields.phenotypes', true, activeFiltersQuery), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + licenseFilter: values[0][0], + sampleFilter: values[1][0], + datasetFeatureFilter: values[2][0], + publisherFilter: values[3][0], + ageBandFilter: values[4][0], + geographicCoverageFilter: values[5][0], + phenotypesFilter: values[6][0], + }, + filterOptions: { + licenseFilterOptions: values[0][1], + sampleFilterOptions: values[1][1], + datasetFeaturesFilterOptions: values[2][1], + publisherFilterOptions: values[3][1], + ageBandFilterOptions: values[4][1], + geographicCoverageFilterOptions: values[5][1], + phenotypesOptions: values[6][1], + }, + }); + }); + } else if (tab === 'Tools') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); + var activeFiltersQuery = getObjectFilters(searchQuery, req, 'tool'); + await Promise.all([ + getFilter(searchString, 'tool', 'tags.topics', true, activeFiltersQuery), + getFilter(searchString, 'tool', 'tags.features', true, activeFiltersQuery), + getFilter(searchString, 'tool', 'programmingLanguage.programmingLanguage', true, activeFiltersQuery), + getFilter(searchString, 'tool', 'categories.category', false, activeFiltersQuery), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + toolTopicFilter: values[0][0], + toolFeatureFilter: values[1][0], + toolLanguageFilter: values[2][0], + toolCategoryFilter: values[3][0], + }, + filterOptions: { + toolTopicsFilterOptions: values[0][1], + featuresFilterOptions: values[1][1], + programmingLanguageFilterOptions: values[2][1], + toolCategoriesFilterOptions: values[3][1], + }, + }); + }); + } else if (tab === 'Projects') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); + var activeFiltersQuery = getObjectFilters(searchQuery, req, 'project'); + await Promise.all([ + getFilter(searchString, 'project', 'tags.topics', true, activeFiltersQuery), + getFilter(searchString, 'project', 'tags.features', true, activeFiltersQuery), + getFilter(searchString, 'project', 'categories.category', false, activeFiltersQuery), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + projectTopicFilter: values[0][0], + projectFeatureFilter: values[1][0], + projectCategoryFilter: values[2][0], + }, + filterOptions: { + projectTopicsFilterOptions: values[0][1], + projectFeaturesFilterOptions: values[1][1], + projectCategoriesFilterOptions: values[2][1], + }, + }); + }); + } else if (tab === 'Papers') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); + var 
activeFiltersQuery = getObjectFilters(searchQuery, req, 'paper'); + await Promise.all([ + getFilter(searchString, 'paper', 'tags.topics', true, activeFiltersQuery), + getFilter(searchString, 'paper', 'tags.features', true, activeFiltersQuery), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + paperTopicFilter: values[0][0], + paperFeatureFilter: values[1][0], + }, + filterOptions: { + paperTopicsFilterOptions: values[0][1], + paperFeaturesFilterOptions: values[1][1], + }, + }); + }); + } else if (tab === 'Courses') { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + if (searchString.length > 0) searchQuery['$and'].push({ $text: { $search: searchString } }); + var activeFiltersQuery = getObjectFilters(searchQuery, req, 'course'); + + await Promise.all([ + getFilter(searchString, 'course', 'courseOptions.startDate', true, activeFiltersQuery), + getFilter(searchString, 'course', 'provider', true, activeFiltersQuery), + getFilter(searchString, 'course', 'location', true, activeFiltersQuery), + getFilter(searchString, 'course', 'courseOptions.studyMode', true, activeFiltersQuery), + getFilter(searchString, 'course', 'award', true, activeFiltersQuery), + getFilter(searchString, 'course', 'entries.level', true, activeFiltersQuery), + getFilter(searchString, 'course', 'domains', true, activeFiltersQuery), + getFilter(searchString, 'course', 'keywords', true, activeFiltersQuery), + getFilter(searchString, 'course', 'competencyFramework', true, activeFiltersQuery), + getFilter(searchString, 'course', 'nationalPriority', true, activeFiltersQuery), + ]).then(values => { + return res.json({ + success: true, + allFilters: { + courseStartDatesFilter: values[0][0], + courseProviderFilter: values[1][0], + courseLocationFilter: values[2][0], + courseStudyModeFilter: values[3][0], + courseAwardFilter: values[4][0], + courseEntryLevelFilter: values[5][0], + courseDomainsFilter: values[6][0], + courseKeywordsFilter: values[7][0], + courseFrameworkFilter: values[8][0], + coursePriorityFilter: values[9][0], + }, + filterOptions: { + courseStartDatesFilterOptions: values[0][1], + courseProviderFilterOptions: values[1][1], + courseLocationFilterOptions: values[2][1], + courseStudyModeFilterOptions: values[3][1], + courseAwardFilterOptions: values[4][1], + courseEntryLevelFilterOptions: values[5][1], + courseDomainsFilterOptions: values[6][1], + courseKeywordsFilterOptions: values[7][1], + courseFrameworkFilterOptions: values[8][1], + coursePriorityFilterOptions: values[9][1], + }, + }); + }); + } +}); // @route GET api/v1/search/filter/topic/:type // @desc GET Get list of topics by entity type // @access Public -router.get('/topic/:type', - async (req, res) => { - await getFilter('', req.params.type, 'tags.topics', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/topic/:type', async (req, res) => { + await getFilter('', req.params.type, 'tags.topics', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/feature/:type // @desc GET Get list of features by entity type // @access Public -router.get('/feature/:type', - async (req, res) => { - await getFilter('', req.params.type, 'tags.features', 
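Note: an illustrative client call for the tab-specific branches above. The host is a placeholder and the filter values are made up; the query-string keys and the '::' separator for multi-valued filters come from getObjectFilters in search.repository.js, shown later in this diff.

// Assumes an ES module context with the Fetch API available (browsers, or Node 18+).
const params = new URLSearchParams({
    search: 'cancer',
    tab: 'Datasets',
    license: 'Open Government Licence::MIT', // multiple filter values are '::' separated
    publisher: 'EXAMPLE PUBLISHER',
});
const response = await fetch(`https://api.example.org/api/v1/search/filter?${params.toString()}`);
const { success, allFilters, filterOptions } = await response.json();
// For tab=Datasets, allFilters/filterOptions carry the licence, sample availability,
// feature, publisher, age band, geographic coverage and phenotype lists built above.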
true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/feature/:type', async (req, res) => { + await getFilter('', req.params.type, 'tags.features', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/language/:type // @desc GET Get list of languages by entity type // @access Public -router.get('/language/:type', - async (req, res) => { - await getFilter('', req.params.type, 'programmingLanguage.programmingLanguage', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/language/:type', async (req, res) => { + await getFilter( + '', + req.params.type, + 'programmingLanguage.programmingLanguage', + true, + getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type) + ) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/category/:type // @desc GET Get list of categories by entity type // @access Public -router.get('/category/:type', - async (req, res) => { - await getFilter('', req.params.type, 'categories.category', false, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/category/:type', async (req, res) => { + await getFilter( + '', + req.params.type, + 'categories.category', + false, + getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type) + ) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/license/:type // @desc GET Get list of licenses by entity type // @access Public -router.get('/license/:type', - async (req, res) => { - await getFilter('', req.params.type, 'license', false, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/license/:type', async (req, res) => { + await getFilter('', req.params.type, 'license', false, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/organisation/:type // @desc GET Get list of organisations by entity type // @access Public -router.get('/organisation/:type', - async (req, res) => { - await getFilter('', req.params.type, 'organisation', false, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/organisation/:type', async (req, res) => { + await getFilter('', req.params.type, 
'organisation', false, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/domains/:type // @desc GET Get list of features by entity type // @access Public -router.get('/domains/:type', - async (req, res) => { - await getFilter('', req.params.type, 'domains', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/domains/:type', async (req, res) => { + await getFilter('', req.params.type, 'domains', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/keywords/:type // @desc GET Get list of features by entity type // @access Public -router.get('/keywords/:type', - async (req, res) => { - await getFilter('', req.params.type, 'keywords', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/keywords/:type', async (req, res) => { + await getFilter('', req.params.type, 'keywords', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @route GET api/v1/search/filter/awards/:type // @desc GET Get list of features by entity type // @access Public -router.get('/awards/:type', - async (req, res) => { - await getFilter('', req.params.type, 'award', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) - .then(data => { - return res.json({success: true, data}); - }) - .catch(err => { - return res.json({success: false, err}); - }); - } -); +router.get('/awards/:type', async (req, res) => { + await getFilter('', req.params.type, 'award', true, getObjectFilters({ $and: [{ activeflag: 'active' }] }, req, req.params.type)) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/search/record.search.model.js b/src/resources/search/record.search.model.js index 1c310fc0..c2d62137 100644 --- a/src/resources/search/record.search.model.js +++ b/src/resources/search/record.search.model.js @@ -1,22 +1,21 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; const RecordSearchSchema = new Schema( - { - searched: String, - returned: - { - dataset: Number, - tool: Number, - project: Number, - paper: Number, - person: Number - }, - datesearched: Date - }, - { - collection: 'recorded_search', - timestamps: true - } + { + searched: String, + returned: { + dataset: Number, + tool: Number, + project: Number, + paper: Number, + person: Number, + }, + datesearched: Date, + }, + { + collection: 'recorded_search', + timestamps: true, + } ); -export const RecordSearchData = model('RecordSearchModel', RecordSearchSchema); \ No newline at end of file 
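Note on the record.search.model.js hunk here: the schema's returned object declares dataset, tool, project, paper and person counts, but the search router later in this diff also assigns recordSearchData.returned.course; under Mongoose's default strict mode a path that is not in the schema is not persisted, so that course count would be silently dropped on save. If it should be recorded, a possible extension (hypothetical, not part of this changeset) would mirror the existing fields:

import { model, Schema } from 'mongoose';

const RecordSearchSchema = new Schema(
    {
        searched: String,
        returned: {
            dataset: Number,
            tool: Number,
            project: Number,
            paper: Number,
            person: Number,
            course: Number, // would let recordSearchData.returned.course persist
        },
        datesearched: Date,
    },
    {
        collection: 'recorded_search',
        timestamps: true,
    }
);

export const RecordSearchData = model('RecordSearchModel', RecordSearchSchema);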
+export const RecordSearchData = model('RecordSearchModel', RecordSearchSchema); diff --git a/src/resources/search/search.repository.js b/src/resources/search/search.repository.js index 3f82aae2..2473217c 100644 --- a/src/resources/search/search.repository.js +++ b/src/resources/search/search.repository.js @@ -4,568 +4,587 @@ import _ from 'lodash'; import moment from 'moment'; export function getObjectResult(type, searchAll, searchQuery, startIndex, maxResults, sort) { - let collection = Data; - if (type === 'course') collection = Course; - var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - newSearchQuery["$and"].push({ type: type }) - - if (type === 'course') { - newSearchQuery["$and"].forEach((x) => { - if (x.$or) { - x.$or.forEach((y) => { - if (y['courseOptions.startDate']) y['courseOptions.startDate'] = new Date (y['courseOptions.startDate']) - }) - } - }) - newSearchQuery["$and"].push({$or:[{"courseOptions.startDate": { $gte: new Date(Date.now())}}, { 'courseOptions.flexibleDates':true}]}); - } - - var queryObject; - if (type === 'course') { - queryObject = [ - { $unwind: '$courseOptions' }, - { $match: newSearchQuery }, - { - $project: { - "_id": 0, - "id": 1, - "title": 1, - "provider": 1, - "type": 1, - "description": 1, - "courseOptions.flexibleDates": 1, - "courseOptions.startDate": 1, - "courseOptions.studyMode": 1, - "domains": 1, - "award": 1 - } - } - ]; - } - else { - queryObject = [ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { - $project: { - "_id": 0, - "id": 1, - "name": 1, - "type": 1, - "description": 1, - "bio": { - $cond: { - if: { $eq: [ false, "$showBio" ] }, - then: "$$REMOVE", - else: "$bio" - } - }, - "categories.category": 1, - "categories.programmingLanguage": 1, - "programmingLanguage.programmingLanguage": 1, - "programmingLanguage.version": 1, - "license": 1, - "tags.features": 1, - "tags.topics": 1, - "firstname": 1, - "lastname": 1, - "datasetid": 1, - "pid": 1, - "datasetfields.publisher": 1, - "datasetfields.geographicCoverage": 1, - "datasetfields.physicalSampleAvailability": 1, - "datasetfields.abstract": 1, - "datasetfields.ageBand": 1, - "datasetfields.phenotypes": 1, - "datasetv2": 1, - - "persons.id": 1, - "persons.firstname": 1, - "persons.lastname": 1, - - "activeflag": 1, - "counter": 1, - "datasetfields.metadataquality.quality_score": 1 - } - } - ]; - } - - if (sort === '' || sort ==='relevance') { - if (type === "person") { - if (searchAll) queryObject.push({ "$sort": { "lastname": 1 }}); - else queryObject.push({ "$sort": { score: { $meta: "textScore" }}}); - } - else { - if (searchAll) queryObject.push({ "$sort": { "name": 1 }}); - else queryObject.push({ "$sort": { score: { $meta: "textScore" }}}); - } - } - else if (sort === 'popularity') { - if (type === "person") { - if (searchAll) queryObject.push({ "$sort": { "counter": -1, "lastname": 1 }}); - else queryObject.push({ "$sort": { "counter": -1, score: { $meta: "textScore" }}}); - } - else { - if (searchAll) queryObject.push({ "$sort": { "counter": -1, "name": 1 }}); - else queryObject.push({ "$sort": { "counter": -1, score: { $meta: "textScore" }}}); - } - } - else if (sort === 'metadata') { - if (searchAll) queryObject.push({ "$sort": { "datasetfields.metadataquality.quality_score": -1, "name": 1 }}); - else queryObject.push({ "$sort": { "datasetfields.metadataquality.quality_score": -1, score: { $meta: "textScore" }}}); - } - else if (sort === 'startdate') { - if (searchAll) 
queryObject.push({ "$sort": { "courseOptions.startDate": 1 }}); - else queryObject.push({ "$sort": { "courseOptions.startDate": 1, score: { $meta: "textScore" }}}); - } - - var q = collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)); - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === "undefined") resolve([]); - else resolve(data); - }) - }) + let collection = Data; + if (type === 'course') collection = Course; + var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + newSearchQuery['$and'].push({ type: type }); + + if (type === 'course') { + newSearchQuery['$and'].forEach(x => { + if (x.$or) { + x.$or.forEach(y => { + if (y['courseOptions.startDate']) y['courseOptions.startDate'] = new Date(y['courseOptions.startDate']); + }); + } + }); + newSearchQuery['$and'].push({ + $or: [{ 'courseOptions.startDate': { $gte: new Date(Date.now()) } }, { 'courseOptions.flexibleDates': true }], + }); + } + + var queryObject; + if (type === 'course') { + queryObject = [ + { $unwind: '$courseOptions' }, + { $match: newSearchQuery }, + { + $project: { + _id: 0, + id: 1, + title: 1, + provider: 1, + type: 1, + description: 1, + 'courseOptions.flexibleDates': 1, + 'courseOptions.startDate': 1, + 'courseOptions.studyMode': 1, + domains: 1, + award: 1, + }, + }, + ]; + } else { + queryObject = [ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { + $project: { + _id: 0, + id: 1, + name: 1, + type: 1, + description: 1, + bio: { + $cond: { + if: { $eq: [false, '$showBio'] }, + then: '$$REMOVE', + else: '$bio', + }, + }, + 'categories.category': 1, + 'categories.programmingLanguage': 1, + 'programmingLanguage.programmingLanguage': 1, + 'programmingLanguage.version': 1, + license: 1, + 'tags.features': 1, + 'tags.topics': 1, + firstname: 1, + lastname: 1, + datasetid: 1, + pid: 1, + 'datasetfields.publisher': 1, + 'datasetfields.geographicCoverage': 1, + 'datasetfields.physicalSampleAvailability': 1, + 'datasetfields.abstract': 1, + 'datasetfields.ageBand': 1, + 'datasetfields.phenotypes': 1, + datasetv2: 1, + + 'persons.id': 1, + 'persons.firstname': 1, + 'persons.lastname': 1, + + activeflag: 1, + counter: 1, + 'datasetfields.metadataquality.quality_score': 1, + }, + }, + ]; + } + + if (sort === '' || sort === 'relevance') { + if (type === 'person') { + if (searchAll) queryObject.push({ $sort: { lastname: 1 } }); + else queryObject.push({ $sort: { score: { $meta: 'textScore' } } }); + } else { + if (searchAll) queryObject.push({ $sort: { name: 1 } }); + else queryObject.push({ $sort: { score: { $meta: 'textScore' } } }); + } + } else if (sort === 'popularity') { + if (type === 'person') { + if (searchAll) queryObject.push({ $sort: { counter: -1, lastname: 1 } }); + else queryObject.push({ $sort: { counter: -1, score: { $meta: 'textScore' } } }); + } else { + if (searchAll) queryObject.push({ $sort: { counter: -1, name: 1 } }); + else queryObject.push({ $sort: { counter: -1, score: { $meta: 'textScore' } } }); + } + } else if (sort === 'metadata') { + if (searchAll) queryObject.push({ $sort: { 'datasetfields.metadataquality.quality_score': -1, name: 1 } }); + else queryObject.push({ $sort: { 'datasetfields.metadataquality.quality_score': -1, score: { $meta: 'textScore' } } }); + } else if (sort === 'startdate') { + if (searchAll) queryObject.push({ $sort: { 'courseOptions.startDate': 1 } }); + else queryObject.push({ $sort: { 'courseOptions.startDate': 1, 
score: { $meta: 'textScore' } } }); + } + + var q = collection.aggregate(queryObject).skip(parseInt(startIndex)).limit(parseInt(maxResults)); + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); } export function getObjectCount(type, searchAll, searchQuery) { - let collection = Data; - if (type === 'course') collection = Course; - var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - newSearchQuery["$and"].push({ type: type }) - if (type === 'course') { - newSearchQuery["$and"].forEach((x) => { - if (x.$or) { - x.$or.forEach((y) => { - if (y['courseOptions.startDate']) y['courseOptions.startDate'] = new Date (y['courseOptions.startDate']) - }) - } - }) - newSearchQuery["$and"].push({$or:[{"courseOptions.startDate": { $gte: new Date(Date.now())}}, { 'courseOptions.flexibleDates':true}]}); - } - - var q = ''; - if (type === 'course') { - if (searchAll) { - q = collection.aggregate([ - { $unwind: '$courseOptions' }, - { $match: newSearchQuery }, - { - "$group": { - "_id": {}, - "count": { - "$sum": 1 - } - } - }, - { - "$project": { - "count": "$count", - "_id": 0 - } - } - ]); - } - else { - q = collection.aggregate([ - { $unwind: '$courseOptions' }, - { $match: newSearchQuery }, - { - "$group": { - "_id": {}, - "count": { - "$sum": 1 - } - } - }, - { - "$project": { - "count": "$count", - "_id": 0 - } - } - ]).sort({ score: { $meta: "textScore" } }); - } - } - else { - if (searchAll) { - q = collection.aggregate([ - { $match: newSearchQuery }, - { - "$group": { - "_id": {}, - "count": { - "$sum": 1 - } - } - }, - { - "$project": { - "count": "$count", - "_id": 0 - } - } - ]); - } - else { - q = collection.aggregate([ - { $match: newSearchQuery }, - { - "$group": { - "_id": {}, - "count": { - "$sum": 1 - } - } - }, - { - "$project": { - "count": "$count", - "_id": 0 - } - } - ]).sort({ score: { $meta: "textScore" } }); - } - } - - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === "undefined") resolve([]); - else resolve(data); - }) - }) + let collection = Data; + if (type === 'course') collection = Course; + var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + newSearchQuery['$and'].push({ type: type }); + if (type === 'course') { + newSearchQuery['$and'].forEach(x => { + if (x.$or) { + x.$or.forEach(y => { + if (y['courseOptions.startDate']) y['courseOptions.startDate'] = new Date(y['courseOptions.startDate']); + }); + } + }); + newSearchQuery['$and'].push({ + $or: [{ 'courseOptions.startDate': { $gte: new Date(Date.now()) } }, { 'courseOptions.flexibleDates': true }], + }); + } + + var q = ''; + if (type === 'course') { + if (searchAll) { + q = collection.aggregate([ + { $unwind: '$courseOptions' }, + { $match: newSearchQuery }, + { + $group: { + _id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]); + } else { + q = collection + .aggregate([ + { $unwind: '$courseOptions' }, + { $match: newSearchQuery }, + { + $group: { + _id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]) + .sort({ score: { $meta: 'textScore' } }); + } + } else { + if (searchAll) { + q = collection.aggregate([ + { $match: newSearchQuery }, + { + $group: { + _id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]); + } else { + q = collection + .aggregate([ + { $match: newSearchQuery }, + { + $group: { + 
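Note: getObjectCount resolves with the raw aggregation output, i.e. [] when nothing matches or a single-element array such as [{ count: 42 }], which is why the router further down guards every read with summaryCounts[n][0] !== undefined. A hedged usage sketch combining it with getObjectResult's skip/limit paging; the query literal is a minimal stand-in for what getObjectFilters would normally build from req.query.

import { getObjectResult, getObjectCount } from './search.repository';

async function firstPageOfDatasets() {
    const searchQuery = { $and: [{ activeflag: 'active' }, { $text: { $search: 'covid' } }] };

    // startIndex/maxResults feed the .skip()/.limit() calls inside getObjectResult.
    const results = await getObjectResult('dataset', false, searchQuery, 0, 40, 'relevance');

    // getObjectCount resolves with [] or [{ count: n }].
    const countResult = await getObjectCount('dataset', false, searchQuery);
    const total = countResult[0] !== undefined ? countResult[0].count : 0;

    return { results, total };
}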
_id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]) + .sort({ score: { $meta: 'textScore' } }); + } + } + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); } export function getObjectFilters(searchQueryStart, req, type) { - var searchQuery = JSON.parse(JSON.stringify(searchQueryStart)); - - let { - license = '', sampleavailability = '', keywords = '', publisher = '', ageband = '', geographiccover = '', phenotypes = '', - programmingLanguage = '', toolcategories = '', features = '', tooltopics = '', - projectcategories = '', projectfeatures = '', projecttopics = '', - paperfeatures = '', papertopics = '', - coursestartdates = '', coursedomains = '', coursekeywords = '', courseprovider = '', courselocation = '', coursestudymode = '', courseaward = '', courseentrylevel = '', courseframework = '', coursepriority = '' - } = req.query; - - if (type === "dataset") { - if (license.length > 0) { - var filterTermArray = []; - license.split('::').forEach((filterTerm) => { - filterTermArray.push({ "license": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (sampleavailability.length > 0) { - var filterTermArray = []; - sampleavailability.split('::').forEach((filterTerm) => { - filterTermArray.push({ "datasetfields.physicalSampleAvailability": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (keywords.length > 0) { - var filterTermArray = []; - keywords.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.features": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (publisher.length > 0) { - var filterTermArray = []; - publisher.split('::').forEach((filterTerm) => { - filterTermArray.push({ "datasetfields.publisher": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (ageband.length > 0) { - var filterTermArray = []; - ageband.split('::').forEach((filterTerm) => { - filterTermArray.push({ "datasetfields.ageBand": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (geographiccover.length > 0) { - var filterTermArray = []; - geographiccover.split('::').forEach((filterTerm) => { - filterTermArray.push({ "datasetfields.geographicCoverage": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (phenotypes.length > 0) { - var filterTermArray = []; - phenotypes.split('::').forEach((filterTerm) => { - filterTermArray.push({ "datasetfields.phenotypes.name": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - } - - if (type === "tool") { - if (programmingLanguage.length > 0) { - var filterTermArray = []; - programmingLanguage.split('::').forEach((filterTerm) => { - filterTermArray.push({ "programmingLanguage.programmingLanguage": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (toolcategories.length > 0) { - var filterTermArray = []; - toolcategories.split('::').forEach((filterTerm) => { - filterTermArray.push({ "categories.category": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (features.length > 0) { - var filterTermArray = []; - features.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.features": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if 
(tooltopics.length > 0) { - var filterTermArray = []; - tooltopics.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.topics": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - } - else if (type === "project") { - if (projectcategories.length > 0) { - var filterTermArray = []; - projectcategories.split('::').forEach((filterTerm) => { - filterTermArray.push({ "categories.category": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (projectfeatures.length > 0) { - var filterTermArray = []; - projectfeatures.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.features": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (projecttopics.length > 0) { - var filterTermArray = []; - projecttopics.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.topics": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - } - else if (type === "paper") { - if (paperfeatures.length > 0) { - var filterTermArray = []; - paperfeatures.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.features": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (papertopics.length > 0) { - var filterTermArray = []; - papertopics.split('::').forEach((filterTerm) => { - filterTermArray.push({ "tags.topics": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - } - else if (type === "course") { - if (coursestartdates.length > 0) { - var filterTermArray = []; - coursestartdates.split('::').forEach((filterTerm) => { - filterTermArray.push({ "courseOptions.startDate": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (courseprovider.length > 0) { - var filterTermArray = []; - courseprovider.split('::').forEach((filterTerm) => { - filterTermArray.push({ "provider": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (courselocation.length > 0) { - var filterTermArray = []; - courselocation.split('::').forEach((filterTerm) => { - filterTermArray.push({ "location": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (coursestudymode.length > 0) { - var filterTermArray = []; - coursestudymode.split('::').forEach((filterTerm) => { - filterTermArray.push({ "courseOptions.studyMode": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (courseaward.length > 0) { - var filterTermArray = []; - courseaward.split('::').forEach((filterTerm) => { - filterTermArray.push({ "award": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (courseentrylevel.length > 0) { - var filterTermArray = []; - courseentrylevel.split('::').forEach((filterTerm) => { - filterTermArray.push({ "entries.level": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (coursedomains.length > 0) { - var filterTermArray = []; - coursedomains.split('::').forEach((filterTerm) => { - filterTermArray.push({ "domains": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (coursekeywords.length > 0) { - var filterTermArray = []; - coursekeywords.split('::').forEach((filterTerm) => { - filterTermArray.push({ "keywords": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (courseframework.length > 0) { - var filterTermArray = []; - 
courseframework.split('::').forEach((filterTerm) => { - filterTermArray.push({ "competencyFramework": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - - if (coursepriority.length > 0) { - var filterTermArray = []; - coursepriority.split('::').forEach((filterTerm) => { - filterTermArray.push({ "nationalPriority": filterTerm }) - }); - searchQuery["$and"].push({ "$or": filterTermArray }); - } - } - return searchQuery; + var searchQuery = JSON.parse(JSON.stringify(searchQueryStart)); + + let { + license = '', + sampleavailability = '', + keywords = '', + publisher = '', + ageband = '', + geographiccover = '', + phenotypes = '', + programmingLanguage = '', + toolcategories = '', + features = '', + tooltopics = '', + projectcategories = '', + projectfeatures = '', + projecttopics = '', + paperfeatures = '', + papertopics = '', + coursestartdates = '', + coursedomains = '', + coursekeywords = '', + courseprovider = '', + courselocation = '', + coursestudymode = '', + courseaward = '', + courseentrylevel = '', + courseframework = '', + coursepriority = '', + } = req.query; + + if (type === 'dataset') { + if (license.length > 0) { + var filterTermArray = []; + license.split('::').forEach(filterTerm => { + filterTermArray.push({ license: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (sampleavailability.length > 0) { + var filterTermArray = []; + sampleavailability.split('::').forEach(filterTerm => { + filterTermArray.push({ 'datasetfields.physicalSampleAvailability': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (keywords.length > 0) { + var filterTermArray = []; + keywords.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.features': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (publisher.length > 0) { + var filterTermArray = []; + publisher.split('::').forEach(filterTerm => { + filterTermArray.push({ 'datasetfields.publisher': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (ageband.length > 0) { + var filterTermArray = []; + ageband.split('::').forEach(filterTerm => { + filterTermArray.push({ 'datasetfields.ageBand': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (geographiccover.length > 0) { + var filterTermArray = []; + geographiccover.split('::').forEach(filterTerm => { + filterTermArray.push({ 'datasetfields.geographicCoverage': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (phenotypes.length > 0) { + var filterTermArray = []; + phenotypes.split('::').forEach(filterTerm => { + filterTermArray.push({ 'datasetfields.phenotypes.name': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + } + + if (type === 'tool') { + if (programmingLanguage.length > 0) { + var filterTermArray = []; + programmingLanguage.split('::').forEach(filterTerm => { + filterTermArray.push({ 'programmingLanguage.programmingLanguage': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (toolcategories.length > 0) { + var filterTermArray = []; + toolcategories.split('::').forEach(filterTerm => { + filterTermArray.push({ 'categories.category': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (features.length > 0) { + var filterTermArray = []; + features.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.features': filterTerm }); + 
}); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (tooltopics.length > 0) { + var filterTermArray = []; + tooltopics.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.topics': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + } else if (type === 'project') { + if (projectcategories.length > 0) { + var filterTermArray = []; + projectcategories.split('::').forEach(filterTerm => { + filterTermArray.push({ 'categories.category': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (projectfeatures.length > 0) { + var filterTermArray = []; + projectfeatures.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.features': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (projecttopics.length > 0) { + var filterTermArray = []; + projecttopics.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.topics': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + } else if (type === 'paper') { + if (paperfeatures.length > 0) { + var filterTermArray = []; + paperfeatures.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.features': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (papertopics.length > 0) { + var filterTermArray = []; + papertopics.split('::').forEach(filterTerm => { + filterTermArray.push({ 'tags.topics': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + } else if (type === 'course') { + if (coursestartdates.length > 0) { + var filterTermArray = []; + coursestartdates.split('::').forEach(filterTerm => { + filterTermArray.push({ 'courseOptions.startDate': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (courseprovider.length > 0) { + var filterTermArray = []; + courseprovider.split('::').forEach(filterTerm => { + filterTermArray.push({ provider: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (courselocation.length > 0) { + var filterTermArray = []; + courselocation.split('::').forEach(filterTerm => { + filterTermArray.push({ location: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (coursestudymode.length > 0) { + var filterTermArray = []; + coursestudymode.split('::').forEach(filterTerm => { + filterTermArray.push({ 'courseOptions.studyMode': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (courseaward.length > 0) { + var filterTermArray = []; + courseaward.split('::').forEach(filterTerm => { + filterTermArray.push({ award: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (courseentrylevel.length > 0) { + var filterTermArray = []; + courseentrylevel.split('::').forEach(filterTerm => { + filterTermArray.push({ 'entries.level': filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (coursedomains.length > 0) { + var filterTermArray = []; + coursedomains.split('::').forEach(filterTerm => { + filterTermArray.push({ domains: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (coursekeywords.length > 0) { + var filterTermArray = []; + coursekeywords.split('::').forEach(filterTerm => { + filterTermArray.push({ keywords: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (courseframework.length > 0) { + var filterTermArray = []; + 
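Note: every branch of getObjectFilters follows the same shape — split the incoming query-string value on '::', turn each term into a field/value pair, and push the resulting $or group onto the query's $and. A standalone sketch of that pattern (pushOrFilter is an illustrative helper name, not something this changeset introduces):

const pushOrFilter = (searchQuery, field, rawValue) => {
    if (rawValue.length === 0) return;
    const filterTermArray = rawValue.split('::').map(filterTerm => ({ [field]: filterTerm }));
    searchQuery['$and'].push({ $or: filterTermArray });
};

const searchQuery = { $and: [{ activeflag: 'active' }] };
pushOrFilter(searchQuery, 'license', 'Open Government Licence::MIT');
// searchQuery is now:
// { $and: [ { activeflag: 'active' },
//           { $or: [{ license: 'Open Government Licence' }, { license: 'MIT' }] } ] }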
courseframework.split('::').forEach(filterTerm => { + filterTermArray.push({ competencyFramework: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + + if (coursepriority.length > 0) { + var filterTermArray = []; + coursepriority.split('::').forEach(filterTerm => { + filterTermArray.push({ nationalPriority: filterTerm }); + }); + searchQuery['$and'].push({ $or: filterTermArray }); + } + } + return searchQuery; } export const getFilter = async (searchString, type, field, isArray, activeFiltersQuery) => { - return new Promise(async (resolve, reject) => { - let collection = Data; - if (type === 'course') collection = Course; - var q = '', p = ''; - var combinedResults = [], activeCombinedResults = []; - - if (searchString) q = collection.aggregate(filterQueryGenerator(field, searchString, type, isArray, {})); - else q = collection.aggregate(filterQueryGenerator(field, '', type, isArray, {})); - - q.exec((err, data) => { - if (err) return resolve({}) - - if (data.length) { - data.forEach((dat) => { - if (dat.result && dat.result !== '') { - if (field === 'datasetfields.phenotypes') combinedResults.push(dat.result.name.trim()); - else if (field === 'courseOptions.startDate') combinedResults.push(moment(dat.result).format("DD MMM YYYY")); - else combinedResults.push(dat.result.trim()); - } - }) - } - - var newSearchQuery = JSON.parse(JSON.stringify(activeFiltersQuery)); - newSearchQuery["$and"].push({ type: type }) - - if (searchString) p = collection.aggregate(filterQueryGenerator(field, searchString, type, isArray, newSearchQuery)); - else p = collection.aggregate(filterQueryGenerator(field, '', type, isArray, newSearchQuery)); - - p.exec((activeErr, activeData) => { - if (activeData.length) { - activeData.forEach((dat) => { - if (dat.result && dat.result !== '') { - if (field === 'datasetfields.phenotypes') activeCombinedResults.push(dat.result.name.trim()); - else if (field === 'courseOptions.startDate') activeCombinedResults.push(moment(dat.result).format("DD MMM YYYY")); - else activeCombinedResults.push(dat.result.trim()); - } - }) - } - resolve([combinedResults, activeCombinedResults]); - }); - }); - }) -} + return new Promise(async (resolve, reject) => { + let collection = Data; + if (type === 'course') collection = Course; + var q = '', + p = ''; + var combinedResults = [], + activeCombinedResults = []; + + if (searchString) q = collection.aggregate(filterQueryGenerator(field, searchString, type, isArray, {})); + else q = collection.aggregate(filterQueryGenerator(field, '', type, isArray, {})); + + q.exec((err, data) => { + if (err) return resolve({}); + + if (data.length) { + data.forEach(dat => { + if (dat.result && dat.result !== '') { + if (field === 'datasetfields.phenotypes') combinedResults.push(dat.result.name.trim()); + else if (field === 'courseOptions.startDate') combinedResults.push(moment(dat.result).format('DD MMM YYYY')); + else combinedResults.push(dat.result.trim()); + } + }); + } + + var newSearchQuery = JSON.parse(JSON.stringify(activeFiltersQuery)); + newSearchQuery['$and'].push({ type: type }); + + if (searchString) p = collection.aggregate(filterQueryGenerator(field, searchString, type, isArray, newSearchQuery)); + else p = collection.aggregate(filterQueryGenerator(field, '', type, isArray, newSearchQuery)); + + p.exec((activeErr, activeData) => { + if (activeData.length) { + activeData.forEach(dat => { + if (dat.result && dat.result !== '') { + if (field === 'datasetfields.phenotypes') 
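Note: getFilter above runs the same aggregation twice — once with an empty filter query (all options) and once with the active filters plus a type clause (currently selectable options) — delegating the pipeline itself to filterQueryGenerator, reformatted just below. For reference, this is the pipeline it builds for a simple scalar field with no search string and no active filters, reconstructed from that code (e.g. filterQueryGenerator('license', '', 'dataset', false, {})):

const pipeline = [
    { $match: { $and: [{ type: 'dataset' }, { activeflag: 'active' }] } },
    { $project: { result: '$license', _id: 0 } },
    { $group: { _id: null, distinct: { $addToSet: '$$ROOT' } } }, // de-duplicate
    { $unwind: { path: '$distinct', preserveNullAndEmptyArrays: false } },
    { $replaceRoot: { newRoot: '$distinct' } },
    { $sort: { result: 1 } }, // alphabetical option list
];
// Array-valued fields (isArray === true) additionally get two { $unwind: '$result' }
// stages before the $group; each output document has the shape { result: '<value>' }.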
activeCombinedResults.push(dat.result.name.trim()); + else if (field === 'courseOptions.startDate') activeCombinedResults.push(moment(dat.result).format('DD MMM YYYY')); + else activeCombinedResults.push(dat.result.trim()); + } + }); + } + resolve([combinedResults, activeCombinedResults]); + }); + }); + }); +}; export function filterQueryGenerator(filter, searchString, type, isArray, activeFiltersQuery) { - var queryArray = [] - - if (type === "course") { - queryArray.push({ $unwind: '$courseOptions' }); - queryArray.push({ $match: {$or:[{"courseOptions.startDate": { $gte: new Date(Date.now())}}, { 'courseOptions.flexibleDates':true}]}}); - } - - if (!_.isEmpty(activeFiltersQuery)) { - queryArray.push({ $match: activeFiltersQuery}); - } - else { - if (searchString !=='') queryArray.push({ $match: { $and: [{ $text: { $search: searchString } }, { type: type }, { activeflag: 'active' }] } }); - else queryArray.push({ $match: { $and: [{ type: type }, { activeflag: 'active' }] } }); - } - - queryArray.push( - { - "$project" : { - "result" : "$"+filter, - "_id": 0 - } - } - ); - - if (isArray) { - queryArray.push({"$unwind": '$result'}); - queryArray.push({"$unwind": '$result'}); - } - - queryArray.push( - { - "$group" : { - "_id" : null, - "distinct" : { - "$addToSet" : "$$ROOT" - } - } - }, - { - "$unwind" : { - "path" : "$distinct", - "preserveNullAndEmptyArrays" : false - } - }, - { - "$replaceRoot" : { - "newRoot" : "$distinct" - } - }, - { - "$sort": { - "result": 1 - } - } - ); - - return queryArray; -} \ No newline at end of file + var queryArray = []; + + if (type === 'course') { + queryArray.push({ $unwind: '$courseOptions' }); + queryArray.push({ + $match: { $or: [{ 'courseOptions.startDate': { $gte: new Date(Date.now()) } }, { 'courseOptions.flexibleDates': true }] }, + }); + } + + if (!_.isEmpty(activeFiltersQuery)) { + queryArray.push({ $match: activeFiltersQuery }); + } else { + if (searchString !== '') + queryArray.push({ $match: { $and: [{ $text: { $search: searchString } }, { type: type }, { activeflag: 'active' }] } }); + else queryArray.push({ $match: { $and: [{ type: type }, { activeflag: 'active' }] } }); + } + + queryArray.push({ + $project: { + result: '$' + filter, + _id: 0, + }, + }); + + if (isArray) { + queryArray.push({ $unwind: '$result' }); + queryArray.push({ $unwind: '$result' }); + } + + queryArray.push( + { + $group: { + _id: null, + distinct: { + $addToSet: '$$ROOT', + }, + }, + }, + { + $unwind: { + path: '$distinct', + preserveNullAndEmptyArrays: false, + }, + }, + { + $replaceRoot: { + newRoot: '$distinct', + }, + }, + { + $sort: { + result: 1, + }, + } + ); + + return queryArray; +} diff --git a/src/resources/search/search.router.js b/src/resources/search/search.router.js index c3714af0..4037b10a 100644 --- a/src/resources/search/search.router.js +++ b/src/resources/search/search.router.js @@ -1,39 +1,39 @@ -import express from 'express' +import express from 'express'; import { RecordSearchData } from '../search/record.search.model'; import { getObjectResult, getObjectCount, getObjectFilters } from './search.repository'; const router = express.Router(); - + /** * {get} /api/search Search tools - * + * * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. * The free word search criteria can be improved on with node modules that specialize with searching i.e. 
js-search */ router.get('/', async (req, res) => { - var authorID = parseInt(req.query.userID); - var searchString = req.query.search || ""; //If blank then return all - //If searchString is applied, format any hyphenated words to enclose them as a phrase - if(searchString.includes('-') && !searchString.includes('\"')) { - // Matches on any whole word containing a hyphen - const regex = /(?=\S*[-])([a-zA-Z'-]+)/g; - // Surround matching words in quotation marks - searchString = searchString.replace(regex, "\"$1\""); - } - var tab = req.query.tab || ""; - let searchQuery = { $and: [{ activeflag: 'active' }] }; + var authorID = parseInt(req.query.userID); + var searchString = req.query.search || ''; //If blank then return all + //If searchString is applied, format any hyphenated words to enclose them as a phrase + if (searchString.includes('-') && !searchString.includes('"')) { + // Matches on any whole word containing a hyphen + const regex = /(?=\S*[-])([a-zA-Z'-]+)/g; + // Surround matching words in quotation marks + searchString = searchString.replace(regex, '"$1"'); + } + var tab = req.query.tab || ''; + let searchQuery = { $and: [{ activeflag: 'active' }] }; - if(req.query.form){ - searchQuery = {$and:[{$or:[{$and:[{activeflag:'review'},{authors:authorID}]},{activeflag:'active'}]}]}; - } + if (req.query.form) { + searchQuery = { $and: [{ $or: [{ $and: [{ activeflag: 'review' }, { authors: authorID }] }, { activeflag: 'active' }] }] }; + } - var searchAll = false; + var searchAll = false; - if (searchString.length > 0) { - searchQuery["$and"].push({ $text: { $search: searchString } }); + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); - /* datasetSearchString = '"' + searchString.split(' ').join('""') + '"'; + /* datasetSearchString = '"' + searchString.split(' ').join('""') + '"'; //The following code is a workaround for the way search works TODO:work with MDC to improve API if (searchString.match(/"/)) { //user has added quotes so pass string through @@ -42,105 +42,174 @@ router.get('/', async (req, res) => { //no quotes so lets a proximiy search datasetSearchString = '"'+searchString+'"~25'; } */ - } - else { - searchAll = true; - } - - var allResults = [], datasetResults = [], toolResults = [], projectResults = [], paperResults = [], personResults = [], courseResults = []; + } else { + searchAll = true; + } + + var allResults = [], + datasetResults = [], + toolResults = [], + projectResults = [], + paperResults = [], + personResults = [], + courseResults = []; - if (tab === '') { - allResults = await Promise.all([ - getObjectResult('dataset', searchAll, getObjectFilters(searchQuery, req, 'dataset'), req.query.datasetIndex || 0, req.query.maxResults || 40, req.query.datasetSort || ''), - getObjectResult('tool', searchAll, getObjectFilters(searchQuery, req, 'tool'), req.query.toolIndex || 0, req.query.maxResults || 40, req.query.toolSort || ''), - getObjectResult('project', searchAll, getObjectFilters(searchQuery, req, 'project'), req.query.projectIndex || 0, req.query.maxResults || 40, req.query.projectSort || ''), - getObjectResult('paper', searchAll, getObjectFilters(searchQuery, req, 'paper'), req.query.paperIndex || 0, req.query.maxResults || 40, req.query.paperSort || ''), - getObjectResult('person', searchAll, searchQuery, req.query.personIndex || 0, req.query.maxResults || 40, req.query.personSort), - getObjectResult('course', searchAll, getObjectFilters(searchQuery, req, 'course'), req.query.courseIndex || 0, 
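Note: the hyphen handling in this route wraps any whole word containing a hyphen in double quotes so that the MongoDB $text search treats it as a phrase instead of splitting on the hyphen. A standalone check of the same regex (the sample inputs are illustrative):

const quoteHyphenatedWords = searchString => {
    if (searchString.includes('-') && !searchString.includes('"')) {
        // Matches on any whole word containing a hyphen (same regex as the route above).
        const regex = /(?=\S*[-])([a-zA-Z'-]+)/g;
        searchString = searchString.replace(regex, '"$1"');
    }
    return searchString;
};

quoteHyphenatedWords('machine-learning tools'); // -> '"machine-learning" tools'
quoteHyphenatedWords('risk-factor analysis');   // -> '"risk-factor" analysis'
// The character class contains no digits, so a term like 'covid-19' only gets the
// letter/hyphen part quoted: -> '"covid-"19'.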
req.query.maxResults || 40, 'startdate') - ]); - } - else if (tab === 'Datasets') { - datasetResults = await Promise.all([ - getObjectResult('dataset', searchAll, getObjectFilters(searchQuery, req, 'dataset'), req.query.datasetIndex || 0, req.query.maxResults || 40, req.query.datasetSort || '') - ]); - } - else if (tab === 'Tools') { - toolResults = await Promise.all([ - getObjectResult('tool', searchAll, getObjectFilters(searchQuery, req, 'tool'), req.query.toolIndex || 0, req.query.maxResults || 40, req.query.toolSort || '') - ]); - } - else if (tab === 'Projects') { - projectResults = await Promise.all([ - getObjectResult('project', searchAll, getObjectFilters(searchQuery, req, 'project'), req.query.projectIndex || 0, req.query.maxResults || 40, req.query.projectSort || '') - ]); - } - else if (tab === 'Papers') { - paperResults = await Promise.all([ - getObjectResult('paper', searchAll, getObjectFilters(searchQuery, req, 'paper'), req.query.paperIndex || 0, req.query.maxResults || 40, req.query.paperSort || '') - ]); - } - else if (tab === 'People') { - personResults = await Promise.all([ - getObjectResult('person', searchAll, searchQuery, req.query.personIndex || 0, req.query.maxResults || 40, req.query.personSort || '') - ]); - } - else if (tab === 'Courses') { - courseResults = await Promise.all([ - getObjectResult('course', searchAll, getObjectFilters(searchQuery, req, 'course'), req.query.courseIndex || 0, req.query.maxResults || 40, 'startdate') - ]); - } + if (tab === '') { + allResults = await Promise.all([ + getObjectResult( + 'dataset', + searchAll, + getObjectFilters(searchQuery, req, 'dataset'), + req.query.datasetIndex || 0, + req.query.maxResults || 40, + req.query.datasetSort || '' + ), + getObjectResult( + 'tool', + searchAll, + getObjectFilters(searchQuery, req, 'tool'), + req.query.toolIndex || 0, + req.query.maxResults || 40, + req.query.toolSort || '' + ), + getObjectResult( + 'project', + searchAll, + getObjectFilters(searchQuery, req, 'project'), + req.query.projectIndex || 0, + req.query.maxResults || 40, + req.query.projectSort || '' + ), + getObjectResult( + 'paper', + searchAll, + getObjectFilters(searchQuery, req, 'paper'), + req.query.paperIndex || 0, + req.query.maxResults || 40, + req.query.paperSort || '' + ), + getObjectResult('person', searchAll, searchQuery, req.query.personIndex || 0, req.query.maxResults || 40, req.query.personSort), + getObjectResult( + 'course', + searchAll, + getObjectFilters(searchQuery, req, 'course'), + req.query.courseIndex || 0, + req.query.maxResults || 40, + 'startdate' + ), + ]); + } else if (tab === 'Datasets') { + datasetResults = await Promise.all([ + getObjectResult( + 'dataset', + searchAll, + getObjectFilters(searchQuery, req, 'dataset'), + req.query.datasetIndex || 0, + req.query.maxResults || 40, + req.query.datasetSort || '' + ), + ]); + } else if (tab === 'Tools') { + toolResults = await Promise.all([ + getObjectResult( + 'tool', + searchAll, + getObjectFilters(searchQuery, req, 'tool'), + req.query.toolIndex || 0, + req.query.maxResults || 40, + req.query.toolSort || '' + ), + ]); + } else if (tab === 'Projects') { + projectResults = await Promise.all([ + getObjectResult( + 'project', + searchAll, + getObjectFilters(searchQuery, req, 'project'), + req.query.projectIndex || 0, + req.query.maxResults || 40, + req.query.projectSort || '' + ), + ]); + } else if (tab === 'Papers') { + paperResults = await Promise.all([ + getObjectResult( + 'paper', + searchAll, + getObjectFilters(searchQuery, req, 'paper'), + 
req.query.paperIndex || 0, + req.query.maxResults || 40, + req.query.paperSort || '' + ), + ]); + } else if (tab === 'People') { + personResults = await Promise.all([ + getObjectResult('person', searchAll, searchQuery, req.query.personIndex || 0, req.query.maxResults || 40, req.query.personSort || ''), + ]); + } else if (tab === 'Courses') { + courseResults = await Promise.all([ + getObjectResult( + 'course', + searchAll, + getObjectFilters(searchQuery, req, 'course'), + req.query.courseIndex || 0, + req.query.maxResults || 40, + 'startdate' + ), + ]); + } - var summaryCounts = await Promise.all([ - getObjectCount('dataset', searchAll, getObjectFilters(searchQuery, req, 'dataset')), - getObjectCount('tool', searchAll, getObjectFilters(searchQuery, req, 'tool')), - getObjectCount('project', searchAll, getObjectFilters(searchQuery, req, 'project')), - getObjectCount('paper', searchAll, getObjectFilters(searchQuery, req, 'paper')), - getObjectCount('person', searchAll, searchQuery), - getObjectCount('course', searchAll, getObjectFilters(searchQuery, req, 'course')) - ]); + var summaryCounts = await Promise.all([ + getObjectCount('dataset', searchAll, getObjectFilters(searchQuery, req, 'dataset')), + getObjectCount('tool', searchAll, getObjectFilters(searchQuery, req, 'tool')), + getObjectCount('project', searchAll, getObjectFilters(searchQuery, req, 'project')), + getObjectCount('paper', searchAll, getObjectFilters(searchQuery, req, 'paper')), + getObjectCount('person', searchAll, searchQuery), + getObjectCount('course', searchAll, getObjectFilters(searchQuery, req, 'course')), + ]); - var summary = { - datasets: summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0, - tools: summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0, - projects: summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0, - papers: summaryCounts[3][0] !== undefined ? summaryCounts[3][0].count : 0, - persons: summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0, - courses: summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0 - } + var summary = { + datasets: summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0, + tools: summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0, + projects: summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0, + papers: summaryCounts[3][0] !== undefined ? summaryCounts[3][0].count : 0, + persons: summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0, + courses: summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0, + }; - let recordSearchData = new RecordSearchData(); - recordSearchData.searched = searchString; - recordSearchData.returned.dataset = summaryCounts[0][0] !== undefined ? summaryCounts[0][0].count : 0; - recordSearchData.returned.tool = summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0; - recordSearchData.returned.project = summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0; - recordSearchData.returned.paper = summaryCounts[3][0] !== undefined ? summaryCounts[3][0].count : 0; - recordSearchData.returned.person = summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0; - recordSearchData.returned.course = summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0; - recordSearchData.datesearched = Date.now(); - recordSearchData.save((err) => { }); + let recordSearchData = new RecordSearchData(); + recordSearchData.searched = searchString; + recordSearchData.returned.dataset = summaryCounts[0][0] !== undefined ? 
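// summaryCounts preserves the Promise.all order above (dataset, tool, project, paper,
// person, course); an empty aggregation result is recorded as a zero count.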
summaryCounts[0][0].count : 0; + recordSearchData.returned.tool = summaryCounts[1][0] !== undefined ? summaryCounts[1][0].count : 0; + recordSearchData.returned.project = summaryCounts[2][0] !== undefined ? summaryCounts[2][0].count : 0; + recordSearchData.returned.paper = summaryCounts[3][0] !== undefined ? summaryCounts[3][0].count : 0; + recordSearchData.returned.person = summaryCounts[4][0] !== undefined ? summaryCounts[4][0].count : 0; + recordSearchData.returned.course = summaryCounts[5][0] !== undefined ? summaryCounts[5][0].count : 0; + recordSearchData.datesearched = Date.now(); + recordSearchData.save(err => {}); - if (tab === '') { - return res.json({ - success: true, - datasetResults: allResults[0], - toolResults: allResults[1], - projectResults: allResults[2], - paperResults: allResults[3], - personResults: allResults[4], - courseResults: allResults[5], - summary: summary - }); - } - return res.json({ - success: true, - datasetResults: datasetResults[0], - toolResults: toolResults[0], - projectResults: projectResults[0], - paperResults: paperResults[0], - personResults: personResults[0], - courseResults: courseResults[0], - summary: summary - }); + if (tab === '') { + return res.json({ + success: true, + datasetResults: allResults[0], + toolResults: allResults[1], + projectResults: allResults[2], + paperResults: allResults[3], + personResults: allResults[4], + courseResults: allResults[5], + summary: summary, + }); + } + return res.json({ + success: true, + datasetResults: datasetResults[0], + toolResults: toolResults[0], + projectResults: projectResults[0], + paperResults: paperResults[0], + personResults: personResults[0], + courseResults: courseResults[0], + summary: summary, + }); }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/src/resources/stats/kpis.router.js b/src/resources/stats/kpis.router.js index a05347a1..02112233 100644 --- a/src/resources/stats/kpis.router.js +++ b/src/resources/stats/kpis.router.js @@ -1,396 +1,419 @@ import express from 'express'; -import { RecordSearchData } from '../search/record.search.model'; +import { RecordSearchData } from '../search/record.search.model'; import { Data } from '../tool/data.model'; -import {DataRequestModel} from '../datarequests/datarequests.model'; - -const router = express.Router() - -router.get('', async (req, res) => { - - var selectedMonthStart = new Date(req.query.selectedDate); - selectedMonthStart.setMonth(selectedMonthStart.getMonth()); - selectedMonthStart.setDate(1); - selectedMonthStart.setHours(0,0,0,0); - - var selectedMonthEnd = new Date(req.query.selectedDate); - selectedMonthEnd.setMonth(selectedMonthEnd.getMonth()+1); - selectedMonthEnd.setDate(0); - selectedMonthEnd.setHours(23,59,59,999); - - switch (req.query.kpi) { - case 'technicalmetadata': - var totalDatasetsQuery = [ - { - $facet: { - TotalDataSets: [ - { - $match: { - $and: [ - { activeflag: "active" }, - { type: "dataset" }, - { "datasetfields.publisher": { $ne: "OTHER > HEALTH DATA RESEARCH UK" } }, - { "datasetfields.publisher": { $ne: "HDR UK" } }, - ], - }, - }, - { $count: "TotalDataSets" }, - ], - TotalMetaData: [ - { - $match: { - activeflag: "active", - type: "dataset", - "datasetfields.technicaldetails": { - $exists: true, - $not: { - $size: 0, - }, - }, - }, - }, - { - $count: "TotalMetaData", - }, - ], - }, - }, - ]; - - var q = Data.aggregate(totalDatasetsQuery); - - var result; - q.exec((err, dataSets) => { - if (err) return res.json({ success: false, error: err }); - - if (typeof 
dataSets[0].TotalDataSets[0] === "undefined") { - dataSets[0].TotalDataSets[0].TotalDataSets = 0; - } - if (typeof dataSets[0].TotalMetaData[0] === "undefined") { - dataSets[0].TotalMetaData[0].TotalMetaData = 0; - } - - result = res.json({ - success: true, - data: { - totalDatasets: dataSets[0].TotalDataSets[0].TotalDataSets, - datasetsMetadata: dataSets[0].TotalMetaData[0].TotalMetaData, - }, - }); - }); - - return result; - break; - - case 'searchanddar': - var result; - - var aggregateQuerySearches = [ - { - $facet: { - "totalMonth": [ - { "$match": { "datesearched": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} } }, - - { - $group: { - _id: 'totalMonth', - count: { $sum: 1 } - }, - } - ], - "noResultsMonth": [ - { "$match": { $and: [{"datesearched": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} }, {"returned.dataset": 0}, {"returned.tool": 0}, {"returned.project": 0}, {"returned.paper": 0}, {"returned.person": 0} ] } }, - { - $group: { - _id: 'noResultsMonth', - count: { $sum: 1 } - }, - } - ], - "accessRequestsMonth": [ - //used only createdAt first { "$match": { "createdAt": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} } }, - // some older fields only have timeStamp --> only timeStamp in the production db - //checking for both currently - { $match: { - $and: [ - { - $or: [ - { "createdAt": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} }, - { "timeStamp": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} } - ] - }, - { - $or: [ - {"applicationStatus":"submitted"}, - {"applicationStatus":"approved"}, - {"applicationStatus":"rejected"}, - {"applicationStatus": "inReview" }, - {"applicationStatus":"approved with conditions"} - ] - } - ] - } - } - ], - } - }]; - - var q = RecordSearchData.aggregate(aggregateQuerySearches); - - var y = DataRequestModel.aggregate(aggregateQuerySearches); - - - q.exec((err, dataSearches) => { - if (err) return res.json({ success: false, error: err }); - - if (typeof dataSearches[0].totalMonth[0] === "undefined") { - dataSearches[0].totalMonth[0] = { count: 0 }; - } - if (typeof dataSearches[0].noResultsMonth[0] === "undefined") { - dataSearches[0].noResultsMonth[0] = { count: 0 }; - } - - y.exec(async(err, accessRequests) => { - let hdrDatasetID = await getHdrDatasetId() - let hdrDatasetIds = []; - hdrDatasetID.map((hdrDatasetid) => {hdrDatasetIds.push(hdrDatasetid.datasetid)}) - let accessRequestsMonthCount = 0; - - if (err) return res.json({ success: false, error: err }); - - accessRequests[0].accessRequestsMonth.map((accessRequest) => { - if (accessRequest.dataSetId && accessRequest.dataSetId.length > 0 && !hdrDatasetIds.includes(accessRequest.dataSetId)) { - accessRequestsMonthCount++ - } - - if(accessRequest.datasetIds && accessRequest.datasetIds.length > 0){ - accessRequest.datasetIds.map((datasetid) => { - if (!hdrDatasetIds.includes(datasetid)) { - accessRequestsMonthCount++ - } - }) - } - }) - - result = res.json( - { - 'success': true, 'data': - { - 'totalMonth': dataSearches[0].totalMonth[0].count, - 'noResultsMonth': dataSearches[0].noResultsMonth[0].count, - 'accessRequestsMonth': accessRequestsMonthCount - } - } - ) - }); - }); - - return result; - break; - - case 'uptime': - const monitoring = require('@google-cloud/monitoring'); - const projectId = 'hdruk-gateway'; - const client = new monitoring.MetricServiceClient(); - - var result; - - const request = { - name: client.projectPath(projectId), - filter: 'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND 
metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND metric.label."checker_location"="eur-belgium"', - - interval: { - startTime: { - seconds: selectedMonthStart.getTime() / 1000, - }, - endTime: { - seconds: selectedMonthEnd.getTime() / 1000, - }, - }, - aggregation: { - alignmentPeriod: { - seconds: '86400s', - }, - crossSeriesReducer: 'REDUCE_NONE', - groupByFields: [ - 'metric.label."checker_location"', - 'resource.label."instance_id"' - ], - perSeriesAligner: 'ALIGN_FRACTION_TRUE', - }, - - }; - - // Writes time series data - const [timeSeries] = await client.listTimeSeries(request); - var dailyUptime = []; - var averageUptime; - - timeSeries.forEach(data => { - - data.points.forEach(data => { - dailyUptime.push(data.value.doubleValue) - }) - - averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; - - result = res.json( - { - 'success': true, 'data': averageUptime - } - ) - }); - - return result; - break; - - case 'topdatasets': - let DarInfoMap = new Map() - - let hdrDatasetID = await getHdrDatasetId() - let hdrDatasetIds = []; - hdrDatasetID.map((hdrDatasetid) => {hdrDatasetIds.push(hdrDatasetid.datasetid)}) - - await getDarIds(req, selectedMonthStart, selectedMonthEnd) - .then(async (data) => { - for (let datasetIdObject in data) { - if(data[datasetIdObject].datasetIds && data[datasetIdObject].datasetIds.length > 0){ - - for (let datasetId in data[datasetIdObject].datasetIds) { - - if(!hdrDatasetIds.includes(data[datasetIdObject].datasetIds[datasetId])){ - let result = await getDarInfo(data[datasetIdObject].datasetIds[datasetId]) - - if(result.length > 0){ - if (DarInfoMap.has(data[datasetIdObject].datasetIds[datasetId])){ - let count = DarInfoMap.get(data[datasetIdObject].datasetIds[datasetId]) - count.requests++ - DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], {"requests": count.requests, "name": result[0].name, "publisher": result[0].datasetfields.publisher}); - } else { - DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], {"requests": 1, "name": result[0].name, "publisher": result[0].datasetfields.publisher}); - } - } - } - } - } - else - if(data[datasetIdObject].dataSetId && data[datasetIdObject].dataSetId.length > 0 && !hdrDatasetIds.includes(data[datasetIdObject].dataSetId) ){ - let result = await getDarInfo(data[datasetIdObject].dataSetId) - if(result.length > 0){ - if (DarInfoMap.has(data[datasetIdObject].dataSetId)){ - let count = DarInfoMap.get(data[datasetIdObject].dataSetId) - count.requests++ - DarInfoMap.set(data[datasetIdObject].dataSetId, {"requests": count.requests, "name": result[0].name, "publisher": result[0].datasetfields.publisher}); - } else { - DarInfoMap.set(data[datasetIdObject].dataSetId, {"requests": 1, "name": result[0].name, "publisher": result[0].datasetfields.publisher}); - } - } - } - } - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - - let sortedResults = Array.from(DarInfoMap).sort((a,b) => { return b[1].requests - a[1].requests}) - - sortedResults = sortedResults.slice(0, 5) - - return res.json({ success: true, data: sortedResults }); - - break; - } - }); - -module.exports = router - -export const getHdrDatasetId = async() => { - return new Promise(async (resolve, reject) => { - let hdrDatasetID = Data.find( - { - "datasetfields.publisher": { "$in": ["HDR UK", "OTHER > HEALTH DATA RESEARCH UK"]} - }, - { - _id: 0, - datasetid: 1, - } - ); - - hdrDatasetID.exec((err, data) => { - if (err) reject(err); - else resolve(data); - }); - }) -} - -const 
getDarIds = async(req, selectedMonthStart, selectedMonthEnd) => { - return new Promise(async (resolve, reject) => { - - let DarDatasetIds = DataRequestModel.find( - { - // VALUES YOU ARE CHECKING MATCH SPECIFIED CRITERIA IE. WHERE - $and: [ - { - $or: [ - { - createdAt: { - $gte: selectedMonthStart, - $lt: selectedMonthEnd - } - }, - { - timeStamp: { - $gte: selectedMonthStart, - $lt: selectedMonthEnd - } - } - ] - }, - { - $or: [ - { applicationStatus: "submitted" }, - { applicationStatus: "approved" }, - { applicationStatus: "rejected" }, - { applicationStatus: "inReview" }, - {applicationStatus: "approved with conditions" } - - ] - } - ] - }, - { - // THE FIELDS YOU WANT TO RETURN - _id: 0, - dataSetId: 1, - datasetIds: 1 - } - ); - - DarDatasetIds.exec((err, data) => { - if (err) reject(err); - return resolve(data); - }); - - }); -} - -const getDarInfo = async(id) => { -return new Promise(async (resolve, reject) => { - let DarDatasetInfo = Data.find( - { - datasetid: id - }, - { - _id: 0, - datasetid: 1, - name: 1, - //RETURN EMBEDDED FIELD - "datasetfields.publisher": 1 - } - ); - - DarDatasetInfo.exec((err, data) => { - if (err) reject(err); - else resolve(data); - }); - }) -} +import { DataRequestModel } from '../datarequests/datarequests.model'; + +const router = express.Router(); + +router.get('', async (req, res) => { + var selectedMonthStart = new Date(req.query.selectedDate); + selectedMonthStart.setMonth(selectedMonthStart.getMonth()); + selectedMonthStart.setDate(1); + selectedMonthStart.setHours(0, 0, 0, 0); + + var selectedMonthEnd = new Date(req.query.selectedDate); + selectedMonthEnd.setMonth(selectedMonthEnd.getMonth() + 1); + selectedMonthEnd.setDate(0); + selectedMonthEnd.setHours(23, 59, 59, 999); + + switch (req.query.kpi) { + case 'technicalmetadata': + var totalDatasetsQuery = [ + { + $facet: { + TotalDataSets: [ + { + $match: { + $and: [ + { activeflag: 'active' }, + { type: 'dataset' }, + { 'datasetfields.publisher': { $ne: 'OTHER > HEALTH DATA RESEARCH UK' } }, + { 'datasetfields.publisher': { $ne: 'HDR UK' } }, + ], + }, + }, + { $count: 'TotalDataSets' }, + ], + TotalMetaData: [ + { + $match: { + activeflag: 'active', + type: 'dataset', + 'datasetfields.technicaldetails': { + $exists: true, + $not: { + $size: 0, + }, + }, + }, + }, + { + $count: 'TotalMetaData', + }, + ], + }, + }, + ]; + + var q = Data.aggregate(totalDatasetsQuery); + + var result; + q.exec((err, dataSets) => { + if (err) return res.json({ success: false, error: err }); + + if (typeof dataSets[0].TotalDataSets[0] === 'undefined') { + dataSets[0].TotalDataSets[0].TotalDataSets = 0; + } + if (typeof dataSets[0].TotalMetaData[0] === 'undefined') { + dataSets[0].TotalMetaData[0].TotalMetaData = 0; + } + + result = res.json({ + success: true, + data: { + totalDatasets: dataSets[0].TotalDataSets[0].TotalDataSets, + datasetsMetadata: dataSets[0].TotalMetaData[0].TotalMetaData, + }, + }); + }); + + return result; + break; + + case 'searchanddar': + var result; + + var aggregateQuerySearches = [ + { + $facet: { + totalMonth: [ + { $match: { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } } }, + + { + $group: { + _id: 'totalMonth', + count: { $sum: 1 }, + }, + }, + ], + noResultsMonth: [ + { + $match: { + $and: [ + { datesearched: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, + { 'returned.dataset': 0 }, + { 'returned.tool': 0 }, + { 'returned.project': 0 }, + { 'returned.paper': 0 }, + { 'returned.person': 0 }, + ], + }, + }, + { + $group: { + _id: 'noResultsMonth', + count: 
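// 'noResultsMonth' counts searches in the selected month for which every entity type
// (dataset, tool, project, paper, person) came back empty.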
{ $sum: 1 }, + }, + }, + ], + accessRequestsMonth: [ + //used only createdAt first { "$match": { "createdAt": {"$gte": selectedMonthStart, "$lt": selectedMonthEnd} } }, + // some older fields only have timeStamp --> only timeStamp in the production db + //checking for both currently + { + $match: { + $and: [ + { + $or: [ + { createdAt: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, + { timeStamp: { $gte: selectedMonthStart, $lt: selectedMonthEnd } }, + ], + }, + { + $or: [ + { applicationStatus: 'submitted' }, + { applicationStatus: 'approved' }, + { applicationStatus: 'rejected' }, + { applicationStatus: 'inReview' }, + { applicationStatus: 'approved with conditions' }, + ], + }, + ], + }, + }, + ], + }, + }, + ]; + + var q = RecordSearchData.aggregate(aggregateQuerySearches); + + var y = DataRequestModel.aggregate(aggregateQuerySearches); + + q.exec((err, dataSearches) => { + if (err) return res.json({ success: false, error: err }); + + if (typeof dataSearches[0].totalMonth[0] === 'undefined') { + dataSearches[0].totalMonth[0] = { count: 0 }; + } + if (typeof dataSearches[0].noResultsMonth[0] === 'undefined') { + dataSearches[0].noResultsMonth[0] = { count: 0 }; + } + + y.exec(async (err, accessRequests) => { + let hdrDatasetID = await getHdrDatasetId(); + let hdrDatasetIds = []; + hdrDatasetID.map(hdrDatasetid => { + hdrDatasetIds.push(hdrDatasetid.datasetid); + }); + let accessRequestsMonthCount = 0; + + if (err) return res.json({ success: false, error: err }); + + accessRequests[0].accessRequestsMonth.map(accessRequest => { + if (accessRequest.dataSetId && accessRequest.dataSetId.length > 0 && !hdrDatasetIds.includes(accessRequest.dataSetId)) { + accessRequestsMonthCount++; + } + + if (accessRequest.datasetIds && accessRequest.datasetIds.length > 0) { + accessRequest.datasetIds.map(datasetid => { + if (!hdrDatasetIds.includes(datasetid)) { + accessRequestsMonthCount++; + } + }); + } + }); + + result = res.json({ + success: true, + data: { + totalMonth: dataSearches[0].totalMonth[0].count, + noResultsMonth: dataSearches[0].noResultsMonth[0].count, + accessRequestsMonth: accessRequestsMonthCount, + }, + }); + }); + }); + + return result; + break; + + case 'uptime': + const monitoring = require('@google-cloud/monitoring'); + const projectId = 'hdruk-gateway'; + const client = new monitoring.MetricServiceClient(); + + var result; + + const request = { + name: client.projectPath(projectId), + filter: + 'metric.type="monitoring.googleapis.com/uptime_check/check_passed" AND resource.type="uptime_url" AND metric.label."check_id"="check-production-web-app-qsxe8fXRrBo" AND metric.label."checker_location"="eur-belgium"', + + interval: { + startTime: { + seconds: selectedMonthStart.getTime() / 1000, + }, + endTime: { + seconds: selectedMonthEnd.getTime() / 1000, + }, + }, + aggregation: { + alignmentPeriod: { + seconds: '86400s', + }, + crossSeriesReducer: 'REDUCE_NONE', + groupByFields: ['metric.label."checker_location"', 'resource.label."instance_id"'], + perSeriesAligner: 'ALIGN_FRACTION_TRUE', + }, + }; + + // Writes time series data + const [timeSeries] = await client.listTimeSeries(request); + var dailyUptime = []; + var averageUptime; + + timeSeries.forEach(data => { + data.points.forEach(data => { + dailyUptime.push(data.value.doubleValue); + }); + + averageUptime = (dailyUptime.reduce((a, b) => a + b, 0) / dailyUptime.length) * 100; + + result = res.json({ + success: true, + data: averageUptime, + }); + }); + + return result; + break; + + case 'topdatasets': + let DarInfoMap = 
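// Keyed by dataset id; each entry accumulates { requests, name, publisher } so the five
// most-requested datasets can be ranked at the end of this case. Illustrative shape only:
//   DarInfoMap.get('some-dataset-id') -> { requests: 3, name: 'Example dataset', publisher: 'EXAMPLE PUBLISHER' }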
new Map(); + + let hdrDatasetID = await getHdrDatasetId(); + let hdrDatasetIds = []; + hdrDatasetID.map(hdrDatasetid => { + hdrDatasetIds.push(hdrDatasetid.datasetid); + }); + + await getDarIds(req, selectedMonthStart, selectedMonthEnd) + .then(async data => { + for (let datasetIdObject in data) { + if (data[datasetIdObject].datasetIds && data[datasetIdObject].datasetIds.length > 0) { + for (let datasetId in data[datasetIdObject].datasetIds) { + if (!hdrDatasetIds.includes(data[datasetIdObject].datasetIds[datasetId])) { + let result = await getDarInfo(data[datasetIdObject].datasetIds[datasetId]); + + if (result.length > 0) { + if (DarInfoMap.has(data[datasetIdObject].datasetIds[datasetId])) { + let count = DarInfoMap.get(data[datasetIdObject].datasetIds[datasetId]); + count.requests++; + DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], { + requests: count.requests, + name: result[0].name, + publisher: result[0].datasetfields.publisher, + }); + } else { + DarInfoMap.set(data[datasetIdObject].datasetIds[datasetId], { + requests: 1, + name: result[0].name, + publisher: result[0].datasetfields.publisher, + }); + } + } + } + } + } else if ( + data[datasetIdObject].dataSetId && + data[datasetIdObject].dataSetId.length > 0 && + !hdrDatasetIds.includes(data[datasetIdObject].dataSetId) + ) { + let result = await getDarInfo(data[datasetIdObject].dataSetId); + if (result.length > 0) { + if (DarInfoMap.has(data[datasetIdObject].dataSetId)) { + let count = DarInfoMap.get(data[datasetIdObject].dataSetId); + count.requests++; + DarInfoMap.set(data[datasetIdObject].dataSetId, { + requests: count.requests, + name: result[0].name, + publisher: result[0].datasetfields.publisher, + }); + } else { + DarInfoMap.set(data[datasetIdObject].dataSetId, { + requests: 1, + name: result[0].name, + publisher: result[0].datasetfields.publisher, + }); + } + } + } + } + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + + let sortedResults = Array.from(DarInfoMap).sort((a, b) => { + return b[1].requests - a[1].requests; + }); + + sortedResults = sortedResults.slice(0, 5); + + return res.json({ success: true, data: sortedResults }); + + break; + } +}); + +module.exports = router; + +export const getHdrDatasetId = async () => { + return new Promise(async (resolve, reject) => { + let hdrDatasetID = Data.find( + { + 'datasetfields.publisher': { $in: ['HDR UK', 'OTHER > HEALTH DATA RESEARCH UK'] }, + }, + { + _id: 0, + datasetid: 1, + } + ); + + hdrDatasetID.exec((err, data) => { + if (err) reject(err); + else resolve(data); + }); + }); +}; + +const getDarIds = async (req, selectedMonthStart, selectedMonthEnd) => { + return new Promise(async (resolve, reject) => { + let DarDatasetIds = DataRequestModel.find( + { + // VALUES YOU ARE CHECKING MATCH SPECIFIED CRITERIA IE. 
WHERE + $and: [ + { + $or: [ + { + createdAt: { + $gte: selectedMonthStart, + $lt: selectedMonthEnd, + }, + }, + { + timeStamp: { + $gte: selectedMonthStart, + $lt: selectedMonthEnd, + }, + }, + ], + }, + { + $or: [ + { applicationStatus: 'submitted' }, + { applicationStatus: 'approved' }, + { applicationStatus: 'rejected' }, + { applicationStatus: 'inReview' }, + { applicationStatus: 'approved with conditions' }, + ], + }, + ], + }, + { + // THE FIELDS YOU WANT TO RETURN + _id: 0, + dataSetId: 1, + datasetIds: 1, + } + ); + + DarDatasetIds.exec((err, data) => { + if (err) reject(err); + return resolve(data); + }); + }); +}; + +const getDarInfo = async id => { + return new Promise(async (resolve, reject) => { + let DarDatasetInfo = Data.find( + { + datasetid: id, + }, + { + _id: 0, + datasetid: 1, + name: 1, + //RETURN EMBEDDED FIELD + 'datasetfields.publisher': 1, + } + ); + + DarDatasetInfo.exec((err, data) => { + if (err) reject(err); + else resolve(data); + }); + }); +}; diff --git a/src/resources/stats/metrics.model.js b/src/resources/stats/metrics.model.js index 4762df2e..f7904cd3 100644 --- a/src/resources/stats/metrics.model.js +++ b/src/resources/stats/metrics.model.js @@ -1,13 +1,13 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; const MetricsSchema = new Schema( - { - uptime: Number - }, - { - collection: 'metrics', - timestamps: true - } + { + uptime: Number, + }, + { + collection: 'metrics', + timestamps: true, + } ); -export const MetricsData = model('MetricsModel', MetricsSchema); \ No newline at end of file +export const MetricsData = model('MetricsModel', MetricsSchema); diff --git a/src/resources/stats/stats.router.js b/src/resources/stats/stats.router.js index 142d64ba..c50548c5 100644 --- a/src/resources/stats/stats.router.js +++ b/src/resources/stats/stats.router.js @@ -1,432 +1,424 @@ import express from 'express'; import { RecordSearchData } from '../search/record.search.model'; import { Data } from '../tool/data.model'; -import {DataRequestModel} from '../datarequests/datarequests.model'; -import {getHdrDatasetId} from './kpis.router'; - -const router = express.Router() +import { DataRequestModel } from '../datarequests/datarequests.model'; +import { getHdrDatasetId } from './kpis.router'; + +const router = express.Router(); /** * {get} /stats get some basic high level stats - * + * * This will return a JSON document to show high level stats */ -router.get('', async (req, res) => { - - switch (req.query.rank) { - - case undefined: - - var result; - - //get some dates for query - var lastDay = new Date(); - lastDay.setDate(lastDay.getDate() - 1); - - var lastWeek = new Date(); - lastWeek.setDate(lastWeek.getDate() - 7); - - var lastMonth = new Date(); - lastMonth.setMonth(lastMonth.getMonth() - 1); - - var lastYear = new Date(); - lastYear.setYear(lastYear.getYear() - 1); - - var aggregateQuerySearches = [ - { - $facet: { - "lastDay": [ - { "$match": { "datesearched": { "$gt": lastDay } } }, - { - $group: { - _id: 'lastDay', - count: { $sum: 1 } - }, - } - ], - "lastWeek": [ - { "$match": { "datesearched": { "$gt": lastWeek } } }, - { - $group: { - _id: 'lastWeek', - count: { $sum: 1 } - }, - } - ], - "lastMonth": [ - { "$match": { "datesearched": { "$gt": lastMonth } } }, - { - $group: { - _id: 'lastMonth', - count: { $sum: 1 } - }, - } - ], - "lastYear": [ - { "$match": { "datesearched": { "$gt": lastYear } } }, - { - $group: { - _id: 'lastYear', - count: { $sum: 1 } - }, - } - ], - } - }]; - - //set the aggregate queries - 
var aggregateQueryTypes = [ +router.get('', async (req, res) => { + switch (req.query.rank) { + case undefined: + var result; + + //get some dates for query + var lastDay = new Date(); + lastDay.setDate(lastDay.getDate() - 1); + + var lastWeek = new Date(); + lastWeek.setDate(lastWeek.getDate() - 7); + + var lastMonth = new Date(); + lastMonth.setMonth(lastMonth.getMonth() - 1); + + var lastYear = new Date(); + lastYear.setYear(lastYear.getYear() - 1); + + var aggregateQuerySearches = [ + { + $facet: { + lastDay: [ + { $match: { datesearched: { $gt: lastDay } } }, + { + $group: { + _id: 'lastDay', + count: { $sum: 1 }, + }, + }, + ], + lastWeek: [ + { $match: { datesearched: { $gt: lastWeek } } }, + { + $group: { + _id: 'lastWeek', + count: { $sum: 1 }, + }, + }, + ], + lastMonth: [ + { $match: { datesearched: { $gt: lastMonth } } }, + { + $group: { + _id: 'lastMonth', + count: { $sum: 1 }, + }, + }, + ], + lastYear: [ + { $match: { datesearched: { $gt: lastYear } } }, + { + $group: { + _id: 'lastYear', + count: { $sum: 1 }, + }, + }, + ], + }, + }, + ]; + + //set the aggregate queries + var aggregateQueryTypes = [ { $match: { $and: [ - { activeflag: "active" }, - { "datasetfields.publisher": { $ne: "OTHER > HEALTH DATA RESEARCH UK" } }, - { "datasetfields.publisher": { $ne: "HDR UK" } }, + { activeflag: 'active' }, + { 'datasetfields.publisher': { $ne: 'OTHER > HEALTH DATA RESEARCH UK' } }, + { 'datasetfields.publisher': { $ne: 'HDR UK' } }, ], }, }, - { $group: { _id: "$type", count: { $sum: 1 } } }, - ]; - - var q = RecordSearchData.aggregate(aggregateQuerySearches); + { $group: { _id: '$type', count: { $sum: 1 } } }, + ]; - var aggregateAccessRequests = [ - { + var q = RecordSearchData.aggregate(aggregateQuerySearches); + + var aggregateAccessRequests = [ + { $match: { $or: [ - { applicationStatus: "submitted" }, - { applicationStatus: "approved" }, - { applicationStatus: "rejected" }, - { applicationStatus: "inReview" }, - {applicationStatus:"approved with conditions"} + { applicationStatus: 'submitted' }, + { applicationStatus: 'approved' }, + { applicationStatus: 'rejected' }, + { applicationStatus: 'inReview' }, + { applicationStatus: 'approved with conditions' }, ], }, - } + }, ]; - var y = DataRequestModel.aggregate(aggregateAccessRequests); - - q.exec((err, dataSearches) => { - if (err) return res.json({ success: false, error: err }); - - var x = Data.aggregate(aggregateQueryTypes); - x.exec((errx, dataTypes) => { - if (errx) return res.json({ success: false, error: errx }); - - var counts = {}; //hold the type (i.e. 
tool, person, project, access requests) counts data - for (var i = 0; i < dataTypes.length; i++) { //format the result in a clear and dynamic way - counts[dataTypes[i]._id] = dataTypes[i].count; - } - - y.exec(async(err, accessRequests) => { - let hdrDatasetID = await getHdrDatasetId() - let hdrDatasetIds = []; - hdrDatasetID.map((hdrDatasetid) => {hdrDatasetIds.push(hdrDatasetid.datasetid)}) - let accessRequestsCount = 0; - - if (err) return res.json({ success: false, error: err }); - - accessRequests.map((accessRequest) => { - if(accessRequest.datasetIds && accessRequest.datasetIds.length > 0){ - accessRequest.datasetIds.map((datasetid) => { - if (!hdrDatasetIds.includes(datasetid)) { - accessRequestsCount++ - } - }) - } - - counts["accessRequests"] = accessRequestsCount; - }) - - if (typeof dataSearches[0].lastDay[0] === "undefined") { - dataSearches[0].lastDay[0] = { count: 0 }; - } - if (typeof dataSearches[0].lastWeek[0] === "undefined") { - dataSearches[0].lastWeek[0] = { count: 0 }; - } - if (typeof dataSearches[0].lastMonth[0] === "undefined") { - dataSearches[0].lastMonth[0] = { count: 0 }; - } - if (typeof dataSearches[0].lastYear[0] === "undefined") { - dataSearches[0].lastYear[0] = { count: 0 }; - } - - result = res.json( - { - 'success': true, 'data': - { - 'typecounts': counts, - 'daycounts': { - 'day': dataSearches[0].lastDay[0].count, - 'week': dataSearches[0].lastWeek[0].count, - 'month': dataSearches[0].lastMonth[0].count, - 'year': dataSearches[0].lastYear[0].count, - - }, - } - } - ); - }); - }); - }); - - return result; - break; - - case 'recent': - var q = RecordSearchData.aggregate([ - { $match: { $or: [ { "returned.tool": { $gt : 0}}, { "returned.project": { $gt : 0}}, { "returned.person": { $gt : 0}} ] }}, - { - $group: { - _id: {$toLower: "$searched"}, - count: { $sum: 1 }, - returned: { $first: "$returned" } - } - }, - {$sort:{ datesearched : 1}} - ]).limit(10); - - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - break; - - case 'popular': - var q = Data.find({ counter: { $gt : 0} }).sort({ counter: -1 }).limit(10); - - if (req.query.type) { - q = Data.find({ $and:[ {type : req.query.type, counter: { $gt : 0} }]}).sort({ counter: -1 }).limit(10); - } - - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - break; - - - case 'updates': - var q = Data.find({activeflag: "active", counter: { $gt : 0} }).sort({ updatedon: -1 }).limit(10); - - if (req.query.type) { - q = Data.find({ $and:[ {type : req.query.type, activeflag: "active", updatedon: { $gt : 0} }]}).sort({ counter: -1 }).limit(10); - } - - q.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data }); - }); - break; - - case 'unmet': - switch (req.query.type) { - case 'Datasets': - req.entity = "dataset"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - - case 'Tools': - req.entity = "tool"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - - case 'Projects': - req.entity = "project"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data 
}); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - - case 'Courses': - req.entity = "course"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - - case 'Papers': - req.entity = "paper"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - - case 'People': - req.entity = "person"; - await getUnmetSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - break; - } - - } + var y = DataRequestModel.aggregate(aggregateAccessRequests); + + q.exec((err, dataSearches) => { + if (err) return res.json({ success: false, error: err }); + + var x = Data.aggregate(aggregateQueryTypes); + x.exec((errx, dataTypes) => { + if (errx) return res.json({ success: false, error: errx }); + + var counts = {}; //hold the type (i.e. tool, person, project, access requests) counts data + for (var i = 0; i < dataTypes.length; i++) { + //format the result in a clear and dynamic way + counts[dataTypes[i]._id] = dataTypes[i].count; + } + + y.exec(async (err, accessRequests) => { + let hdrDatasetID = await getHdrDatasetId(); + let hdrDatasetIds = []; + hdrDatasetID.map(hdrDatasetid => { + hdrDatasetIds.push(hdrDatasetid.datasetid); + }); + let accessRequestsCount = 0; + + if (err) return res.json({ success: false, error: err }); + + accessRequests.map(accessRequest => { + if (accessRequest.datasetIds && accessRequest.datasetIds.length > 0) { + accessRequest.datasetIds.map(datasetid => { + if (!hdrDatasetIds.includes(datasetid)) { + accessRequestsCount++; + } + }); + } + + counts['accessRequests'] = accessRequestsCount; + }); + + if (typeof dataSearches[0].lastDay[0] === 'undefined') { + dataSearches[0].lastDay[0] = { count: 0 }; + } + if (typeof dataSearches[0].lastWeek[0] === 'undefined') { + dataSearches[0].lastWeek[0] = { count: 0 }; + } + if (typeof dataSearches[0].lastMonth[0] === 'undefined') { + dataSearches[0].lastMonth[0] = { count: 0 }; + } + if (typeof dataSearches[0].lastYear[0] === 'undefined') { + dataSearches[0].lastYear[0] = { count: 0 }; + } + + result = res.json({ + success: true, + data: { + typecounts: counts, + daycounts: { + day: dataSearches[0].lastDay[0].count, + week: dataSearches[0].lastWeek[0].count, + month: dataSearches[0].lastMonth[0].count, + year: dataSearches[0].lastYear[0].count, + }, + }, + }); + }); + }); + }); + + return result; + break; + + case 'recent': + var q = RecordSearchData.aggregate([ + { $match: { $or: [{ 'returned.tool': { $gt: 0 } }, { 'returned.project': { $gt: 0 } }, { 'returned.person': { $gt: 0 } }] } }, + { + $group: { + _id: { $toLower: '$searched' }, + count: { $sum: 1 }, + returned: { $first: '$returned' }, + }, + }, + { $sort: { datesearched: 1 } }, + ]).limit(10); + + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + break; + + case 'popular': + var q = Data.find({ counter: { $gt: 0 } }) + .sort({ counter: -1 }) + .limit(10); + + if (req.query.type) { + q = Data.find({ $and: [{ type: req.query.type, counter: { $gt: 0 } }] }) + .sort({ counter: -1 }) + .limit(10); + } + + q.exec((err, data) => { + if (err) return res.json({ success: false, error: 
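// Errors are reported in-band ({ success: false, error }) with an HTTP 200 rather than
// an error status code, matching the pattern used throughout this router.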
err }); + return res.json({ success: true, data: data }); + }); + break; + + case 'updates': + var q = Data.find({ activeflag: 'active', counter: { $gt: 0 } }) + .sort({ updatedon: -1 }) + .limit(10); + + if (req.query.type) { + q = Data.find({ $and: [{ type: req.query.type, activeflag: 'active', updatedon: { $gt: 0 } }] }) + .sort({ counter: -1 }) + .limit(10); + } + + q.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data }); + }); + break; + + case 'unmet': + switch (req.query.type) { + case 'Datasets': + req.entity = 'dataset'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + + case 'Tools': + req.entity = 'tool'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + + case 'Projects': + req.entity = 'project'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + + case 'Courses': + req.entity = 'course'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + + case 'Papers': + req.entity = 'paper'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + + case 'People': + req.entity = 'person'; + await getUnmetSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); + break; + } + } +}); + +router.get('/topSearches', async (req, res) => { + await getTopSearches(req) + .then(data => { + return res.json({ success: true, data: data }); + }) + .catch(err => { + return res.json({ success: false, error: err }); + }); }); - router.get('/topSearches', async (req, res) => { - await getTopSearches(req) - .then((data) =>{ - return res.json({ success: true, data: data }); - }) - .catch((err) => { - return res.json({ success: false, error: err }); - }); - }); - - module.exports = router - - const getTopSearches = async(req, res) => { - return new Promise(async (resolve, reject) => { - let searchMonth = parseInt(req.query.month); - let searchYear = parseInt(req.query.year); - - let q = RecordSearchData.aggregate([ - - { $addFields: { "month": {$month: '$createdAt'}, - "year": {$year: '$createdAt'}}}, - {$match:{ - $and: [ - { month: searchMonth }, - { year: searchYear }, - { "searched": {$ne :""}} - ] - } - }, - { - $group: { - _id: { $toLower: "$searched"}, - count: { $sum: 1 }, - } - }, - {$sort:{ count : -1}} - ]).limit(10); - - q.exec(async (err, topSearches) => { - if (err) reject(err); - - let resolvedArray = await Promise.all(topSearches.map(async(topSearch) => { - let searchQuery = { $and: [{ activeflag: 'active' }] }; - searchQuery["$and"].push({ $text: { $search: topSearch._id } }); - - await Promise.all([ - - getObjectResult('dataset', searchQuery), - getObjectResult('tool', searchQuery), - getObjectResult('project', searchQuery), - getObjectResult('paper', searchQuery), - getObjectResult('course', searchQuery) - - ]).then((resources) => { - 
topSearch.datasets = resources[0][0] !== undefined && resources[0][0].count !== undefined ? resources[0][0].count : 0; - topSearch.tools = resources[1][0] !== undefined && resources[1][0].count !== undefined ? resources[1][0].count : 0; - topSearch.projects = resources[2][0] !== undefined && resources[2][0].count !== undefined ? resources[2][0].count : 0; - topSearch.papers = resources[3][0] !== undefined && resources[3][0].count !== undefined ? resources[3][0].count : 0; - topSearch.course = resources[4][0] !== undefined && resources[4][0].count !== undefined ? resources[4][0].count : 0; - }) - return topSearch; - })) - resolve(resolvedArray); - }); - }); - } - - function getObjectResult(type, searchQuery) { - var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - newSearchQuery["$and"].push({ type: type }) - var q = ''; - - q = Data.aggregate([ - { $match: newSearchQuery }, - { - "$group": { - "_id": {}, - "count": { - "$sum": 1 - } - } - }, - { - "$project": { - "count": "$count", - "_id": 0 - } - } - ]); - - return new Promise((resolve, reject) => { - q.exec((err, data) => { - if (typeof data === "undefined") resolve([]); - else resolve(data); - }) - }) +module.exports = router; + +const getTopSearches = async (req, res) => { + return new Promise(async (resolve, reject) => { + let searchMonth = parseInt(req.query.month); + let searchYear = parseInt(req.query.year); + + let q = RecordSearchData.aggregate([ + { $addFields: { month: { $month: '$createdAt' }, year: { $year: '$createdAt' } } }, + { + $match: { + $and: [{ month: searchMonth }, { year: searchYear }, { searched: { $ne: '' } }], + }, + }, + { + $group: { + _id: { $toLower: '$searched' }, + count: { $sum: 1 }, + }, + }, + { $sort: { count: -1 } }, + ]).limit(10); + + q.exec(async (err, topSearches) => { + if (err) reject(err); + + let resolvedArray = await Promise.all( + topSearches.map(async topSearch => { + let searchQuery = { $and: [{ activeflag: 'active' }] }; + searchQuery['$and'].push({ $text: { $search: topSearch._id } }); + + await Promise.all([ + getObjectResult('dataset', searchQuery), + getObjectResult('tool', searchQuery), + getObjectResult('project', searchQuery), + getObjectResult('paper', searchQuery), + getObjectResult('course', searchQuery), + ]).then(resources => { + topSearch.datasets = resources[0][0] !== undefined && resources[0][0].count !== undefined ? resources[0][0].count : 0; + topSearch.tools = resources[1][0] !== undefined && resources[1][0].count !== undefined ? resources[1][0].count : 0; + topSearch.projects = resources[2][0] !== undefined && resources[2][0].count !== undefined ? resources[2][0].count : 0; + topSearch.papers = resources[3][0] !== undefined && resources[3][0].count !== undefined ? resources[3][0].count : 0; + topSearch.course = resources[4][0] !== undefined && resources[4][0].count !== undefined ? 
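// resources follows the Promise.all order above (dataset, tool, project, paper, course);
// note the singular 'course' property here versus the plural names used for the others.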
resources[4][0].count : 0; + }); + return topSearch; + }) + ); + resolve(resolvedArray); + }); + }); +}; + +function getObjectResult(type, searchQuery) { + var newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + newSearchQuery['$and'].push({ type: type }); + var q = ''; + + q = Data.aggregate([ + { $match: newSearchQuery }, + { + $group: { + _id: {}, + count: { + $sum: 1, + }, + }, + }, + { + $project: { + count: '$count', + _id: 0, + }, + }, + ]); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); } - const getUnmetSearches = async(req, res) => { - return new Promise(async (resolve, reject) => { - - let searchMonth = parseInt(req.query.month); - let searchYear = parseInt(req.query.year); - let entitySearch = { ["returned." + req.entity] : {$lte : 0} }; - let q = RecordSearchData.aggregate([ - - { $addFields: { "month": {$month: '$createdAt'}, - "year": {$year: '$createdAt'}}}, - {$match:{ - $and: [ - { month: searchMonth }, - { year: searchYear }, - entitySearch, - { "searched": {$ne :""}} - ] - } - }, - { - $group: { - _id: { $toLower: "$searched"}, - count: { $sum: 1 }, - maxDatasets: { $max: "$returned.dataset" }, - maxProjects: { $max: "$returned.project" }, - maxTools: { $max: "$returned.tool" }, - maxPapers: { $max: "$returned.paper" }, - maxCourses: { $max: "$returned.course" }, - maxPeople: { $max: "$returned.people" }, - entity: { $max: req.entity} - } - }, - {$sort:{ count : -1}} - ]).limit(10); - - q.exec((err, data) => { - if (err) reject(err); - return resolve(data); - }); - }); -} \ No newline at end of file +const getUnmetSearches = async (req, res) => { + return new Promise(async (resolve, reject) => { + let searchMonth = parseInt(req.query.month); + let searchYear = parseInt(req.query.year); + let entitySearch = { ['returned.' 
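// Builds a computed key such as 'returned.dataset', matching recorded searches where the
// requested entity type returned no results ($lte: 0).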
+ req.entity]: { $lte: 0 } }; + let q = RecordSearchData.aggregate([ + { $addFields: { month: { $month: '$createdAt' }, year: { $year: '$createdAt' } } }, + { + $match: { + $and: [{ month: searchMonth }, { year: searchYear }, entitySearch, { searched: { $ne: '' } }], + }, + }, + { + $group: { + _id: { $toLower: '$searched' }, + count: { $sum: 1 }, + maxDatasets: { $max: '$returned.dataset' }, + maxProjects: { $max: '$returned.project' }, + maxTools: { $max: '$returned.tool' }, + maxPapers: { $max: '$returned.paper' }, + maxCourses: { $max: '$returned.course' }, + maxPeople: { $max: '$returned.people' }, + entity: { $max: req.entity }, + }, + }, + { $sort: { count: -1 } }, + ]).limit(10); + + q.exec((err, data) => { + if (err) reject(err); + return resolve(data); + }); + }); +}; diff --git a/src/resources/team/team.controller.js b/src/resources/team/team.controller.js index 5f361c77..fbdb70a8 100644 --- a/src/resources/team/team.controller.js +++ b/src/resources/team/team.controller.js @@ -2,19 +2,8 @@ import _ from 'lodash'; import { TeamModel } from './team.model'; import { UserModel } from '../user/user.model'; import emailGenerator from '../utilities/emailGenerator.util'; - -const notificationBuilder = require('../utilities/notificationBuilder'); - -const hdrukEmail = `enquiry@healthdatagateway.org`; -const notificationTypes = { - MEMBERADDED: 'MemberAdded', - MEMBERREMOVED: 'MemberRemoved', - MEMBERROLECHANGED: 'MemberRoleChanged', -}; -const roleTypes = { - MANAGER: 'manager', - REVIEWER: 'reviewer', -}; +import notificationBuilder from '../utilities/notificationBuilder'; +import constants from '../utilities/constants.util'; // GET api/v1/teams/:id const getTeamById = async (req, res) => { @@ -165,7 +154,7 @@ const addTeamMembers = async (req, res) => { let newMemberIds = newMembers.map((mem) => mem.memberid); let newUsers = await UserModel.find({ _id: newMemberIds }); createNotifications( - notificationTypes.MEMBERADDED, + constants.notificationTypes.MEMBERADDED, { newUsers }, team, req.user @@ -276,7 +265,7 @@ const deleteTeamMember = async (req, res) => { (user) => user._id.toString() === memberid.toString() ); createNotifications( - notificationTypes.MEMBERREMOVED, + constants.notificationTypes.MEMBERREMOVED, { removedUser }, team, req.user @@ -317,7 +306,7 @@ const checkTeamPermissions = (role, team, userId) => { let { roles = [] } = userMember; if ( roles.includes(role) || - roles.includes(roleTypes.MANAGER) || + roles.includes(constants.roleTypes.MANAGER) || role === '' ) { return true; @@ -361,7 +350,7 @@ const createNotifications = async (type, context, team, user) => { let html = ''; switch (type) { - case notificationTypes.MEMBERREMOVED: + case constants.notificationTypes.MEMBERREMOVED: // 1. Get user removed const { removedUser } = context; // 2. Create user notifications @@ -378,13 +367,13 @@ const createNotifications = async (type, context, team, user) => { html = emailGenerator.generateRemovedFromTeam(options); emailGenerator.sendEmail( [removedUser], - hdrukEmail, + constants.hdrukEmail, `You have been removed from the team ${teamName}`, html, false ); break; - case notificationTypes.MEMBERADDED: + case constants.notificationTypes.MEMBERADDED: // 1. Get users added const { newUsers } = context; const newUserIds = newUsers.map((user) => user.id); @@ -398,12 +387,12 @@ const createNotifications = async (type, context, team, user) => { // 3. 
Create email for reviewers options = { teamName, - role: roleTypes.REVIEWER, + role: constants.roleTypes.REVIEWER, }; html = emailGenerator.generateAddedToTeam(options); emailGenerator.sendEmail( newUsers, - hdrukEmail, + constants.hdrukEmail, `You have been added as a reviewer to the team ${teamName} on the HDR UK Innovation Gateway`, html, false @@ -411,18 +400,18 @@ const createNotifications = async (type, context, team, user) => { // 4. Create email for managers options = { teamName, - role: roleTypes.MANAGER, + role: constants.roleTypes.MANAGER, }; html = emailGenerator.generateAddedToTeam(options); emailGenerator.sendEmail( newUsers, - hdrukEmail, + constants.hdrukEmail, `You have been added as a manager to the team ${teamName} on the HDR UK Innovation Gateway`, html, false ); break; - case notificationTypes.MEMBERROLECHANGED: + case constants.notificationTypes.MEMBERROLECHANGED: break; } }; @@ -435,6 +424,5 @@ export default { deleteTeamMember: deleteTeamMember, checkTeamPermissions: checkTeamPermissions, getTeamMembersByRole: getTeamMembersByRole, - createNotifications: createNotifications, - roleTypes: roleTypes, + createNotifications: createNotifications }; diff --git a/src/resources/team/team.model.js b/src/resources/team/team.model.js index c4b1bda9..e8571a0e 100644 --- a/src/resources/team/team.model.js +++ b/src/resources/team/team.model.js @@ -1,38 +1,43 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -const TeamSchema = new Schema({ - id: { - type: Number, - unique: true - }, - members: [{ - memberid: { type: Schema.Types.ObjectId, ref: 'User', required: true }, - roles: { type: [String], enum: ['reviewer','manager'], required: true }, - dateCreated: Date, - dateUpdated: Date - }], - type: String, - active: { - type: Boolean, - default: true - }, -}, { - toJSON: { virtuals: true }, - toObject: { virtuals: true }, - timestamps: true -}); +const TeamSchema = new Schema( + { + id: { + type: Number, + unique: true, + }, + members: [ + { + memberid: { type: Schema.Types.ObjectId, ref: 'User', required: true }, + roles: { type: [String], enum: ['reviewer', 'manager'], required: true }, + dateCreated: Date, + dateUpdated: Date, + }, + ], + type: String, + active: { + type: Boolean, + default: true, + }, + }, + { + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + timestamps: true, + } +); TeamSchema.virtual('publisher', { - ref: 'Publisher', - foreignField: '_id', - localField: '_id', - justOne: true + ref: 'Publisher', + foreignField: '_id', + localField: '_id', + justOne: true, }); TeamSchema.virtual('users', { - ref: 'User', - foreignField: '_id', - localField: 'members.memberid' + ref: 'User', + foreignField: '_id', + localField: 'members.memberid', }); -export const TeamModel = model('Team', TeamSchema) \ No newline at end of file +export const TeamModel = model('Team', TeamSchema); diff --git a/src/resources/team/team.route.js b/src/resources/team/team.route.js index 9a630492..1983654e 100644 --- a/src/resources/team/team.route.js +++ b/src/resources/team/team.route.js @@ -30,4 +30,4 @@ router.put('/:id/members/:memberid', passport.authenticate('jwt'), teamControlle // @access Private router.delete('/:id/members/:memberid', passport.authenticate('jwt'), teamController.deleteTeamMember); -module.exports = router +module.exports = router; diff --git a/src/resources/tool/counter.route.js b/src/resources/tool/counter.route.js index bd4a8859..1aabb7f9 100644 --- a/src/resources/tool/counter.route.js +++ 
b/src/resources/tool/counter.route.js @@ -1,9 +1,9 @@ -import express from "express"; -import { Data } from "./data.model"; +import express from 'express'; +import { Data } from './data.model'; const router = express.Router(); -router.post("/update", async (req, res) => { +router.post('/update', async (req, res) => { const { id, counter } = req.body; if (isNaN(id)) { diff --git a/src/resources/tool/data.model.js b/src/resources/tool/data.model.js index 3e0084bc..8e37e21e 100644 --- a/src/resources/tool/data.model.js +++ b/src/resources/tool/data.model.js @@ -5,117 +5,140 @@ import { TeamModel } from '../team/team.model'; // this will be our data base's data structure const DataSchema = new Schema( - { - id: Number, - type: String, - name: String, - description: String, - resultsInsights: String, - link: String, - categories: { - category: { type: String }, - //tools related fields - programmingLanguage: { type: [String] }, - programmingLanguageVersion: { type: String }, - }, - license: String, - authors: [Number], - tags: { - features: [String], - topics: [String], - }, - activeflag: String, - updatedon: Date, - counter: Number, - discourseTopicId: Number, - relatedObjects: [ - { - objectId: String, - reason: String, - pid: String, - objectType: String, - user: String, - updated: String, - }, - ], - uploader: Number, - //tools related fields - programmingLanguage: [ - { - programmingLanguage: String, - version: String, - }, - ], - //paper related fields - journal: String, - journalYear: Number, - isPreprint: Boolean, + { + id: Number, + type: String, + name: String, + description: String, + resultsInsights: String, + link: String, + categories: { + category: { type: String }, + //tools related fields + programmingLanguage: { type: [String] }, + programmingLanguageVersion: { type: String }, + }, + license: String, + authors: [Number], + tags: { + features: [String], + topics: [String], + }, + activeflag: String, + updatedon: Date, + counter: Number, + discourseTopicId: Number, + relatedObjects: [ + { + objectId: String, + reason: String, + pid: String, + objectType: String, + user: String, + updated: String, + }, + ], + uploader: Number, + //tools related fields + programmingLanguage: [ + { + programmingLanguage: String, + version: String, + }, + ], + //paper related fields + journal: String, + journalYear: Number, + isPreprint: Boolean, + document_links: { + doi: [String], + pdf: [String], + html: [String], + }, - //person related fields - firstname: String, - lastname: String, - bio: String, //institution - showBio: Boolean, - orcid: String, - showOrcid: Boolean, - emailNotifications: Boolean, - terms: Boolean, - sector: String, - showSector: Boolean, - organisation: String, - showOrganisation: {type: Boolean, default: false }, - showLink: Boolean, - showDomain: Boolean, - profileComplete: Boolean, + //person related fields + firstname: String, + lastname: String, + bio: String, //institution + showBio: Boolean, + orcid: String, + showOrcid: Boolean, + emailNotifications: Boolean, + terms: Boolean, + sector: String, + showSector: Boolean, + organisation: String, + showOrganisation: { type: Boolean, default: false }, + showLink: Boolean, + showDomain: Boolean, + profileComplete: Boolean, - //dataset related fields - datasetid: String, - pid: String, - datasetVersion: String, - datasetfields: { - publisher: String, - geographicCoverage: [String], - physicalSampleAvailability: [String], - abstract: String, - releaseDate: String, - accessRequestDuration: String, - conformsTo: String, - 
accessRights: String, - jurisdiction: String, - datasetStartDate: String, - datasetEndDate: String, - statisticalPopulation: String, - ageBand: String, - contactPoint: String, - periodicity: String, - populationSize: String, - metadataquality: {}, - datautility: {}, - metadataschema: {}, - technicaldetails: [], - versionLinks: [], - phenotypes: [] - }, - datasetv2: {}, + //dataset related fields + datasetid: String, + pid: String, + datasetVersion: String, + datasetfields: { + publisher: String, + geographicCoverage: [String], + physicalSampleAvailability: [String], + abstract: String, + releaseDate: String, + accessRequestDuration: String, + conformsTo: String, + accessRights: String, + jurisdiction: String, + datasetStartDate: String, + datasetEndDate: String, + statisticalPopulation: String, + ageBand: String, + contactPoint: String, + periodicity: String, + populationSize: String, + metadataquality: {}, + datautility: {}, + metadataschema: {}, + technicaldetails: [], + versionLinks: [], + phenotypes: [], + }, + datasetv2: {}, - //not used - rating: Number, - toolids: [Number], - datasetids: [String] - }, - { - collection: 'tools', - timestamps: true, - toJSON: { virtuals: true }, - toObject: { virtuals: true } - } + //not used + rating: Number, + toolids: [Number], + datasetids: [String], + }, + { + collection: 'tools', + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } ); DataSchema.virtual('publisher', { - ref: 'Publisher', - foreignField: 'name', - localField: 'datasetfields.publisher', - justOne: true + ref: 'Publisher', + foreignField: 'name', + localField: 'datasetfields.publisher', + justOne: true, }); -export const Data = model('Data', DataSchema) \ No newline at end of file +DataSchema.virtual('reviews', { + ref: 'Reviews', + foreignField: 'reviewerID', + localField: 'id', +}); + +DataSchema.virtual('tools', { + ref: 'Data', + foreignField: 'authors', + localField: 'id', +}); + +DataSchema.virtual('persons', { + ref: 'Data', + foreignField: 'id', + localField: 'authors', +}); + +export const Data = model('Data', DataSchema); diff --git a/src/resources/tool/data.repository.js b/src/resources/tool/data.repository.js index b110640d..cc3a205f 100644 --- a/src/resources/tool/data.repository.js +++ b/src/resources/tool/data.repository.js @@ -1,447 +1,620 @@ import { Data } from './data.model'; -import { MessagesModel } from '../message/message.model' -import { UserModel } from '../user/user.model' -import { createDiscourseTopic } from '../discourse/discourse.service' +import { MessagesModel } from '../message/message.model'; +import { UserModel } from '../user/user.model'; +import { createDiscourseTopic } from '../discourse/discourse.service'; import emailGenerator from '../utilities/emailGenerator.util'; import helper from '../utilities/helper.util'; const asyncModule = require('async'); +import { utils } from '../auth'; +import { ROLES } from '../user/user.roles'; const hdrukEmail = `enquiry@healthdatagateway.org`; const urlValidator = require('../utilities/urlValidator'); const inputSanitizer = require('../utilities/inputSanitizer'); export async function getObjectById(id) { - return await Data.findOne({ id }).exec() + return await Data.findOne({ id }).exec(); } const addTool = async (req, res) => { - return new Promise(async(resolve, reject) => { - let data = new Data(); - const toolCreator = req.body.toolCreator; - const { type, name, link, description, resultsInsights, categories, license, authors, tags, journal, journalYear, relatedObjects, 
programmingLanguage, isPreprint } = req.body; - data.id = parseInt(Math.random().toString().replace('0.', '')); - data.type = inputSanitizer.removeNonBreakingSpaces(type); - data.name = inputSanitizer.removeNonBreakingSpaces(name); - data.link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(link)); - data.journal = inputSanitizer.removeNonBreakingSpaces(journal); - data.journalYear = inputSanitizer.removeNonBreakingSpaces(journalYear); - data.description = inputSanitizer.removeNonBreakingSpaces(description); - data.resultsInsights = inputSanitizer.removeNonBreakingSpaces(resultsInsights); - console.log(req.body) - if (categories && typeof categories !== undefined) data.categories.category = inputSanitizer.removeNonBreakingSpaces(categories.category); - data.license = inputSanitizer.removeNonBreakingSpaces(license); - data.authors = authors; - data.tags.features = inputSanitizer.removeNonBreakingSpaces(tags.features), - data.tags.topics = inputSanitizer.removeNonBreakingSpaces(tags.topics); - data.activeflag = 'review'; - data.updatedon = Date.now(); - data.relatedObjects = relatedObjects; - - if(programmingLanguage){ - programmingLanguage.forEach((p) => - { - p.programmingLanguage = inputSanitizer.removeNonBreakingSpaces(p.programmingLanguage); - p.version = (inputSanitizer.removeNonBreakingSpaces(p.version)); - }); - } - data.programmingLanguage = programmingLanguage; - - data.isPreprint = isPreprint; - data.uploader = req.user.id; - let newDataObj = await data.save(); - if(!newDataObj) - reject(new Error(`Can't persist data object to DB.`)); - - let message = new MessagesModel(); - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = 0; - message.messageObjectID = data.id; - message.messageType = 'add'; - message.messageDescription = `Approval needed: new ${data.type} added ${name}` - message.messageSent = Date.now(); - message.isRead = false; - let newMessageObj = await message.save(); - if(!newMessageObj) - reject(new Error(`Can't persist message to DB.`)); - - // 1. Generate URL for linking tool from email - const toolLink = process.env.homeURL + '/' + data.type + '/' + data.id - - // 2. Query Db for all admins who have opted in to email updates - var q = UserModel.aggregate([ - // Find all users who are admins - { $match: { role: 'Admin' } }, - // Reduce response payload size to required fields - { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1 } } - ]); - - // 3. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - `A new ${data.type} has been added and is ready for review`, - `Approval needed: new ${data.type} ${data.name}

${toolLink}`, - false - ); - }); - - if (data.type === 'tool') { - await sendEmailNotificationToAuthors(data, toolCreator); - } - await storeNotificationsForAuthors(data, toolCreator); - - resolve(newDataObj); - }) + return new Promise(async (resolve, reject) => { + let data = new Data(); + const toolCreator = req.body.toolCreator; + const { + type, + name, + link, + description, + resultsInsights, + categories, + license, + authors, + tags, + journal, + journalYear, + relatedObjects, + programmingLanguage, + isPreprint, + document_links, + } = req.body; + data.id = parseInt(Math.random().toString().replace('0.', '')); + data.type = inputSanitizer.removeNonBreakingSpaces(type); + data.name = inputSanitizer.removeNonBreakingSpaces(name); + data.link = urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(link)); + data.journal = inputSanitizer.removeNonBreakingSpaces(journal); + data.journalYear = inputSanitizer.removeNonBreakingSpaces(journalYear); + data.description = inputSanitizer.removeNonBreakingSpaces(description); + data.resultsInsights = inputSanitizer.removeNonBreakingSpaces(resultsInsights); + console.log(req.body); + if (categories && typeof categories !== undefined) + data.categories.category = inputSanitizer.removeNonBreakingSpaces(categories.category); + data.license = inputSanitizer.removeNonBreakingSpaces(license); + data.authors = authors; + (data.tags.features = inputSanitizer.removeNonBreakingSpaces(tags.features)), + (data.tags.topics = inputSanitizer.removeNonBreakingSpaces(tags.topics)); + data.activeflag = 'review'; + data.updatedon = Date.now(); + data.relatedObjects = relatedObjects; + + if (programmingLanguage) { + programmingLanguage.forEach(p => { + p.programmingLanguage = inputSanitizer.removeNonBreakingSpaces(p.programmingLanguage); + p.version = inputSanitizer.removeNonBreakingSpaces(p.version); + }); + } + data.programmingLanguage = programmingLanguage; + + data.isPreprint = isPreprint; + data.uploader = req.user.id; + + data.document_links = validateDocumentLinks(document_links); + + let newDataObj = await data.save(); + if (!newDataObj) reject(new Error(`Can't persist data object to DB.`)); + + let message = new MessagesModel(); + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = 0; + message.messageObjectID = data.id; + message.messageType = 'add'; + message.messageDescription = `Approval needed: new ${data.type} added ${name}`; + message.messageSent = Date.now(); + message.isRead = false; + let newMessageObj = await message.save(); + if (!newMessageObj) reject(new Error(`Can't persist message to DB.`)); + + // 1. Generate URL for linking tool from email + const toolLink = process.env.homeURL + '/' + data.type + '/' + data.id; + + // 2. Query Db for all admins who have opted in to email updates + var q = UserModel.aggregate([ + // Find all users who are admins + { $match: { role: 'Admin' } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1 } }, + ]); + + // 3. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail( + emailRecipients, + `${hdrukEmail}`, + `A new ${data.type} has been added and is ready for review`, + `Approval needed: new ${data.type} ${data.name}

${toolLink}`, + false + ); + }); + + if (data.type === 'tool') { + await sendEmailNotificationToAuthors(data, toolCreator); + } + await storeNotificationsForAuthors(data, toolCreator); + + resolve(newDataObj); + }); }; - const editTool = async (req, res) => { - return new Promise(async(resolve, reject) => { - - const toolCreator = req.body.toolCreator; - let { type, name, link, description, resultsInsights, categories, license, authors, tags, journal, journalYear, relatedObjects, isPreprint } = req.body; - let id = req.params.id; - let programmingLanguage = req.body.programmingLanguage; - - if (!categories || typeof categories === undefined) categories = {'category':'', 'programmingLanguage':[], 'programmingLanguageVersion':''} - - if(programmingLanguage){ - programmingLanguage.forEach((p) => - { - p.programmingLanguage = inputSanitizer.removeNonBreakingSpaces(p.programmingLanguage); - p.version = (inputSanitizer.removeNonBreakingSpaces(p.version)); - }); - } - - let data = { - id: id, - name: name, - authors: authors, - }; - - Data.findOneAndUpdate({ id: id }, - { - type: inputSanitizer.removeNonBreakingSpaces(type), - name: inputSanitizer.removeNonBreakingSpaces(name), - link: urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(link)), - description: inputSanitizer.removeNonBreakingSpaces(description), - resultsInsights: inputSanitizer.removeNonBreakingSpaces(resultsInsights), - journal: inputSanitizer.removeNonBreakingSpaces(journal), - journalYear: inputSanitizer.removeNonBreakingSpaces(journalYear), - categories: { - category: inputSanitizer.removeNonBreakingSpaces(categories.category), - programmingLanguage: categories.programmingLanguage, - programmingLanguageVersion: categories.programmingLanguageVersion - }, - license: inputSanitizer.removeNonBreakingSpaces(license), - authors: authors, - programmingLanguage: programmingLanguage, - tags: { - features: inputSanitizer.removeNonBreakingSpaces(tags.features), - topics: inputSanitizer.removeNonBreakingSpaces(tags.topics) - }, - relatedObjects: relatedObjects, - isPreprint: isPreprint - }, (err) => { - if (err) { - reject(new Error(`Failed to update.`)); - } - }).then((tool) => { - if(tool == null){ - reject(new Error(`No record found with id of ${id}.`)); - } - else if (type === 'tool') { - // Send email notification of update to all authors who have opted in to updates - sendEmailNotificationToAuthors(data, toolCreator); - storeNotificationsForAuthors(data, toolCreator); - } - resolve(tool); - }); - }) - }; - - const deleteTool = async(req, res) => { - return new Promise(async(resolve, reject) => { - const { id } = req.params.id; - Data.findOneAndDelete({ id: req.params.id }, (err) => { - if (err) reject(err); - - - }).then((tool) => { - if(tool == null){ - reject(`No Content`); - } - else{ - resolve(id); - } - } - ) - })}; - - const getToolsAdmin = async (req, res) => { - return new Promise(async (resolve, reject) => { - - let startIndex = 0; - let limit = 1000; - let typeString = ""; - let searchString = ""; - - if (req.query.offset) { - startIndex = req.query.offset; - } - if (req.query.limit) { - limit = req.query.limit; - } - if (req.params.type) { - typeString = req.params.type; - } - if (req.query.q) { - searchString = req.query.q || "";; - } - - let searchQuery = { $and: [{ type: typeString }] }; - let searchAll = false; - - if (searchString.length > 0) { - searchQuery["$and"].push({ $text: { $search: searchString } }); - } - else { - searchAll = true; - } - await Promise.all([ - getObjectResult(typeString, 
searchAll, searchQuery, startIndex, limit), - ]).then((values) => { - resolve(values[0]); - }); - }); - } - - const getTools = async (req, res) => { - return new Promise(async (resolve, reject) => { - let startIndex = 0; - let limit = 1000; - let typeString = ""; - let idString = req.user.id; - - if (req.query.startIndex) { - startIndex = req.query.startIndex; - } - if (req.query.limit) { - limit = req.query.limit; - } - if (req.params.type) { - typeString = req.params.type; - } - if (req.query.id) { - idString = req.query.id; - } - - let query = Data.aggregate([ - { $match: { $and: [{ type: typeString }, { authors: parseInt(idString) }] } }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $sort: { updatedAt : -1}} - ])//.skip(parseInt(startIndex)).limit(parseInt(maxResults)); - query.exec((err, data) => { - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - if (err) reject({ success: false, error: err }); - resolve(data); - }); - }); - } - - const setStatus = async (req, res) => { - return new Promise(async (resolve, reject) => { - try { - const { activeflag, rejectionReason } = req.body; - const id = req.params.id; - - let tool = await Data.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag } }); - if (!tool) { - reject(new Error('Tool not found')); - } - - if (tool.authors) { - tool.authors.forEach(async (authorId) => { - await createMessage(authorId, id, tool.name, tool.type, activeflag, rejectionReason); - }); - } - await createMessage(0, id, tool.name, tool.type, activeflag, rejectionReason); - - if (!tool.discourseTopicId && tool.activeflag === 'active') { - await createDiscourseTopic(tool); - } - - // Send email notification of status update to admins and authors who have opted in - await sendEmailNotifications(tool, activeflag, rejectionReason); - - resolve(id); - - } catch (err) { - console.log(err); - reject(new Error(err)); - } - }); - }; - - async function createMessage(authorId, toolId, toolName, toolType, activeflag, rejectionReason) { - let message = new MessagesModel(); - const toolLink = process.env.homeURL + '/' + toolType + '/' + toolId; - - if (activeflag === 'active') { - message.messageType = 'approved'; - message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}` - } else if (activeflag === 'archive') { - message.messageType = 'archive'; - message.messageDescription = `Your ${toolType} ${toolName} has been archived ${toolLink}` - } else if (activeflag === 'rejected') { - message.messageType = 'rejected'; - message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}` - message.messageDescription = (rejectionReason) ? message.messageDescription.concat(` Rejection reason: ${rejectionReason}`) : message.messageDescription - } - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = authorId; - message.messageObjectID = toolId; - message.messageSent = Date.now(); - message.isRead = false; - await message.save(); - } - - async function sendEmailNotifications(tool, activeflag, rejectionReason) { - let subject; - let html; - // 1. Generate tool URL for linking user from email - const toolLink = process.env.homeURL + '/' + tool.type + '/' + tool.id - - // 2. Build email body - if (activeflag === 'active') { - subject = `Your ${tool.type} ${tool.name} has been approved and is now live` - html = `Your ${tool.type} ${tool.name} has been approved and is now live

${toolLink}` - } else if (activeflag === 'archive') { - subject = `Your ${tool.type} ${tool.name} has been archived` - html = `Your ${tool.type} ${tool.name} has been archived

${toolLink}` - } else if (activeflag === 'rejected') { - subject = `Your ${tool.type} ${tool.name} has been rejected` - html = `Your ${tool.type} ${tool.name} has been rejected

Rejection reason: ${rejectionReason}

${toolLink}` - } - - // 3. Find all authors of the tool who have opted in to email updates - var q = UserModel.aggregate([ - // Find all authors of this tool - { $match: { $or: [{ role: 'Admin' }, { id: { $in: tool.authors } }] } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 4. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - subject, - html - ); - }); - } + return new Promise(async (resolve, reject) => { + const toolCreator = req.body.toolCreator; + let { + type, + name, + link, + description, + resultsInsights, + categories, + license, + authors, + tags, + journal, + journalYear, + relatedObjects, + isPreprint, + document_links, + } = req.body; + let id = req.params.id; + let programmingLanguage = req.body.programmingLanguage; + + if (!categories || typeof categories === undefined) + categories = { category: '', programmingLanguage: [], programmingLanguageVersion: '' }; + + if (programmingLanguage) { + programmingLanguage.forEach(p => { + p.programmingLanguage = inputSanitizer.removeNonBreakingSpaces(p.programmingLanguage); + p.version = inputSanitizer.removeNonBreakingSpaces(p.version); + }); + } + + let documentLinksValidated = validateDocumentLinks(document_links); + + let data = { + id: id, + name: name, + authors: authors, + }; + + Data.findOneAndUpdate( + { id: id }, + { + type: inputSanitizer.removeNonBreakingSpaces(type), + name: inputSanitizer.removeNonBreakingSpaces(name), + link: urlValidator.validateURL(inputSanitizer.removeNonBreakingSpaces(link)), + description: inputSanitizer.removeNonBreakingSpaces(description), + resultsInsights: inputSanitizer.removeNonBreakingSpaces(resultsInsights), + journal: inputSanitizer.removeNonBreakingSpaces(journal), + journalYear: inputSanitizer.removeNonBreakingSpaces(journalYear), + categories: { + category: inputSanitizer.removeNonBreakingSpaces(categories.category), + programmingLanguage: categories.programmingLanguage, + programmingLanguageVersion: categories.programmingLanguageVersion, + }, + license: inputSanitizer.removeNonBreakingSpaces(license), + authors: authors, + programmingLanguage: programmingLanguage, + tags: { + features: inputSanitizer.removeNonBreakingSpaces(tags.features), + topics: inputSanitizer.removeNonBreakingSpaces(tags.topics), + }, + relatedObjects: relatedObjects, + isPreprint: isPreprint, + document_links: documentLinksValidated, + }, + err => { + if (err) { + reject(new Error(`Failed to update.`)); + } + } + ).then(tool => { + if (tool == null) { + reject(new Error(`No record found with id of ${id}.`)); + } else if (type === 'tool') { + // Send email notification of update to all authors who have opted in to updates + sendEmailNotificationToAuthors(data, toolCreator); + storeNotificationsForAuthors(data, toolCreator); + } + resolve(tool); + }); + }); +}; + +const deleteTool = async (req, res) => { + return new Promise(async (resolve, reject) => { + const { id } = req.params.id; + Data.findOneAndDelete({ id: req.params.id }, 
err => { + if (err) reject(err); + }).then(tool => { + if (tool == null) { + reject(`No Content`); + } else { + resolve(id); + } + }); + }); +}; + +const getAllTools = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 1000; + let typeString = ''; + let searchString = ''; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.params.type) { + typeString = req.params.type; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + + let searchQuery = { $and: [{ type: typeString }] }; + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + await Promise.all([getObjectResult(typeString, searchAll, searchQuery, startIndex, limit)]).then(values => { + resolve(values[0]); + }); + }); +}; + +const getToolsAdmin = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let typeString = ''; + let searchString = ''; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.params.type) { + typeString = req.params.type; + } + if (req.query.q) { + searchString = req.query.q || ''; + } + if (req.query.status) { + status = req.query.status; + } + + let searchQuery; + if (status === 'all') { + searchQuery = { $and: [{ type: typeString }] }; + } else { + searchQuery = { $and: [{ type: typeString }, { activeflag: status }] }; + } + + let searchAll = false; + + if (searchString.length > 0) { + searchQuery['$and'].push({ $text: { $search: searchString } }); + } else { + searchAll = true; + } + + await Promise.all([getObjectResult(typeString, searchAll, searchQuery, startIndex, limit), getCountsByStatus(typeString)]).then( + values => { + resolve(values); + } + ); + }); +}; + +const getTools = async (req, res) => { + return new Promise(async (resolve, reject) => { + let startIndex = 0; + let limit = 40; + let typeString = ''; + let idString = req.user.id; + let status = 'all'; + + if (req.query.offset) { + startIndex = req.query.offset; + } + if (req.query.limit) { + limit = req.query.limit; + } + if (req.params.type) { + typeString = req.params.type; + } + if (req.query.id) { + idString = req.query.id; + } + if (req.query.status) { + status = req.query.status; + } + + let searchQuery; + if (status === 'all') { + searchQuery = [{ type: typeString }, { authors: parseInt(idString) }]; + } else { + searchQuery = [{ type: typeString }, { authors: parseInt(idString) }, { activeflag: status }]; + } + + let query = Data.aggregate([ + { $match: { $and: searchQuery } }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $sort: { updatedAt: -1 } }, + ]) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + + await Promise.all([getUserTools(query), getCountsByStatusCreator(typeString, idString)]).then(values => { + resolve(values); + }); + + function getUserTools(query) { + return new Promise((resolve, reject) => { + query.exec((err, data) => { + data && + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); + } + }); +}; + +const setStatus = async (req, res) => { + return new Promise(async (resolve, reject) => { + try { + const { activeflag, rejectionReason } = 
req.body; + const id = req.params.id; + const userId = req.user.id; + let tool; + + if (utils.whatIsRole(req) === ROLES.Admin) { + tool = await Data.findOneAndUpdate({ id: id }, { $set: { activeflag: activeflag } }); + if (!tool) { + reject(new Error('Tool not found')); + } + } else if (activeflag === 'archive') { + tool = await Data.findOneAndUpdate({ $and: [{ id: id }, { authors: userId }] }, { $set: { activeflag: activeflag } }); + if (!tool) { + reject(new Error('Tool not found or user not authorised to change Tool status')); + } + } else { + reject(new Error('Not authorised to change the status of this Tool')); + } + + if (tool.authors) { + tool.authors.forEach(async authorId => { + await createMessage(authorId, id, tool.name, tool.type, activeflag, rejectionReason); + }); + } + await createMessage(0, id, tool.name, tool.type, activeflag, rejectionReason); + + if (!tool.discourseTopicId && tool.activeflag === 'active') { + await createDiscourseTopic(tool); + } + + // Send email notification of status update to admins and authors who have opted in + await sendEmailNotifications(tool, activeflag, rejectionReason); + + resolve(id); + } catch (err) { + console.log(err); + reject(new Error(err)); + } + }); +}; + +async function createMessage(authorId, toolId, toolName, toolType, activeflag, rejectionReason) { + let message = new MessagesModel(); + const toolLink = process.env.homeURL + '/' + toolType + '/' + toolId; + + if (activeflag === 'active') { + message.messageType = 'approved'; + message.messageDescription = `Your ${toolType} ${toolName} has been approved and is now live ${toolLink}`; + } else if (activeflag === 'archive') { + message.messageType = 'archive'; + message.messageDescription = `Your ${toolType} ${toolName} has been archived ${toolLink}`; + } else if (activeflag === 'rejected') { + message.messageType = 'rejected'; + message.messageDescription = `Your ${toolType} ${toolName} has been rejected ${toolLink}`; + message.messageDescription = rejectionReason + ? message.messageDescription.concat(` Rejection reason: ${rejectionReason}`) + : message.messageDescription; + } + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = authorId; + message.messageObjectID = toolId; + message.messageSent = Date.now(); + message.isRead = false; + await message.save(); +} + +async function sendEmailNotifications(tool, activeflag, rejectionReason) { + let subject; + let html; + // 1. Generate tool URL for linking user from email + const toolLink = process.env.homeURL + '/' + tool.type + '/' + tool.id; + + // 2. Build email body + if (activeflag === 'active') { + subject = `Your ${tool.type} ${tool.name} has been approved and is now live`; + html = `Your ${tool.type} ${tool.name} has been approved and is now live

${toolLink}`; + } else if (activeflag === 'archive') { + subject = `Your ${tool.type} ${tool.name} has been archived`; + html = `Your ${tool.type} ${tool.name} has been archived

${toolLink}`; + } else if (activeflag === 'rejected') { + subject = `Your ${tool.type} ${tool.name} has been rejected`; + html = `Your ${tool.type} ${tool.name} has been rejected

Rejection reason: ${rejectionReason}

${toolLink}`; + } + + // 3. Find all authors of the tool who have opted in to email updates + var q = UserModel.aggregate([ + // Find all authors of this tool + { $match: { $or: [{ role: 'Admin' }, { id: { $in: tool.authors } }] } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 4. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail(emailRecipients, `${hdrukEmail}`, subject, html); + }); +} async function sendEmailNotificationToAuthors(tool, toolOwner) { - // 1. Generate tool URL for linking user from email - const toolLink = process.env.homeURL + '/tool/' + tool.id - - // 2. Find all authors of the tool who have opted in to email updates - var q = UserModel.aggregate([ - // Find all authors of this tool - { $match: { id: { $in: tool.authors } } }, - // Perform lookup to check opt in/out flag in tools schema - { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, - // Filter out any user who has opted out of email notifications - { $match: { 'tool.emailNotifications': true } }, - // Reduce response payload size to required fields - { $project: {_id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } } - ]); - - // 3. Use the returned array of email recipients to generate and send emails with SendGrid - q.exec((err, emailRecipients) => { - if (err) { - return new Error({ success: false, error: err }); - } - emailGenerator.sendEmail( - emailRecipients, - `${hdrukEmail}`, - `${toolOwner.name} added you as an author of the tool ${tool.name}`, - `${toolOwner.name} added you as an author of the tool ${tool.name}

${toolLink}` - ); - }); - }; + // 1. Generate tool URL for linking user from email + const toolLink = process.env.homeURL + '/tool/' + tool.id; + + // 2. Find all authors of the tool who have opted in to email updates + var q = UserModel.aggregate([ + // Find all authors of this tool + { $match: { id: { $in: tool.authors } } }, + // Perform lookup to check opt in/out flag in tools schema + { $lookup: { from: 'tools', localField: 'id', foreignField: 'id', as: 'tool' } }, + // Filter out any user who has opted out of email notifications + { $match: { 'tool.emailNotifications': true } }, + // Reduce response payload size to required fields + { $project: { _id: 1, firstname: 1, lastname: 1, email: 1, role: 1, 'tool.emailNotifications': 1 } }, + ]); + + // 3. Use the returned array of email recipients to generate and send emails with SendGrid + q.exec((err, emailRecipients) => { + if (err) { + return new Error({ success: false, error: err }); + } + emailGenerator.sendEmail( + emailRecipients, + `${hdrukEmail}`, + `${toolOwner.name} added you as an author of the tool ${tool.name}`, + `${toolOwner.name} added you as an author of the tool ${tool.name}

${toolLink}` + ); + }); +} async function storeNotificationsForAuthors(tool, toolOwner) { - //store messages to alert a user has been added as an author - const toolLink = process.env.homeURL + '/tool/' + tool.id - - //normal user - var toolCopy = JSON.parse(JSON.stringify(tool)); - - toolCopy.authors.push(0); - asyncModule.eachSeries(toolCopy.authors, async (author) => { - - let message = new MessagesModel(); - message.messageType = 'author'; - message.messageSent = Date.now(); - message.messageDescription = `${toolOwner.name} added you as an author of the ${toolCopy.type} ${toolCopy.name}` - message.isRead = false; - message.messageObjectID = toolCopy.id; - message.messageID = parseInt(Math.random().toString().replace('0.', '')); - message.messageTo = author; - - await message.save(async (err) => { - if (err) { - return new Error({ success: false, error: err }); - } - return { success: true, id: message.messageID }; - }); - }); -}; + //store messages to alert a user has been added as an author + const toolLink = process.env.homeURL + '/tool/' + tool.id; + + //normal user + var toolCopy = JSON.parse(JSON.stringify(tool)); + + toolCopy.authors.push(0); + asyncModule.eachSeries(toolCopy.authors, async author => { + let message = new MessagesModel(); + message.messageType = 'author'; + message.messageSent = Date.now(); + message.messageDescription = `${toolOwner.name} added you as an author of the ${toolCopy.type} ${toolCopy.name}`; + message.isRead = false; + message.messageObjectID = toolCopy.id; + message.messageID = parseInt(Math.random().toString().replace('0.', '')); + message.messageTo = author; + + await message.save(async err => { + if (err) { + return new Error({ success: false, error: err }); + } + return { success: true, id: message.messageID }; + }); + }); +} function getObjectResult(type, searchAll, searchQuery, startIndex, limit) { - let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); - let q = ''; - - if (searchAll) { - q = Data.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "objects" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "toolID", as: "reviews" } } - ]).sort({ updatedAt : -1}).skip(parseInt(startIndex)).limit(parseInt(limit)); - } - else{ - q = Data.aggregate([ - { $match: newSearchQuery }, - { $lookup: { from: "tools", localField: "authors", foreignField: "id", as: "persons" } }, - { $lookup: { from: "tools", localField: "id", foreignField: "authors", as: "objects" } }, - { $lookup: { from: "reviews", localField: "id", foreignField: "toolID", as: "reviews" } } - ]).sort({ score: { $meta: "textScore" } }).skip(parseInt(startIndex)).limit(parseInt(limit)); - } - return new Promise((resolve, reject) => { - q.exec((err, data) => { - data.map(dat => { - dat.persons = helper.hidePrivateProfileDetails(dat.persons); - }); - if (typeof data === "undefined") resolve([]); - else resolve(data); - }) - }) -}; + let newSearchQuery = JSON.parse(JSON.stringify(searchQuery)); + let q = ''; + + if (searchAll) { + q = Data.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'authors', as: 'objects' } }, + { $lookup: { from: 'reviews', localField: 'id', foreignField: 'toolID', as: 'reviews' } }, + ]) + .sort({ updatedAt: -1 }) + 
.skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } else { + q = Data.aggregate([ + { $match: newSearchQuery }, + { $lookup: { from: 'tools', localField: 'authors', foreignField: 'id', as: 'persons' } }, + { $lookup: { from: 'tools', localField: 'id', foreignField: 'authors', as: 'objects' } }, + { $lookup: { from: 'reviews', localField: 'id', foreignField: 'toolID', as: 'reviews' } }, + ]) + .sort({ score: { $meta: 'textScore' } }) + .skip(parseInt(startIndex)) + .limit(parseInt(limit)); + } + return new Promise((resolve, reject) => { + q.exec((err, data) => { + data.map(dat => { + dat.persons = helper.hidePrivateProfileDetails(dat.persons); + }); + if (typeof data === 'undefined') resolve([]); + else resolve(data); + }); + }); +} + +function getCountsByStatus(type) { + let q = Data.find({ type: type }, { id: 1, name: 1, activeflag: 1 }); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const reviewCount = data.filter(dat => dat.activeflag === 'review').length; + const rejectedCount = data.filter(dat => dat.activeflag === 'rejected').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount: activeCount, reviewCount: reviewCount, rejectedCount: rejectedCount, archiveCount: archiveCount }; + + resolve(countSummary); + }); + }); +} + +function getCountsByStatusCreator(type, idString) { + let q = Data.find({ $and: [{ type: type }, { authors: parseInt(idString) }] }, { id: 1, name: 1, activeflag: 1 }); + + return new Promise((resolve, reject) => { + q.exec((err, data) => { + const activeCount = data.filter(dat => dat.activeflag === 'active').length; + const reviewCount = data.filter(dat => dat.activeflag === 'review').length; + const rejectedCount = data.filter(dat => dat.activeflag === 'rejected').length; + const archiveCount = data.filter(dat => dat.activeflag === 'archive').length; + + let countSummary = { activeCount: activeCount, reviewCount: reviewCount, rejectedCount: rejectedCount, archiveCount: archiveCount }; + + resolve(countSummary); + }); + }); +} + +function validateDocumentLinks(document_links) { + let documentLinksValidated = { doi: [], pdf: [], html: [] }; + if (document_links) { + document_links.doi.forEach(url => { + if (urlValidator.isDOILink(url)) { + documentLinksValidated.doi.push(urlValidator.validateURL(url)); + } else { + documentLinksValidated.html.push(urlValidator.validateURL(url)); + } + }); + document_links.pdf.forEach(url => { + if (urlValidator.isDOILink(url)) { + documentLinksValidated.doi.push(urlValidator.validateURL(url)); + } else { + documentLinksValidated.pdf.push(urlValidator.validateURL(url)); + } + }); + document_links.html.forEach(url => { + if (urlValidator.isDOILink(url)) { + documentLinksValidated.doi.push(urlValidator.validateURL(url)); + } else { + documentLinksValidated.html.push(urlValidator.validateURL(url)); + } + }); + } + return documentLinksValidated; +} -export { addTool, editTool, deleteTool, setStatus, getTools, getToolsAdmin } \ No newline at end of file +export { addTool, editTool, deleteTool, setStatus, getTools, getToolsAdmin, getAllTools }; diff --git a/src/resources/tool/review.model.js b/src/resources/tool/review.model.js index 9e92502c..9109a15b 100644 --- a/src/resources/tool/review.model.js +++ b/src/resources/tool/review.model.js @@ -1,23 +1,23 @@ import { model, Schema } from 'mongoose'; const ReviewsSchema = new Schema( - { - reviewID: Number, - 
toolID: Number, - reviewerID: Number, - rating: Number, - projectName: String, - review: String, - activeflag: String, - date: Date, - replierID: Number, - reply: String, - replydate: Date - }, - { - collection: 'reviews', - timestamps: true - } + { + reviewID: Number, + toolID: Number, + reviewerID: Number, + rating: Number, + projectName: String, + review: String, + activeflag: String, + date: Date, + replierID: Number, + reply: String, + replydate: Date, + }, + { + collection: 'reviews', + timestamps: true, + } ); -export const Reviews = model('Reviews', ReviewsSchema); \ No newline at end of file +export const Reviews = model('Reviews', ReviewsSchema); diff --git a/src/resources/tool/review.route.js b/src/resources/tool/review.route.js index bd8e0980..a2a90878 100644 --- a/src/resources/tool/review.route.js +++ b/src/resources/tool/review.route.js @@ -1,101 +1,90 @@ -import express from 'express' -import { ROLES } from '../user/user.roles' +import express from 'express'; +import { ROLES } from '../user/user.roles'; import { Reviews } from './review.model'; -import passport from "passport"; -import { utils } from "../auth"; +import passport from 'passport'; +import { utils } from '../auth'; import helper from '../utilities/helper.util'; const router = express.Router(); /** * {get} /accountsearch Search tools - * + * * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search */ -router.get( - '/admin/pending', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - - var r = Reviews.aggregate([ - { $lookup: { from: "tools", localField: "reviewerID", foreignField: "id", as: "person" } }, - { $lookup: { from: "tools", localField: "toolID", foreignField: "id", as: "tool" } } - ]); - r.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); - - data.map(dat => { - dat.person = helper.hidePrivateProfileDetails(dat.person); - }); - return res.json({ success: true, data: data }); - }); - }); - - /** - * {get} /accountsearch Search tools - * - * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. - * The free word search criteria can be improved on with node modules that specialize with searching i.e. 
js-search - */ - router.get( - '/pending', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Creator), - async (req, res) => { - - var idString = ""; - - if (req.query.id) { - idString = parseInt(req.query.id); - } - - var r = Reviews.aggregate([ - { $match: { $and: [{ activeflag: 'review' }, { reviewerID: idString }] } }, - { $lookup: { from: "tools", localField: "reviewerID", foreignField: "id", as: "person" } }, - { $lookup: { from: "tools", localField: "toolID", foreignField: "id", as: "tool" } } - ]); - r.exec((err, data) => { - var a = Reviews.aggregate([ - { $match: { $and: [{ activeflag: 'active' }, { reviewerID: idString }] } }, - { $lookup: { from: "tools", localField: "reviewerID", foreignField: "id", as: "person" } }, - { $lookup: { from: "tools", localField: "toolID", foreignField: "id", as: "tool" } } - ]); - a.exec((err, allReviews) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: data, allReviews: allReviews }); - }); - }); - }); - - /** - * {get} /accountsearch Search tools - * - * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. - * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search - */ - router.get('/', async (req, res) => { - - var reviewIDString = ""; - - if (req.query.id) { - reviewIDString = parseInt(req.query.id); - } - - var r = Reviews.aggregate([ - { $match: { $and: [{ activeflag: 'active' }, { reviewID: reviewIDString }] } }, - { $lookup: { from: "tools", localField: "reviewerID", foreignField: "id", as: "person" } }, - { $lookup: { from: "tools", localField: "toolID", foreignField: "id", as: "tool" } } - ]); - r.exec((err, data) => { - if (err) return res.json({ success: false, error: err }); +router.get('/admin/pending', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + var r = Reviews.aggregate([ + { $lookup: { from: 'tools', localField: 'reviewerID', foreignField: 'id', as: 'person' } }, + { $lookup: { from: 'tools', localField: 'toolID', foreignField: 'id', as: 'tool' } }, + ]); + r.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); - data.map(dat => { - dat.person = helper.hidePrivateProfileDetails(dat.person); - }); - return res.json({ success: true, data: data }); - }); - }); - - module.exports = router; \ No newline at end of file + data.map(dat => { + dat.person = helper.hidePrivateProfileDetails(dat.person); + }); + return res.json({ success: true, data: data }); + }); +}); + +/** + * {get} /accountsearch Search tools + * + * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. + * The free word search criteria can be improved on with node modules that specialize with searching i.e. 
js-search + */ +router.get('/pending', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Creator), async (req, res) => { + var idString = ''; + + if (req.query.id) { + idString = parseInt(req.query.id); + } + + var r = Reviews.aggregate([ + { $match: { $and: [{ activeflag: 'review' }, { reviewerID: idString }] } }, + { $lookup: { from: 'tools', localField: 'reviewerID', foreignField: 'id', as: 'person' } }, + { $lookup: { from: 'tools', localField: 'toolID', foreignField: 'id', as: 'tool' } }, + ]); + r.exec((err, data) => { + var a = Reviews.aggregate([ + { $match: { $and: [{ activeflag: 'active' }, { reviewerID: idString }] } }, + { $lookup: { from: 'tools', localField: 'reviewerID', foreignField: 'id', as: 'person' } }, + { $lookup: { from: 'tools', localField: 'toolID', foreignField: 'id', as: 'tool' } }, + ]); + a.exec((err, allReviews) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: data, allReviews: allReviews }); + }); + }); +}); + +/** + * {get} /accountsearch Search tools + * + * Return list of tools, this can be with filters or/and search criteria. This will also include pagination on results. + * The free word search criteria can be improved on with node modules that specialize with searching i.e. js-search + */ +router.get('/', async (req, res) => { + var reviewIDString = ''; + + if (req.query.id) { + reviewIDString = parseInt(req.query.id); + } + + var r = Reviews.aggregate([ + { $match: { $and: [{ activeflag: 'active' }, { reviewID: reviewIDString }] } }, + { $lookup: { from: 'tools', localField: 'reviewerID', foreignField: 'id', as: 'person' } }, + { $lookup: { from: 'tools', localField: 'toolID', foreignField: 'id', as: 'tool' } }, + ]); + r.exec((err, data) => { + if (err) return res.json({ success: false, error: err }); + + data.map(dat => { + dat.person = helper.hidePrivateProfileDetails(dat.person); + }); + return res.json({ success: true, data: data }); + }); +}); + +module.exports = router; diff --git a/src/resources/tool/tool.route.js b/src/resources/tool/tool.route.js index aa1d5dfd..c03c9ee0 100644 --- a/src/resources/tool/tool.route.js +++ b/src/resources/tool/tool.route.js @@ -6,13 +6,7 @@ import passport from 'passport'; import { utils } from '../auth'; import { UserModel } from '../user/user.model'; import { MessagesModel } from '../message/message.model'; -import { - addTool, - editTool, - setStatus, - getTools, - getToolsAdmin, -} from '../tool/data.repository'; +import { addTool, editTool, setStatus, getTools, getToolsAdmin, getAllTools } from '../tool/data.repository'; import emailGenerator from '../utilities/emailGenerator.util'; import inputSanitizer from '../utilities/inputSanitizer'; import _ from 'lodash'; @@ -24,82 +18,67 @@ const router = express.Router(); // @router POST /api/v1/add // @desc Add tools user // @access Private -router.post( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await addTool(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } -); +router.post('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await addTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); +}); // @router PUT /api/v1/{id} // @desc Edit tools user // @access Private // 
router.put('/{id}', -router.put( - '/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - await editTool(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, error: err.message }); - }); - } -); +router.put('/:id', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + await editTool(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, error: err.message }); + }); +}); // @router GET /api/v1/get/admin -// @desc Returns List of Tool objects +// @desc Returns List of Tool objects // @access Private -router.get( - '/getList', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - req.params.type = 'tool'; - let role = req.user.role; +router.get('/getList', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + req.params.type = 'tool'; + let role = req.user.role; - if (role === ROLES.Admin) { - await getToolsAdmin(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } else if (role === ROLES.Creator) { - await getTools(req) - .then((data) => { - return res.json({ success: true, data }); - }) - .catch((err) => { - return res.json({ success: false, err }); - }); - } + if (role === ROLES.Admin) { + await getToolsAdmin(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); + } else if (role === ROLES.Creator) { + await getTools(req) + .then(data => { + return res.json({ success: true, data }); + }) + .catch(err => { + return res.json({ success: false, err }); + }); } -); +}); -// @router GET /api/v1/ +// @router GET /api/v1/ // @desc Returns List of Tool Objects No auth // This unauthenticated route was created specifically for API-docs // @access Public router.get('/', async (req, res) => { req.params.type = 'tool'; - await getToolsAdmin(req) - .then((data) => { + await getAllTools(req) + .then(data => { return res.json({ success: true, data }); }) - .catch((err) => { + .catch(err => { return res.json({ success: false, err }); }); }); @@ -107,20 +86,15 @@ router.get('/', async (req, res) => { // @router PATCH /api/v1/status // @desc Set tool status // @access Private -router.patch( - '/:id', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - await setStatus(req) - .then((response) => { - return res.json({ success: true, response }); - }) - .catch((err) => { - return res.json({ success: false, error: err.message }); - }); - } -); +router.patch('/:id', passport.authenticate('jwt'), async (req, res) => { + await setStatus(req) + .then(response => { + return res.json({ success: true, response }); + }) + .catch(err => { + return res.json({ success: false, error: err.message }); + }); +}); /** * {get} /tool/:id Tool @@ -146,6 +120,13 @@ router.get('/:id', async (req, res) => { as: 'uploaderIs', }, }, + { + $addFields: { + uploader: { + $concat: [{ $arrayElemAt: ['$uploaderIs.firstname', 0] }, ' ', { $arrayElemAt: ['$uploaderIs.lastname', 0] }], + }, + }, + }, ]); query.exec((err, data) => { if (data.length > 0) { @@ -153,15 +134,13 @@ router.get('/:id', async (req, res) => { var p = Data.aggregate([ { $match: { - 
$and: [ - { relatedObjects: { $elemMatch: { objectId: req.params.id } } }, - ], + $and: [{ relatedObjects: { $elemMatch: { objectId: req.params.id } } }], }, }, ]); p.exec((err, relatedData) => { - relatedData.forEach((dat) => { - dat.relatedObjects.forEach((x) => { + relatedData.forEach(dat => { + dat.relatedObjects.forEach(x => { if (x.objectId === req.params.id && dat.id !== req.params.id) { let relatedObject = { objectId: dat.id, @@ -170,10 +149,7 @@ router.get('/:id', async (req, res) => { user: x.user, updated: x.updated, }; - data[0].relatedObjects = [ - relatedObject, - ...(data[0].relatedObjects || []), - ]; + data[0].relatedObjects = [relatedObject, ...(data[0].relatedObjects || [])]; } }); }); @@ -181,10 +157,7 @@ router.get('/:id', async (req, res) => { var r = Reviews.aggregate([ { $match: { - $and: [ - { toolID: parseInt(req.params.id) }, - { activeflag: 'active' }, - ], + $and: [{ toolID: parseInt(req.params.id) }, { activeflag: 'active' }], }, }, { $sort: { date: -1 } }, @@ -207,10 +180,10 @@ router.get('/:id', async (req, res) => { ]); r.exec(async (err, reviewData) => { if (err) return res.json({ success: false, error: err }); - + reviewData.map(reviewDat => { reviewDat.person = helper.hidePrivateProfileDetails(reviewDat.person); - reviewDat.owner= helper.hidePrivateProfileDetails(reviewDat.owner); + reviewDat.owner = helper.hidePrivateProfileDetails(reviewDat.owner); }); return res.json({ @@ -262,32 +235,27 @@ router.get('/edit/:id', async (req, res) => { * When they submit, authenticate the user, validate the data and add review data to the DB. * We will also check the review (Free word entry) for exclusion data (node module?) */ -router.post( - '/review/add', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - let reviews = new Reviews(); - const { toolID, reviewerID, rating, projectName, review } = req.body; +router.post('/review/add', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + let reviews = new Reviews(); + const { toolID, reviewerID, rating, projectName, review } = req.body; - reviews.reviewID = parseInt(Math.random().toString().replace('0.', '')); - reviews.toolID = toolID; - reviews.reviewerID = reviewerID; - reviews.rating = rating; - reviews.projectName = inputSanitizer.removeNonBreakingSpaces(projectName); - reviews.review = inputSanitizer.removeNonBreakingSpaces(review); - reviews.activeflag = 'review'; - reviews.date = Date.now(); + reviews.reviewID = parseInt(Math.random().toString().replace('0.', '')); + reviews.toolID = toolID; + reviews.reviewerID = reviewerID; + reviews.rating = rating; + reviews.projectName = inputSanitizer.removeNonBreakingSpaces(projectName); + reviews.review = inputSanitizer.removeNonBreakingSpaces(review); + reviews.activeflag = 'review'; + reviews.date = Date.now(); - reviews.save(async (err) => { - if (err) { - return res.json({ success: false, error: err }); - } else { - return res.json({ success: true, id: reviews.reviewID }); - } - }); - } -); + reviews.save(async err => { + if (err) { + return res.json({ success: false, error: err }); + } else { + return res.json({ success: true, id: reviews.reviewID }); + } + }); +}); /** * {post} /tool/reply/add Add reply @@ -296,94 +264,74 @@ router.post( * When they submit, authenticate the user, validate the data and add reply data to the DB. * We will also check the review (Free word entry) for exclusion data (node module?) 
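 *
 * A minimal client-side sketch of calling this reply endpoint, for illustration only.
 * The '/api/v1/tool' mount path and the cookie-carried JWT session are assumptions drawn
 * from the surrounding @router comments and passport.authenticate('jwt') usage, not
 * something this diff confirms.
 *
 *   // assumes the caller already holds an authenticated session recognised by the JWT strategy
 *   await fetch('/api/v1/tool/reply', {
 *     method: 'POST',
 *     credentials: 'include',
 *     headers: { 'Content-Type': 'application/json' },
 *     body: JSON.stringify({ reviewID: 123456789, replierID: 42, reply: 'Thanks for the review' }),
 *   });
 *
 * The handler looks the review up by reviewID and writes replierID, the sanitised reply
 * text and replydate in a single findOneAndUpdate call.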
*/ -router.post( - '/reply', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - const { reviewID, replierID, reply } = req.body; - Reviews.findOneAndUpdate( - { reviewID: reviewID }, - { - replierID: replierID, - reply: inputSanitizer.removeNonBreakingSpaces(reply), - replydate: Date.now(), - }, - (err) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true }); - } - ); - } -); +router.post('/reply', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + const { reviewID, replierID, reply } = req.body; + Reviews.findOneAndUpdate( + { reviewID: reviewID }, + { + replierID: replierID, + reply: inputSanitizer.removeNonBreakingSpaces(reply), + replydate: Date.now(), + }, + err => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true }); + } + ); +}); /** * {post} /tool/review/approve Approve review * * Authenticate user to see if user can approve. */ -router.put( - '/review/approve', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - const { id, activeflag } = req.body; - Reviews.findOneAndUpdate( - { reviewID: id }, - { - activeflag: activeflag, - }, - (err) => { - if (err) return res.json({ success: false, error: err }); +router.put('/review/approve', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + const { id, activeflag } = req.body; + Reviews.findOneAndUpdate( + { reviewID: id }, + { + activeflag: activeflag, + }, + err => { + if (err) return res.json({ success: false, error: err }); - return res.json({ success: true }); - } - ).then(async (res) => { - const review = await Reviews.findOne({ reviewID: id }); + return res.json({ success: true }); + } + ).then(async res => { + const review = await Reviews.findOne({ reviewID: id }); - await storeNotificationMessages(review); + await storeNotificationMessages(review); - // Send email notififcation of approval to authors and admins who have opted in - await sendEmailNotifications(review); - }); - } -); + // Send email notififcation of approval to authors and admins who have opted in + await sendEmailNotifications(review); + }); +}); /** * {delete} /tool/review/reject Reject review * * Authenticate user to see if user can reject. */ -router.delete( - '/review/reject', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin), - async (req, res) => { - const { id } = req.body; - Reviews.findOneAndDelete({ reviewID: id }, (err) => { - if (err) return res.send(err); - return res.json({ success: true }); - }); - } -); +router.delete('/review/reject', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { + const { id } = req.body; + Reviews.findOneAndDelete({ reviewID: id }, err => { + if (err) return res.send(err); + return res.json({ success: true }); + }); +}); /** * {delete} /tool/review/delete Delete review * * When they delete, authenticate the user and remove the review data from the DB. 
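 *
 * A minimal client-side sketch of calling this delete endpoint, for illustration only
 * (the '/api/v1/tool' mount path is an assumption based on the @router comments above):
 *
 *   await fetch('/api/v1/tool/review/delete', {
 *     method: 'DELETE',
 *     credentials: 'include',
 *     headers: { 'Content-Type': 'application/json' },
 *     body: JSON.stringify({ id: 987654321 }),
 *   });
 *
 * As written in this diff the handler removes a document from the tools Data collection
 * by its id, whereas reviews themselves live in the Reviews collection keyed by reviewID.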
*/ -router.delete( - '/review/delete', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - const { id } = req.body; - Data.findOneAndDelete({ id: id }, (err) => { - if (err) return res.send(err); - return res.json({ success: true }); - }); - } -); +router.delete('/review/delete', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + const { id } = req.body; + Data.findOneAndDelete({ id: id }, err => { + if (err) return res.send(err); + return res.json({ success: true }); + }); +}); //Validation required if Delete is to be implemented // router.delete('/:id', @@ -403,41 +351,31 @@ router.delete( // @router GET /api/v1/project/tag/name // @desc Get tools by tag search // @access Private -router.get( - '/:type/tag/:name', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - try { - // 1. Destructure tag name parameter passed - let { type, name } = req.params; - // 2. Check if parameters are empty - if (_.isEmpty(name) || _.isEmpty(type)) { - return res - .status(400) - .json({ - success: false, - message: 'Entity type and tag are required', - }); - } - // 3. Find matching projects in MongoDb selecting name and id - let entities = await Data.find({ - $and: [ - { type }, - { $or: [{ 'tags.topics': name }, { 'tags.features': name }] }, - ], - }).select('id name'); - // 4. Return projects - return res.status(200).json({ success: true, entities }); - } catch (err) { - console.error(err.message); - return res.status(500).json({ +router.get('/:type/tag/:name', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + try { + // 1. Destructure tag name parameter passed + let { type, name } = req.params; + // 2. Check if parameters are empty + if (_.isEmpty(name) || _.isEmpty(type)) { + return res.status(400).json({ success: false, - message: 'An error occurred searching for tools by tag', + message: 'Entity type and tag are required', }); } + // 3. Find matching projects in MongoDb selecting name and id + let entities = await Data.find({ + $and: [{ type }, { $or: [{ 'tags.topics': name }, { 'tags.features': name }] }], + }).select('id name'); + // 4. 
Return projects + return res.status(200).json({ success: true, entities }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred searching for tools by tag', + }); } -); +}); module.exports = router; @@ -445,8 +383,7 @@ async function storeNotificationMessages(review) { const tool = await Data.findOne({ id: review.toolID }); //Get reviewer name const reviewer = await UserModel.findOne({ id: review.reviewerID }); - const toolLink = - process.env.homeURL + '/tool/' + review.toolID + '/' + tool.name; + const toolLink = process.env.homeURL + '/tool/' + review.toolID + '/' + tool.name; //admins let message = new MessagesModel(); message.messageID = parseInt(Math.random().toString().replace('0.', '')); @@ -457,16 +394,16 @@ async function storeNotificationMessages(review) { message.isRead = false; message.messageDescription = `${reviewer.firstname} ${reviewer.lastname} gave a ${review.rating}-star review to your tool ${tool.name} ${toolLink}`; - await message.save(async (err) => { + await message.save(async err => { if (err) { return new Error({ success: false, error: err }); } }); //authors const authors = tool.authors; - authors.forEach(async (author) => { + authors.forEach(async author => { message.messageTo = author; - await message.save(async (err) => { + await message.save(async err => { if (err) { return new Error({ success: false, error: err }); } diff --git a/src/resources/topic/topic.controller.js b/src/resources/topic/topic.controller.js index 41ec3997..6cdf41a2 100644 --- a/src/resources/topic/topic.controller.js +++ b/src/resources/topic/topic.controller.js @@ -3,199 +3,203 @@ import { TopicModel } from './topic.model'; import { Data as ToolModel } from '../tool/data.model'; import _ from 'lodash'; module.exports = { - buildRecipients: async (team, createdBy) => { - // 1. Cause error if no members found - if(_.isNull(team)) { - console.error('A topic cannot be created without a receiving team'); - return []; - } - let { members } = team; - if(_.isNull(members || members.length === 0)) { - console.error('A topic cannot be created with only the creating user'); - return []; - } - let recipients = members.map(m => m.memberid); - // 2. Return team recipients plus the user that created the message - recipients = [...recipients, createdBy]; - return recipients; - }, - buildTopic: async (context) => { - try { - let title = ''; - let subTitle = ''; - let datasets = []; - let tags = []; - const { createdBy, relatedObjectIds } = context; - // 1. Topic cannot be created without related object i.e. data/project/tool/paper - if(_.isEmpty(relatedObjectIds)) { - console.error('No related object Id passed to build topic'); - return undefined; - } - // 2. Find the related object(s) in MongoDb and include team data - const tools = await ToolModel.find().where('_id').in(relatedObjectIds).populate({ path: 'publisher', populate: { path: 'team' }}); - // 3. Return undefined if no object exists - if(_.isEmpty(tools)) { - console.error(`Failed to find related tool(s) with objectId(s): ${relatedObjectIds.join(', ')}`); - return undefined; - } - // 4. Iterate through each tool - tools.forEach(tool => { - // 5. Switch based on related object type - switch(tool.type) { - // 6. If dataset, we require the publisher - case 'dataset': - let { name: datasetTitle, datasetid = '', datasetfields: { publisher } } = tool; - // set title of topic which is publisher - title = publisher; - subTitle = _.isEmpty(subTitle) ? 
datasetTitle : `${subTitle}, ${datasetTitle}` - datasets.push({ datasetId: datasetid, publisher}); - tags.push(datasetTitle); - break; - default: - console.log('default'); - } - }); - // 7. Get recipients for topic/message using the first tool (same team exists as each publisher is the same) - let { publisher = '' } = tools[0]; - if(_.isEmpty(publisher)) { - console.error(`No publisher associated to this dataset`); - return undefined; - } - let { team = [] } = publisher; - if(_.isEmpty(team)) { - console.error(`No team associated to publisher, cannot message`); - return undefined; - } - const recipients = await module.exports.buildRecipients(team, createdBy); - if(_.isEmpty(recipients)) { - console.error('A topic cannot be created without recipients'); - return undefined; - } - // Future extension could be to iterate through tools at this point to generate a topic for each publisher - // This also requires refactor of above code to break down dataset titles into individual messages - // 8. Create new topic against related objects with recipients - const topic = await TopicModel.create({ - title, - subTitle, - relatedObjectIds, - createdBy, - createdDate: Date.now(), - recipients, - datasets, - tags - }); - // 9. Return created object - return topic; - } catch (err) { - console.error(err.message); - return undefined; - } - }, - findTopic: async (topicId, userId) => { - try { - const topic = await TopicModel.findOne({ - _id: new mongoose.Types.ObjectId(topicId), - recipients: { $elemMatch : { $eq: userId }} - }); - if (!topic) - return undefined - // Append property to indicate the number of unread messages - topic.topicMessages.forEach(message => { - if(!message.readBy.includes(userId)) { - topic.unreadMessages ++; - } - }) - return topic; - } - catch (err) { - console.error(err.message); - return undefined; - } - }, - // POST /api/v1/topics - createTopic: async (req, res) => { - try { - const { _id: createdBy } = req.user; - const { relatedObjectIds } = req.body; - const topic = await buildTopic({createdBy, relatedObjectIds }); - if(!topic) - return res.status(500).json({ success: false, message: 'Could not save topic to database.' }); - - return res.status(201).json({ success: true, topic }); - } catch (err) { - console.error(err.message); - return res.status(500).json(err); - } - }, - // DELETE api/v1/topics/:id - deleteTopic: async(req, res) => { - try { - const { id } = req.params; - if(!id) - return res.status(404).json({ success: false, message: 'Topic Id not found.' }); - const topic = await TopicModel.findByIdAndUpdate( id, { isDeleted: true, status: 'closed', expiryDate: Date.now() }, {new: true}); - console.log(topic); - return res.status(204).json({success: true}); - } catch (err) { - console.error(err.message); - return res.status(500).json(err); - } - }, - // GET api/v1/topics - getTopics: async(req, res) => { - // check if user / publisher - try { - let {_id: userId} = req.user; - const topics = await TopicModel.find({ - recipients: { $elemMatch : { $eq: userId }}, - status: 'active' - }); - // Append property to indicate the number of unread messages - topics.forEach(topic => { - topic.unreadMessages = 0; - topic.topicMessages.forEach(message => { - if(!message.readBy.includes(userId)) { - topic.unreadMessages ++; - } - // Calculate last unread message date at topic level - topic.lastUnreadMessage = topic.topicMessages.reduce((a, b) => { - console.log (Date(a.createdDate) > new Date(b.createdDate) ? a : b); - return (new Date(a.createdDate) > new Date(b.createdDate) ? 
a : b).createdDate; - }); - }) - }); - // Sort topics by most unread first followed by created date - topics.sort((a, b) => b.unreadMessages - a.unreadMessages || b.lastUnreadMessage - a.lastUnreadMessage || b.createdDate - a.createdDate); - return res.status(200).json({ success: true, topics }); - } catch(err) { - console.error(err.message); - return res.status(500).json(err); - } - }, - // GET api/v1/topics/:id - getTopicById: async(req, res) => { - try { - // 1. Get the topic from the database - const topic = await module.exports.findTopic(req.params.id, req.user._id); - // 2. Keep a copy of the unmodified topic for returning in this response - const dispatchTopic = topic.toJSON(); - if(!topic) - return res.status(404).json({ success: false, message: 'Could not find topic specified.' }); - // 3. If there any unread messages, mark them as read - if(topic.unreadMessages > 0) { - topic.topicMessages.forEach(async (message) => { - message.readBy.push(req.user._id) - await message.save(); - }); - topic.unreadMessages = 0; - // 4. Save topic to Mongo - await topic.save(); - } - // 5. Return original topic so unread messages are displayed correctly - return res.status(200).json({ success: true, topic: dispatchTopic }); - } catch (err) { - console.error(err.message); - return res.status(500).json(err); - } - } -} \ No newline at end of file + buildRecipients: async (team, createdBy) => { + // 1. Cause error if no members found + if (_.isNull(team)) { + console.error('A topic cannot be created without a receiving team'); + return []; + } + let { members } = team; + if (_.isNull(members || members.length === 0)) { + console.error('A topic cannot be created with only the creating user'); + return []; + } + let recipients = members.map(m => m.memberid); + // 2. Return team recipients plus the user that created the message + recipients = [...recipients, createdBy]; + return recipients; + }, + buildTopic: async context => { + try { + let title = ''; + let subTitle = ''; + let datasets = []; + let tags = []; + const { createdBy, relatedObjectIds } = context; + // 1. Topic cannot be created without related object i.e. data/project/tool/paper + if (_.isEmpty(relatedObjectIds)) { + console.error('No related object Id passed to build topic'); + return undefined; + } + // 2. Find the related object(s) in MongoDb and include team data + const tools = await ToolModel.find() + .where('_id') + .in(relatedObjectIds) + .populate({ path: 'publisher', populate: { path: 'team' } }); + // 3. Return undefined if no object exists + if (_.isEmpty(tools)) { + console.error(`Failed to find related tool(s) with objectId(s): ${relatedObjectIds.join(', ')}`); + return undefined; + } + // 4. Iterate through each tool + tools.forEach(tool => { + // 5. Switch based on related object type + switch (tool.type) { + // 6. If dataset, we require the publisher + case 'dataset': + let { + name: datasetTitle, + datasetid = '', + datasetfields: { publisher }, + } = tool; + // set title of topic which is publisher + title = publisher; + subTitle = _.isEmpty(subTitle) ? datasetTitle : `${subTitle}, ${datasetTitle}`; + datasets.push({ datasetId: datasetid, publisher }); + tags.push(datasetTitle); + break; + default: + console.log('default'); + } + }); + // 7. 
Get recipients for topic/message using the first tool (same team exists as each publisher is the same) + let { publisher = '' } = tools[0]; + if (_.isEmpty(publisher)) { + console.error(`No publisher associated to this dataset`); + return undefined; + } + let { team = [] } = publisher; + if (_.isEmpty(team)) { + console.error(`No team associated to publisher, cannot message`); + return undefined; + } + const recipients = await module.exports.buildRecipients(team, createdBy); + if (_.isEmpty(recipients)) { + console.error('A topic cannot be created without recipients'); + return undefined; + } + // Future extension could be to iterate through tools at this point to generate a topic for each publisher + // This also requires refactor of above code to break down dataset titles into individual messages + // 8. Create new topic against related objects with recipients + const topic = await TopicModel.create({ + title, + subTitle, + relatedObjectIds, + createdBy, + createdDate: Date.now(), + recipients, + datasets, + tags, + }); + // 9. Return created object + return topic; + } catch (err) { + console.error(err.message); + return undefined; + } + }, + findTopic: async (topicId, userId) => { + try { + const topic = await TopicModel.findOne({ + _id: new mongoose.Types.ObjectId(topicId), + recipients: { $elemMatch: { $eq: userId } }, + }); + if (!topic) return undefined; + // Append property to indicate the number of unread messages + topic.topicMessages.forEach(message => { + if (!message.readBy.includes(userId)) { + topic.unreadMessages++; + } + }); + return topic; + } catch (err) { + console.error(err.message); + return undefined; + } + }, + // POST /api/v1/topics + createTopic: async (req, res) => { + try { + const { _id: createdBy } = req.user; + const { relatedObjectIds } = req.body; + const topic = await buildTopic({ createdBy, relatedObjectIds }); + if (!topic) return res.status(500).json({ success: false, message: 'Could not save topic to database.' }); + + return res.status(201).json({ success: true, topic }); + } catch (err) { + console.error(err.message); + return res.status(500).json(err); + } + }, + // DELETE api/v1/topics/:id + deleteTopic: async (req, res) => { + try { + const { id } = req.params; + if (!id) return res.status(404).json({ success: false, message: 'Topic Id not found.' }); + const topic = await TopicModel.findByIdAndUpdate(id, { isDeleted: true, status: 'closed', expiryDate: Date.now() }, { new: true }); + console.log(topic); + return res.status(204).json({ success: true }); + } catch (err) { + console.error(err.message); + return res.status(500).json(err); + } + }, + // GET api/v1/topics + getTopics: async (req, res) => { + // check if user / publisher + try { + let { _id: userId } = req.user; + const topics = await TopicModel.find({ + recipients: { $elemMatch: { $eq: userId } }, + status: 'active', + }); + // Append property to indicate the number of unread messages + topics.forEach(topic => { + topic.unreadMessages = 0; + topic.topicMessages.forEach(message => { + if (!message.readBy.includes(userId)) { + topic.unreadMessages++; + } + // Calculate last unread message date at topic level + topic.lastUnreadMessage = topic.topicMessages.reduce((a, b) => { + console.log(Date(a.createdDate) > new Date(b.createdDate) ? a : b); + return (new Date(a.createdDate) > new Date(b.createdDate) ? 
a : b).createdDate; + }); + }); + }); + // Sort topics by most unread first followed by created date + topics.sort( + (a, b) => b.unreadMessages - a.unreadMessages || b.lastUnreadMessage - a.lastUnreadMessage || b.createdDate - a.createdDate + ); + return res.status(200).json({ success: true, topics }); + } catch (err) { + console.error(err.message); + return res.status(500).json(err); + } + }, + // GET api/v1/topics/:id + getTopicById: async (req, res) => { + try { + // 1. Get the topic from the database + const topic = await module.exports.findTopic(req.params.id, req.user._id); + // 2. Keep a copy of the unmodified topic for returning in this response + const dispatchTopic = topic.toJSON(); + if (!topic) return res.status(404).json({ success: false, message: 'Could not find topic specified.' }); + // 3. If there any unread messages, mark them as read + if (topic.unreadMessages > 0) { + topic.topicMessages.forEach(async message => { + message.readBy.push(req.user._id); + await message.save(); + }); + topic.unreadMessages = 0; + // 4. Save topic to Mongo + await topic.save(); + } + // 5. Return original topic so unread messages are displayed correctly + return res.status(200).json({ success: true, topic: dispatchTopic }); + } catch (err) { + console.error(err.message); + return res.status(500).json(err); + } + }, +}; diff --git a/src/resources/topic/topic.model.js b/src/resources/topic/topic.model.js index 00e54855..d60e1c68 100644 --- a/src/resources/topic/topic.model.js +++ b/src/resources/topic/topic.model.js @@ -1,93 +1,100 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -const TopicSchema = new Schema({ - title: { - type: String, - default: '', - trim: true - }, - subTitle: { - type: String, - default: '', - trim: true - }, - recipients: [{ - type: Schema.Types.ObjectId, - ref: 'User' - }], - status: { - type: String, - enum : ['active','closed'], - default: 'active' - }, - createdDate: { - type: Date, - default: Date.now - }, - exiryDate: { - type: Date - }, - createdBy: { - type: Schema.Types.ObjectId, - ref: 'User' - }, - relatedObjectIds: [{ - type: Schema.Types.ObjectId, - ref: 'Data' - }], - isDeleted: { - type: Boolean, - default: false - }, - unreadMessages: { - type: Number, - default: 0 - }, - lastUnreadMessage: { - type: Date - }, - datasets: [{ - datasetId: { - type: String - }, - publisher: { - type: String - } - }], - tags: [{ - type: String - }] -}, { - toJSON: { virtuals: true }, - toObject: { virtuals: true } -}); +const TopicSchema = new Schema( + { + title: { + type: String, + default: '', + trim: true, + }, + subTitle: { + type: String, + default: '', + trim: true, + }, + recipients: [ + { + type: Schema.Types.ObjectId, + ref: 'User', + }, + ], + status: { + type: String, + enum: ['active', 'closed'], + default: 'active', + }, + createdDate: { + type: Date, + default: Date.now, + }, + exiryDate: { + type: Date, + }, + createdBy: { + type: Schema.Types.ObjectId, + ref: 'User', + }, + relatedObjectIds: [ + { + type: Schema.Types.ObjectId, + ref: 'Data', + }, + ], + isDeleted: { + type: Boolean, + default: false, + }, + unreadMessages: { + type: Number, + default: 0, + }, + lastUnreadMessage: { + type: Date, + }, + datasets: [ + { + datasetId: { + type: String, + }, + publisher: { + type: String, + }, + }, + ], + tags: [ + { + type: String, + }, + ], + }, + { + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); // Virtual Populate - Topic to bring back messages if topics querried messages without persisting it 
to the db, it doesnt slow down the query - populate in route TopicSchema.virtual('topicMessages', { - ref: 'Messages', - foreignField: 'topic', - localField: '_id' + ref: 'Messages', + foreignField: 'topic', + localField: '_id', }); -TopicSchema.pre(/^find/, function(next) { - this.populate({ - path: 'createdBy', - select: 'firstname lastname', - path: 'topicMessages', - select: 'messageDescription createdDate isRead _id readBy', - options: { sort: '-createdDate' }, - populate: { - path: 'createdBy', - model: 'User', - select: '-_id firstname lastname' - } - }); +TopicSchema.pre(/^find/, function (next) { + this.populate({ + path: 'createdBy', + select: 'firstname lastname', + path: 'topicMessages', + select: 'messageDescription createdDate isRead _id readBy', + options: { sort: '-createdDate' }, + populate: { + path: 'createdBy', + model: 'User', + select: '-_id firstname lastname', + }, + }); - next(); + next(); }); - - - - -export const TopicModel = model('Topics', TopicSchema); \ No newline at end of file +export const TopicModel = model('Topics', TopicSchema); diff --git a/src/resources/topic/topic.route.js b/src/resources/topic/topic.route.js index c843482c..151e9496 100644 --- a/src/resources/topic/topic.route.js +++ b/src/resources/topic/topic.route.js @@ -24,5 +24,4 @@ router.get('/', passport.authenticate('jwt'), topicController.getTopics); // @access Private router.get('/:id', passport.authenticate('jwt'), topicController.getTopicById); - -module.exports = router \ No newline at end of file +module.exports = router; diff --git a/src/resources/user/user.model.js b/src/resources/user/user.model.js index f87fab3c..e68db2fe 100644 --- a/src/resources/user/user.model.js +++ b/src/resources/user/user.model.js @@ -1,40 +1,46 @@ -import { model, Schema } from 'mongoose' +import { model, Schema } from 'mongoose'; -const UserSchema = new Schema({ - id: { - type: Number, - unique: true - }, - email: String, - password: String, - businessName: String, - firstname: String, - lastname: String, - displayname: String, - providerId: { type: String, required: true }, - provider: String, - role: String, - redirectURL: String, - discourseUsername: String, - discourseKey: String -}, { - timestamps: true, - toJSON: { virtuals: true }, - toObject: { virtuals: true } -}); +const UserSchema = new Schema( + { + id: { + type: Number, + unique: true, + }, + email: String, + password: String, + businessName: String, + firstname: String, + lastname: String, + displayname: String, + providerId: { type: String, required: true }, + provider: String, + role: String, + redirectURL: String, + discourseUsername: String, + discourseKey: String, + isServiceAccount: { type: Boolean, default: false }, + clientId: String, + clientSecret: String + }, + { + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); UserSchema.virtual('additionalInfo', { - ref: 'Data', - foreignField: 'id', - localField: 'id', - justOne: true, - options: { select: 'bio link orcid activeflag organisation emailNotifications terms -id -_id' } + ref: 'Data', + foreignField: 'id', + localField: 'id', + justOne: true, + options: { select: 'bio link orcid activeflag organisation emailNotifications terms -id -_id' }, }); UserSchema.virtual('teams', { - ref: 'Team', - foreignField: 'members.memberid', - localField: '_id' + ref: 'Team', + foreignField: 'members.memberid', + localField: '_id', }); -export const UserModel = model('User', UserSchema) +export const UserModel = model('User', UserSchema); diff --git 
a/src/resources/user/user.register.route.js b/src/resources/user/user.register.route.js index c6260a5f..e67ad06f 100644 --- a/src/resources/user/user.register.route.js +++ b/src/resources/user/user.register.route.js @@ -1,112 +1,130 @@ -import express from 'express' -import { to } from 'await-to-js' -import { login } from '../auth/strategies/jwt' -import { updateUser } from '../user/user.service' -import { createPerson } from '../person/person.service' -import { getUserByUserId } from '../user/user.repository' -import { registerDiscourseUser } from '../discourse/discourse.service' +import express from 'express'; +import { to } from 'await-to-js'; +import { login } from '../auth/strategies/jwt'; +import { updateUser } from '../user/user.service'; +import { createPerson } from '../person/person.service'; +import { getUserByUserId } from '../user/user.repository'; +import { registerDiscourseUser } from '../discourse/discourse.service'; const urlValidator = require('../utilities/urlValidator'); const eventLogController = require('../eventlog/eventlog.controller'); -const router = express.Router() +const router = express.Router(); // @router Get /auth/register // @desc Pulls user details to complete registration // @access Public -router.get('/:personID', - async (req, res) => { - const [err, user] = await to(getUserByUserId(req.params.personID)) - - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, data: user }); +router.get('/:personID', async (req, res) => { + const [err, user] = await to(getUserByUserId(req.params.personID)); + + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, data: user }); }); // @router POST /auth/register // @desc Register user // @access Public -router.post('/', - async (req, res) => { - const { id, firstname, lastname, email, bio, showBio, showLink, showOrcid, redirectURL, sector, showSector, organisation, emailNotifications, terms, tags, showDomain, showOrganisation, profileComplete } = req.body - let link = urlValidator.validateURL(req.body.link); - let orcid = urlValidator.validateOrcidURL(req.body.orcid); - let username = `${firstname.toLowerCase()}.${lastname.toLowerCase()}`; - let discourseUsername, discourseKey = ''; - - if (!/\b[a-zA-Z0-9-_.]+\@[a-zA-Z0-9-_]+\.\w+(?:\.\w+)?\b/.test(email)) { - return res.status(500).json({ success: false, data: 'Enter a valid email address.' }) - } +router.post('/', async (req, res) => { + const { + id, + firstname, + lastname, + email, + bio, + showBio, + showLink, + showOrcid, + redirectURL, + sector, + showSector, + organisation, + emailNotifications, + terms, + tags, + showDomain, + showOrganisation, + profileComplete, + } = req.body; + let link = urlValidator.validateURL(req.body.link); + let orcid = urlValidator.validateOrcidURL(req.body.orcid); + let username = `${firstname.toLowerCase()}.${lastname.toLowerCase()}`; + let discourseUsername, + discourseKey = ''; + + if (!/\b[a-zA-Z0-9-_.]+\@[a-zA-Z0-9-_]+\.\w+(?:\.\w+)?\b/.test(email)) { + return res.status(500).json({ success: false, data: 'Enter a valid email address.' }); + } - // 1. Update existing user record created during login - let [userErr, user] = await to( - updateUser({ - id, - firstname, - lastname, - email, - discourseKey, - discourseUsername - }) - ) - - // 2. 
Create person entry in tools - let [personErr, person] = await to( - createPerson({ - id, - firstname, - lastname, - bio, - showBio, - link, - showLink, - orcid, - showOrcid, - emailNotifications, - terms, - sector, - showSector, - organisation, - tags, - showDomain, - showOrganisation, - profileComplete - }) - ) + // 1. Update existing user record created during login + let [userErr, user] = await to( + updateUser({ + id, + firstname, + lastname, + email, + discourseKey, + discourseUsername, + }) + ); - // 3. Create Discourse user with SSO enabled and generate API key - await registerDiscourseUser({ - id, - firstname, - lastname, - email - }); + // 2. Create person entry in tools + let [personErr, person] = await to( + createPerson({ + id, + firstname, + lastname, + bio, + showBio, + link, + showLink, + orcid, + showOrcid, + emailNotifications, + terms, + sector, + showSector, + organisation, + tags, + showDomain, + showOrganisation, + profileComplete, + }) + ); - const [loginErr, token] = await to(login(req, user)) + // 3. Create Discourse user with SSO enabled and generate API key + await registerDiscourseUser({ + id, + firstname, + lastname, + email, + }); - if (loginErr) { - console.error(loginErr) - return res.status(500).json({ success: false, data: 'Authentication error!' }) - } + const [loginErr, token] = await to(login(req, user)); - var redirectURLis = redirectURL; + if (loginErr) { + console.error(loginErr); + return res.status(500).json({ success: false, data: 'Authentication error!' }); + } - if (redirectURLis === null || redirectURLis === '') { - redirectURLis = '' - } + var redirectURLis = redirectURL; - //Build event object for user registered and log it to DB - let eventObj = { - userId: req.user.id, - event: `user_registered_${req.user.provider}`, - timestamp: Date.now() - } - await eventLogController.logEvent(eventObj); + if (redirectURLis === null || redirectURLis === '') { + redirectURLis = ''; + } - return res - .status(200) - .cookie('jwt', token, { - httpOnly: true - }) - .json({ success: false, data: redirectURLis }); + //Build event object for user registered and log it to DB + let eventObj = { + userId: req.user.id, + event: `user_registered_${req.user.provider}`, + timestamp: Date.now(), + }; + await eventLogController.logEvent(eventObj); + return res + .status(200) + .cookie('jwt', token, { + httpOnly: true, + secure: process.env.api_url ? 
true : false, + }) + .json({ success: false, data: redirectURLis }); }); -module.exports = router \ No newline at end of file +module.exports = router; diff --git a/src/resources/user/user.repository.js b/src/resources/user/user.repository.js index 472f4ece..a15a6cd0 100644 --- a/src/resources/user/user.repository.js +++ b/src/resources/user/user.repository.js @@ -1,26 +1,92 @@ +import _ from 'lodash'; +import bcrypt from 'bcrypt'; + import { UserModel } from './user.model'; +import { TeamModel } from '../team/team.model'; +import helper from '../utilities/helper.util'; export async function getUserById(id) { - const user = await UserModel.findById(id).populate({ - path: 'teams', - select: 'publisher type members -_id', - populate: { - path: 'publisher', - select: 'name' - } - } - ); - return user; + const user = await UserModel.findById(id).populate({ + path: 'teams', + select: 'publisher type members -_id', + populate: { + path: 'publisher', + select: 'name', + }, + }); + return user; } export async function getUserByEmail(email) { - return await UserModel.findOne({ email }).exec() + return await UserModel.findOne({ email }).exec(); } export async function getUserByProviderId(providerId) { - return await UserModel.findOne({ providerId }).exec() + return await UserModel.findOne({ providerId }).exec(); } export async function getUserByUserId(id) { - return await UserModel.findOne({ id }).exec() -} \ No newline at end of file + return await UserModel.findOne({ id }).exec(); +} + +export async function getServiceAccountByClientCredentials(clientId, clientSecret) { + // 1. Locate service account by clientId, return undefined if no document located + const id = clientId.toString(); + const serviceAccount = await UserModel.findOne({ clientId:id, isServiceAccount: true }); + if (_.isNil(serviceAccount)) { + return; + } + // 2. Extract hashed client secret from DB + const { clientSecret: hashedClientSecret = '' } = serviceAccount; + // 3. Compare client secret to hashed client secret to check for auth match + const match = await bcrypt.compare(clientSecret, hashedClientSecret); + // 4. Return the service account if matched + if (match) { + return serviceAccount; + } + // 5. Return undefined if secret did not match + return; +} + +export async function createServiceAccount(firstname, lastname, email, teamId) { + // 1. Set up default params + const isServiceAccount = true, + role = 'creator', + teamRole = 'manager', + providerId = 'Service Account'; + // 2. Ensure team is valid before creating service account + const id = teamId.toString(); + const team = await TeamModel.findById(id); + // 3. Return undefined if no team found + if (_.isNil(team)) { + return; + } + // 4. Generate Client Id and Client Secret + const clientId = helper.generateAlphaNumericString(15); + const clientSecret = helper.generateAlphaNumericString(50); + // 5. Hash Client Secret for storage in DB + const saltRounds = 10; + const hashedClientSecret = await bcrypt.hash(clientSecret, saltRounds); + // 6. Create service account user with the hashed Client Secret + const serviceAccount = await UserModel.create({ + role, + isServiceAccount, + providerId, + firstname, + lastname, + email, + clientId, + clientSecret: hashedClientSecret, + }); + // 7. Create membership for service account to team + const newMember = { + memberid: serviceAccount._id, + roles: [teamRole], + }; + // 8. Add membership for the service account to the team + TeamModel.update({ _id: team._id }, { $push: { members: newMember } }); + // 9. 
Reinstate unhashed client secret for single return instance + serviceAccount.clientSecret = clientSecret; + // 10. Return service account details + return serviceAccount; +} diff --git a/src/resources/user/user.roles.js b/src/resources/user/user.roles.js index 3727108c..50d49383 100644 --- a/src/resources/user/user.roles.js +++ b/src/resources/user/user.roles.js @@ -1,8 +1,8 @@ const ROLES = { - Admin: 'Admin', - DataCustodian: 'DataCustodian', - Creator: 'Creator', - System: 'System' - } - -export { ROLES } \ No newline at end of file + Admin: 'Admin', + DataCustodian: 'DataCustodian', + Creator: 'Creator', + System: 'System', +}; + +export { ROLES }; diff --git a/src/resources/user/user.route.js b/src/resources/user/user.route.js index b1d26d84..178ced9e 100644 --- a/src/resources/user/user.route.js +++ b/src/resources/user/user.route.js @@ -1,88 +1,121 @@ -import express from 'express' -import { ROLES } from '../user/user.roles' -import passport from "passport"; -import { utils } from "../auth"; -import { UserModel } from './user.model' -import { Data } from '../tool/data.model' +import express from 'express'; +import _ from 'lodash'; +import passport from 'passport'; + +import { ROLES } from '../user/user.roles'; +import { utils } from '../auth'; +import { UserModel } from './user.model'; +import { Data } from '../tool/data.model'; import helper from '../utilities/helper.util'; +import { createServiceAccount } from './user.repository'; const router = express.Router(); // @router GET /api/v1/users/:userID // @desc find user by id // @access Private -router.get( - '/:userID', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - //req.params.id is how you get the id from the url - var q = UserModel.find({ id: req.params.userID }); +router.get('/:userID', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + //req.params.id is how you get the id from the url + var q = UserModel.find({ id: req.params.userID }); - q.exec((err, userdata) => { - if (err) return res.json({ success: false, error: err }); - return res.json({ success: true, userdata: userdata }); - }); - } -); + q.exec((err, userdata) => { + if (err) return res.json({ success: false, error: err }); + return res.json({ success: true, userdata: userdata }); + }); +}); // @router GET /api/v1/users // @desc get all // @access Private -router.get( - '/', - passport.authenticate('jwt'), - utils.checkIsInRole(ROLES.Admin, ROLES.Creator), - async (req, res) => { - var q = Data.aggregate([ - // Find all tools with type of person - { $match: { type: 'person' } }, - // Perform lookup to users - { - $lookup: { - from: 'users', - localField: 'id', - foreignField: 'id', - as: 'user', - }, +router.get('/', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin, ROLES.Creator), async (req, res) => { + var q = Data.aggregate([ + // Find all tools with type of person + { $match: { type: 'person' } }, + // Perform lookup to users + { + $lookup: { + from: 'users', + localField: 'id', + foreignField: 'id', + as: 'user', }, - // select fields to use - { - $project: { - _id: '$user._id', - id: 1, - firstname: 1, - lastname: 1, - orcid: { - $cond: [{ - $eq: [ true, "$showOrcid" ]}, - "$orcid", "$$REMOVE"] - }, - bio: { - $cond: [{ - $eq: [ true, "$showBio" ]}, - "$bio", "$$REMOVE"] - }, - email: '$user.email', + }, + // select fields to use + { + $project: { + _id: '$user._id', + id: 1, + firstname: 1, + lastname: 1, + orcid: { + $cond: [ + { + 
$eq: [true, '$showOrcid'], + }, + '$orcid', + '$$REMOVE', + ], + }, + bio: { + $cond: [ + { + $eq: [true, '$showBio'], + }, + '$bio', + '$$REMOVE', + ], }, + email: '$user.email', }, - ]); + }, + ]); - q.exec((err, data) => { - if (err) { - return new Error({ success: false, error: err }); - } + q.exec((err, data) => { + if (err) { + return new Error({ success: false, error: err }); + } - const users = []; - data.map((dat) => { - let { _id, id, firstname, lastname, orcid = '', bio = '', email = '' } = dat; - if (email.length !== 0) email = helper.censorEmail(email[0]); - users.push({ _id, id, orcid, name: `${firstname} ${lastname}`, bio, email }); - }); - - return res.json({ success: true, data: users }); + const users = []; + data.map(dat => { + let { _id, id, firstname, lastname, orcid = '', bio = '', email = '' } = dat; + if (email.length !== 0) email = helper.censorEmail(email[0]); + users.push({ _id, id, orcid, name: `${firstname} ${lastname}`, bio, email }); }); - } -); -module.exports = router \ No newline at end of file + return res.json({ success: true, data: users }); + }); +}); + +// @router POST /api/v1/users/serviceaccount +// @desc create service account +// @access Private +// router.post('/serviceaccount', passport.authenticate('jwt'), utils.checkIsInRole(ROLES.Admin), async (req, res) => { +// try { +// // 1. Validate request body params +// let { firstname = '', lastname = '', email = '', teamId = '' } = req.body; +// if (_.isEmpty(firstname) || _.isEmpty(lastname) || _.isEmpty(email) || _.isEmpty(teamId)) { +// return res.status(400).json({ +// success: false, +// message: 'You must supply a first name, last name, email address and teamId', +// }); +// } +// // 2. Create service account +// const serviceAccount = await createServiceAccount(firstname, lastname, email, teamId); +// if(_.isNil(serviceAccount)) { +// return res.status(400).json({ +// success: false, +// message: 'Unable to create service account with provided details', +// }); +// } +// // 3. 
Return service account details +// return res.status(200).json({ +// success: true, +// serviceAccount +// }); +// } catch (err) { +// console.error(err.message); +// return res.status(500).json(err); +// } +// }); + +module.exports = router; diff --git a/src/resources/user/user.service.js b/src/resources/user/user.service.js index 8d590a5d..0e4de56d 100644 --- a/src/resources/user/user.service.js +++ b/src/resources/user/user.service.js @@ -1,62 +1,49 @@ -import { UserModel } from './user.model' +import { UserModel } from './user.model'; -export async function createUser({ - firstname, - lastname, - email, - providerId, - provider, - role -}) { - return new Promise(async (resolve, reject) => { - var id = parseInt(Math.random().toString().replace('0.', '')); +export async function createUser({ firstname, lastname, email, providerId, provider, role }) { + return new Promise(async (resolve, reject) => { + var id = parseInt(Math.random().toString().replace('0.', '')); - return resolve( - await UserModel.create({ - id, - providerId, - provider, - firstname, - lastname, - email, - role - }) - ) - }) + return resolve( + await UserModel.create({ + id, + providerId, + provider, + firstname, + lastname, + email, + role, + }) + ); + }); } -export async function updateUser({ - id, - firstname, - lastname, - email, - discourseKey, - discourseUsername -}) { - return new Promise(async (resolve, reject) => { - return resolve( - await UserModel.findOneAndUpdate({ id: id }, - { - firstname, - lastname, - email, - discourseKey, - discourseUsername - }) - ) - }) +export async function updateUser({ id, firstname, lastname, email, discourseKey, discourseUsername }) { + return new Promise(async (resolve, reject) => { + return resolve( + await UserModel.findOneAndUpdate( + { id: id }, + { + firstname, + lastname, + email, + discourseKey, + discourseUsername, + } + ) + ); + }); } -export async function updateRedirectURL({ - id, - redirectURL -}) { - return new Promise(async (resolve, reject) => { - return resolve( - await UserModel.findOneAndUpdate({ id: id }, - { - redirectURL: redirectURL, - }) - ) - }) -} \ No newline at end of file +export async function updateRedirectURL({ id, redirectURL }) { + return new Promise(async (resolve, reject) => { + return resolve( + await UserModel.findOneAndUpdate( + { id: id }, + { + redirectURL: redirectURL, + } + ) + ); + }); +} diff --git a/src/resources/utilities/__tests__/helper.util.test.js b/src/resources/utilities/__tests__/helper.util.test.js new file mode 100644 index 00000000..6cb41298 --- /dev/null +++ b/src/resources/utilities/__tests__/helper.util.test.js @@ -0,0 +1,24 @@ +import helperUtil from '../helper.util'; + +describe('Helper Utility functions', () => { + test('Censorword function email test, 1@1.co.uk > *@1*****k', () => { + let testEmail = '1@1.co.uk'; + let expectedEmail = '*@1*****k'; + let resultEmail = helperUtil.censorEmail(testEmail); + expect(resultEmail).toEqual(expectedEmail); + }); + + test('Censorword function email test, 12@1.co.uk > 1*@1*****k', () => { + let testEmail = '12@1.co.uk'; + let expectedEmail = '1*@1*****k'; + let resultEmail = helperUtil.censorEmail(testEmail); + expect(resultEmail).toEqual(expectedEmail); + }); + + test('Censorword function email test, jamie@1234.co.uk > j***e@1********k', () => { + let testEmail = 'jamie@1234.co.uk'; + let expectedEmail = 'j***e@1********k'; + let resultEmail = helperUtil.censorEmail(testEmail); + expect(resultEmail).toEqual(expectedEmail); + }); +}); \ No newline at end of file diff --git 
a/src/resources/utilities/__tests__/urlValidator.test.js b/src/resources/utilities/__tests__/urlValidator.test.js new file mode 100644 index 00000000..654a7132 --- /dev/null +++ b/src/resources/utilities/__tests__/urlValidator.test.js @@ -0,0 +1,33 @@ +const urlValidator = require('../urlValidator'); + +const validDOILinks = [ + 'https://doi.org/10.1136/bmjresp-2020-000644', + 'https://dx.doi.org/123', + 'http://doi.org/123', + 'http://dx.doi.org/123', + 'doi.org/123', + 'dx.doi.org/', +]; +const inValidDOILinks = [ + 'http://www.doi.org/123', + 'www.dx.doi.org/', + 'doi.com.org/4', + 'https://dx.doi.com/123', + 'www.bbc.co.uk', + 'doi', + '123', + '', +]; +describe('should validate DOI links', () => { + test('Valid DOI links return true', () => { + validDOILinks.forEach(link => { + expect(urlValidator.isDOILink(link)).toEqual(true); + }); + }); + + test('Invalid DOI links return false', () => { + inValidDOILinks.forEach(link => { + expect(urlValidator.isDOILink(link)).toEqual(false); + }); + }); +}); diff --git a/src/resources/utilities/cloudStorage.util.js b/src/resources/utilities/cloudStorage.util.js index 250b6806..fd5fe27d 100644 --- a/src/resources/utilities/cloudStorage.util.js +++ b/src/resources/utilities/cloudStorage.util.js @@ -1,51 +1,57 @@ -import {Storage} from '@google-cloud/storage'; +import { Storage } from '@google-cloud/storage'; import fs from 'fs'; const bucketName = process.env.SCAN_BUCKET; const sourceBucket = process.env.DESTINATION_BUCKET; export const fileStatus = { UPLOADED: 'UPLOADED', - ERROR: 'ERROR', - SCANNED: 'SCANNED' + ERROR: 'ERROR', + SCANNED: 'SCANNED', }; -export const processFile = (file, id, uniqueId) => new Promise(async (resolve, reject) => { - const storage = new Storage(); - let { originalname, path } = file; - storage.bucket(bucketName).upload(path, { - gzip: true, - destination: `dar-${id}-${uniqueId}_${originalname}`, - metadata: { cacheControl: 'none-cache'} - }, (err, file) => { - if(!err) { - // remove temp dir / path = dir - fs.unlinkSync(path); - // resolve - resolve({status: fileStatus.UPLOADED, file}); - } else { - resolve({status: fileStatus.ERROR, file}); - } - }); -}); +export const processFile = (file, id, uniqueId) => + new Promise(async (resolve, reject) => { + const storage = new Storage(); + let { originalname, path } = file; + storage.bucket(bucketName).upload( + path, + { + gzip: true, + destination: `dar-${id}-${uniqueId}_${originalname}`, + metadata: { cacheControl: 'none-cache' }, + }, + (err, file) => { + if (!err) { + // remove temp dir / path = dir + fs.unlinkSync(path); + // resolve + resolve({ status: fileStatus.UPLOADED, file }); + } else { + resolve({ status: fileStatus.ERROR, file }); + } + } + ); + }); -export const getFile = (file, fileId, id) => new Promise(async (resolve) => { - // 1. new storage obj - const storage = new Storage(); - // 2. set option for file dest - let options = { - // The path to which the file should be downloaded - destination: `${process.env.TMPDIR}${id}/${fileId}_${file}`, - }; - // create tmp - const sanitisedId = id.replace( /[^0-9a-z]/ig,''); +export const getFile = (file, fileId, id) => + new Promise(async resolve => { + // 1. new storage obj + const storage = new Storage(); + // 2. 
set option for file dest + let options = { + // The path to which the file should be downloaded + destination: `${process.env.TMPDIR}${id}/${fileId}_${file}`, + }; + // create tmp + const sanitisedId = id.replace(/[^0-9a-z]/gi, ''); - const filePath = `${process.env.TMPDIR}${sanitisedId}`; + const filePath = `${process.env.TMPDIR}${sanitisedId}`; - if (!fs.existsSync(filePath)) { - fs.mkdirSync(filePath); - } - // 3. set path - const path = `dar/${sanitisedId}/${fileId}_${file}`; - // 4. get file from GCP - resolve(storage.bucket(sourceBucket).file(path).download(options)); -}); + if (!fs.existsSync(filePath)) { + fs.mkdirSync(filePath); + } + // 3. set path + const path = `dar/${sanitisedId}/${fileId}_${file}`; + // 4. get file from GCP + resolve(storage.bucket(sourceBucket).file(path).download(options)); + }); diff --git a/src/resources/utilities/constants.util.js b/src/resources/utilities/constants.util.js index 59fea5cf..dbfb7fbb 100644 --- a/src/resources/utilities/constants.util.js +++ b/src/resources/utilities/constants.util.js @@ -4,10 +4,257 @@ const _userTypes = { APPLICANT: 'applicant', }; -const _submissionEmailRecipientTypes = [ - 'applicant', - 'dataCustodian' -]; +const _userQuestionActions = { + custodian: { + reviewer: { + submitted: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + inReview: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + approved: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + ['approved with conditions']: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + rejected: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + withdrawn: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + }, + manager: { + submitted: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + { + key: 'requestAmendment', + icon: 'fas fa-exclamation-circle', + color: '#F0BB24', + toolTip: 'Request applicant updates answer', + order: 2, + }, + ], + inReview: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + { + key: 'requestAmendment', + icon: 'fas fa-exclamation-circle', + color: '#F0BB24', + toolTip: 'Request applicant updates answer', + order: 2, + }, + ], + approved: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + ['approved with conditions']: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + rejected: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + withdrawn: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + }, + }, + applicant: { + inProgress: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + submitted: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + 
], + inReview: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + approved: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + ['approved with conditions']: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + rejected: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + withdrawn: [ + { + key: 'guidance', + icon: 'far fa-question-circle', + color: '#475da7', + toolTip: 'Guidance', + order: 1, + }, + ], + }, +}; + +const _navigationFlags = { + custodian: { + submitted: { + completed: { status: 'SUCCESS', options: [], text: '#NAME# updated this answer on #DATE#' }, + }, + returned: { + completed: { status: 'WARNING', options: [], text: '#NAME# requested an update on #DATE#' }, + incomplete: { status: 'WARNING', options: [], text: '#NAME# requested an update on #DATE#' }, + }, + inProgress: { + incomplete: { + status: 'WARNING', + options: [ + { + text: 'Cancel request', + action: 'cancelRequest', + icon: '', + displayOrder: 1, + }, + ], + text: '#NAME# requested an update on #DATE#', + }, + }, + }, + applicant: { + submitted: { + completed: { status: 'SUCCESS', options: [], text: '#NAME# updated this answer on #DATE#' }, + incomplete: { status: 'DANGER', options: [], text: '#NAME# requested an update on #DATE#' }, + }, + returned: { + completed: { + status: 'SUCCESS', + options: [ + { + text: 'Revert to previous answer', + action: 'revertToPreviousAnswer', + icon: '', + displayOrder: 1, + }, + ], + text: '#NAME# updated this answer on #DATE#', + }, + incomplete: { status: 'DANGER', options: [], text: '#NAME# requested an update on #DATE#' }, + }, + }, +}; + +const _submissionEmailRecipientTypes = ['applicant', 'dataCustodian']; + +const _amendmentStatuses = { + AWAITINGUPDATES: 'AWAITINGUPDATES', + UPDATESSUBMITTED: 'UPDATESSUBMITTED', + UPDATESREQUESTED: 'UPDATESREQUESTED', + UPDATESRECEIVED: 'UPDATESRECEIVED', +}; const _notificationTypes = { STATUSCHANGE: 'StatusChange', @@ -19,6 +266,12 @@ const _notificationTypes = { FINALDECISIONREQUIRED: 'FinalDecisionRequired', DEADLINEWARNING: 'DeadlineWarning', DEADLINEPASSED: 'DeadlinePassed', + RETURNED: 'Returned', + MEMBERADDED: 'MemberAdded', + MEMBERREMOVED: 'MemberRemoved', + MEMBERROLECHANGED: 'MemberRoleChanged', + WORKFLOWASSIGNED: 'WorkflowAssigned', + WORKFLOWCREATED: 'WorkflowCreated' }; const _applicationStatuses = { @@ -33,40 +286,54 @@ const _applicationStatuses = { const _amendmentModes = { ADDED: 'added', - REMOVED: 'removed' + REMOVED: 'removed', + REVERTED: 'reverted', }; const _submissionTypes = { INITIAL: 'initial', - RESUBMISSION: 'resubmission' + RESUBMISSION: 'resubmission', }; +const _formActions = { + ADDREPEATABLESECTION: 'addRepeatableSection', + REMOVEREPEATABLESECTION: 'removeRepeatableSection', + ADDREPEATABLEQUESTIONS: 'addRepeatableQuestions', + REMOVEREPEATABLEQUESTIONS: 'removeRepeatableQuestions', +} + const _darPanelMapper = { safesettings: 'Safe settings', safeproject: 'Safe project', safepeople: 'Safe people', safedata: 'Safe data', - safeoutputs: 'Safe outputs' + safeoutputs: 'Safe outputs', }; - // // const _roleTypes = { MANAGER: 'manager', REVIEWER: 'reviewer', -} +}; // +const _hdrukEmail = 'enquiry@healthdatagateway.org'; + export default { userTypes: _userTypes, + userQuestionActions: _userQuestionActions, + 
navigationFlags: _navigationFlags, + amendmentStatuses: _amendmentStatuses, notificationTypes: _notificationTypes, applicationStatuses: _applicationStatuses, amendmentModes: _amendmentModes, submissionTypes: _submissionTypes, + formActions: _formActions, roleTypes: _roleTypes, darPanelMapper: _darPanelMapper, - submissionEmailRecipientTypes: _submissionEmailRecipientTypes -}; \ No newline at end of file + submissionEmailRecipientTypes: _submissionEmailRecipientTypes, + hdrukEmail: _hdrukEmail, +}; diff --git a/src/resources/utilities/dynamicForms/__mocks__/formSchema.js b/src/resources/utilities/dynamicForms/__mocks__/formSchema.js new file mode 100644 index 00000000..9a4834d6 --- /dev/null +++ b/src/resources/utilities/dynamicForms/__mocks__/formSchema.js @@ -0,0 +1,2573 @@ +module.exports = [ + { + pages: [ + { + active: true, + description: + 'Who is going to be accessing the data?\\n\\nSafe People should have the right motivations for accessing research data and understand the legal and ethical considerations when using data that may be sensitive or confidential. Safe People should also have sufficient skills, knowledge and experience to work with the data effectively. Researchers may need to undergo specific training or accreditation before accessing certain data or research environments and demonstrate that they are part of a bona fide research organisation.\\n\\nThe purpose of this section is to ensure that:\\n- details of people who will be accessing the data and the people who are responsible for completing the application are identified\\n- any individual or organisation that intends to access the data requested is identified\\n- all identified individuals have the necessary accreditation and/or expertise to work with the data effectively.', + pageId: 'safepeople', + title: 'Safe people', + }, + { + pageId: 'safeproject', + title: 'Safe project', + active: false, + description: + 'What is the purpose of accessing the data?\\n\\nSafe projects are those that have a valid research purpose with a defined public benefit. \\nFor access to data to be granted the researchers need to demonstrate that their proposal is an appropriate and ethical use of the data, and that it is intended to deliver clear public benefits. The purpose of this section is to ensure that:\\n- the project rationale is explained in lay terms\\n- the research purpose has a defined public benefit. This can be new knowledge, new treatments, improved pathways of care, new techniques of training staff. \\n- how the data requested will be used to achieve the project objectives is articulated.', + }, + { + description: + 'Safe data ensure that researchers have a clear legal basis for accessing the data and do not inadvertently learn something about the data subjects during the course of their analysis, minimising the risks of re-identification.\\nThe minimisation of this risk could be achieved by removing direct identifiers, aggregating values, banding variables, or other statistical techniques that may make re-identification more difficult. Sensitive or confidential data could not be considered to be completely safe because of the residual risk to a data subject’s confidentiality. 
Hence other limitations on access will need to be applied.\\n\\nThe purpose of this section is to ensure that: \\n- there is a clear legal basis for accessing the requested data\\n- the data requested is proportionate to the requirement of the project \\n- all data requested is necessary in order to achieve the public benefit declared \\n- data subjects cannot be identified by your team by cross-referencing data sets from anywhere else.', + active: false, + title: 'Safe data', + pageId: 'safedata', + }, + { + active: false, + description: + 'Safe settings are analytics environments where researchers can access and analyse the requested datasets in a safe and ethical way. Safe settings encompass the physical environment and procedural arrangements such as the supervision and auditing regimes. For safe settings, the likelihood of both deliberate and accidental disclosure needs to be explicitly considered.\\n\\nThe purpose of this section is to ensure that:\\n\\n- researchers access requested data in a secure and controlled setting such as a Trusted Research Environment (TRE) that limits the unauthorised use of the data\\n- practical controls and appropriate restrictions are in place if researchers access data though non-TRE environment. There may be requirements that data is held on restricted access servers, encrypted and only decrypted at the point of use.', + pageId: 'safesettings', + title: 'Safe settings', + }, + { + active: false, + description: + 'Safe outputs ensure that all research outputs cannot be used to identity data subjects. They typically include ‘descriptive statistics’ that have been sufficiently aggregated such that identification is near enough impossible, and modelled outputs which are inherently non-confidential.\\nThe purpose of this section is to ensure that:\\n\\n- controls are in place to minimise risks associated with planned outputs and publications \\n\\n- the researchers aim to openly publish their results to enable use, scrutiny and further research.', + pageId: 'safeoutputs', + title: 'Safe outputs', + }, + ], + formPanels: [ + { + pageId: 'safepeople', + index: 1, + panelId: 'applicant', + }, + { + pageId: 'safepeople', + panelId: 'safepeople-otherindividuals', + index: 2, + }, + { + panelId: 'safeproject-aboutthisapplication', + index: 3, + pageId: 'safeproject', + }, + { + panelId: 'safeproject-projectdetails', + index: 4, + pageId: 'safeproject', + }, + { + panelId: 'safeproject-funderinformation', + index: 5, + pageId: 'safeproject', + }, + { + index: 6, + panelId: 'safeproject-sponsorinformation', + pageId: 'safeproject', + }, + { + pageId: 'safeproject', + index: 7, + panelId: 'safeproject-declarationofinterest', + }, + { + panelId: 'safeproject-intellectualproperty', + index: 8, + pageId: 'safeproject', + }, + { + panelId: 'safedata-datafields', + index: 9, + pageId: 'safedata', + }, + { + pageId: 'safedata', + panelId: 'safedata-otherdatasetsintentiontolinkdata', + index: 10, + }, + { + pageId: 'safedata', + panelId: 'safedata-lawfulbasis', + index: 11, + }, + { + panelId: 'safedata-confidentialityavenue', + index: 12, + pageId: 'safedata', + }, + { + panelId: 'safedata-ethicsapproval', + index: 13, + pageId: 'safedata', + }, + { + pageId: 'safesettings', + index: 14, + panelId: 'safesettings-storageandprocessing', + }, + { + index: 15, + panelId: 'safesettings-dataflow', + pageId: 'safesettings', + }, + { + pageId: 'safeoutputs', + index: 16, + panelId: 'safeoutputs-outputsdisseminationplans', + }, + { + pageId: 'safeoutputs', + index: 17, + 
panelId: 'safeoutputs-retention', + }, + ], + questionPanels: [ + { + pageId: 'safepeople', + panelHeader: + "Please list the individuals who will have access to the data requested, or are responsible for helping complete this application form. \\n\\nThis section should include key contact details for the person who is leading the project; key contact details for the person(s) who (are) leading the project from other organisations. Only one contact from each organisation is needed. \\n\\nThe 'Primary applicant' is the person filling out the application form and principal contact for the application. This is usually the person with operational responsibility for the proposal. Each application must have details for at least one person.\\n\\nPlease use the file upload function if you're not able to add all individuals via the form.", + navHeader: 'Applicant', + questionSets: [ + { + questionSetId: 'applicant', + index: 1, + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'applicant', + }, + { + panelHeader: + "Please list the individuals who will have access to the data requested, or are responsible for helping complete this application form. \\n\\nThis section should include key contact details for the person who is leading the project; key contact details for the person(s) who (are) leading the project from other organisations. Only one contact from each organisation is needed. \\n\\nThe 'Primary applicant' is the person filling out the application form and principal contact for the application. This is usually the person with operational responsibility for the proposal. Each application must have details for at least one person.\\n\\nPlease use the file upload function if you're not able to add all individuals via the form.", + pageId: 'safepeople', + navHeader: 'Other individuals', + questionSets: [ + { + index: 1, + questionSetId: 'safepeople-otherindividuals', + }, + { + index: 100, + questionSetId: 'add-safepeople-otherindividuals', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safepeople-otherindividuals', + }, + { + navHeader: 'About this application', + pageId: 'safeproject', + panelHeader: '', + panelId: 'safeproject-aboutthisapplication', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + questionSetId: 'safeproject-aboutthisapplication', + index: 1, + }, + ], + }, + { + questionSets: [ + { + index: 1, + questionSetId: 'safeproject-projectdetails', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safeproject-projectdetails', + panelHeader: '', + pageId: 'safeproject', + navHeader: 'Project details', + }, + { + pageId: 'safeproject', + panelHeader: + "A funder is the organisation or body providing the financial resource to make the project possible, and may be different to the organisation detailed in the Safe people section. Please provide details of the main funder organisations supporting this project.\\n\\nPlease use the file upload function if you're not able to add all funders via the form.", + navHeader: 'Funder information', + questionSets: [ + { + index: 1, + questionSetId: 'safeproject-funderinformation', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safeproject-funderinformation', + }, + { + panelHeader: + "The sponsor is usually, but does not have to be, the main funder of the research. 
The sponsor takes primary responsibility for ensuring that the design of the project meets appropriate standards and that arrangements are in place to ensure appropriate conduct and reporting. \\n\\nPlease use the file upload function if you're not able to add all sponsors via the form.", + pageId: 'safeproject', + navHeader: 'Sponsor information', + panelId: 'safeproject-sponsorinformation', + questionSets: [ + { + index: 1, + questionSetId: 'safeproject-sponsorinformation', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + }, + { + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + index: 1, + questionSetId: 'safeproject-declarationofinterest', + }, + ], + panelId: 'safeproject-declarationofinterest', + navHeader: 'Declaration of interest', + pageId: 'safeproject', + panelHeader: + 'All interests that might unduly influence an individual’s judgement and objectivity in the use of the data being requested are of relevance, particularly if it involves payment or financial inducement. \\r\\n\\nThese might include any involvement of commercial organisations at arm’s-length to the project, or likely impact on commercial organisations, individually or collectively, that might result from the outcomes or methodology of the project.\\n\\nAll individuals named in this application who have an interest this application must declare their interest. ', + }, + { + navHeader: 'Intellectual property', + pageId: 'safeproject', + panelHeader: + 'All interests that might unduly influence an individual’s judgement and objectivity in the use of the data being requested are of relevance, particularly if it involves payment or financial inducement. \\r\\n\\nThese might include any involvement of commercial organisations at arm’s-length to the project, or likely impact on commercial organisations, individually or collectively, that might result from the outcomes or methodology of the project.\\n\\nAll individuals named in this application who have an interest this application must declare their interest. ', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + index: 1, + questionSetId: 'safeproject-intellectualproperty', + }, + ], + panelId: 'safeproject-intellectualproperty', + }, + { + navHeader: 'Data fields', + pageId: 'safedata', + panelHeader: + 'These are the Information assets which your proposal seeks to access and use.\\n\\nYou should consider this definition to be wide in scope and include any source of information which you propose to access and use. The data may be highly structured or less structured in nature, already existing or to be newly collected or gathered. \\n\\nExamples may include national datasets, local data sets, national or local extracts from systems, national or local registries or networks, patient records, or new information to be gathered from patients, families or other cohorts. \\n\\nNew data” should only include data that is being specifically gathered for the first time for the purposes of this proposal. i.e. 
data already held in case notes and transferred to a form is not “new” data, but a survey filled out by clinicians in order to gather information not recorded anywhere else is “new”.\\n\\n', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + questionSetId: 'safedata-datafields', + index: 1, + }, + ], + panelId: 'safedata-datafields', + }, + { + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + questionSetId: 'safedata-otherdatasetsintentiontolinkdata', + index: 1, + }, + ], + panelId: 'safedata-otherdatasetsintentiontolinkdata', + navHeader: 'Other datasets - Intention to link data', + pageId: 'safedata', + panelHeader: + 'This section should include information on the planned use of datasets not already included in this application. The following information is required:\\n\\nA descriptive name so that it is clear what the dataset is. \\n\\nSufficient information to explain the content of the dataset. \\n\\nWhether the proposal requires linkage of data, the use of matched controls, or the extraction of anonymised data.\\n\\nPlease indicate which organisation or body is undertaking these processes and which variables from the data sources requested will be used to achieve the proposed linkage. This should cover every dataset and variable you will require. ', + }, + { + navHeader: 'Lawful basis', + pageId: 'safedata', + panelHeader: + 'General Data Protection Regulation (GDPR) applies to ‘controllers’ and ‘processors’. \\n\\nA controller determines the purposes and means of processing personal data.\\n\\nA processor is responsible for processing personal data on behalf of a controller.\\n \\nGDPR applies to processing carried out by organisations operating within the EU. It also applies to organisations outside the EU that offer goods or services to individuals in the EU.\\nGDPR does not apply to certain activities including processing covered by the Law Enforcement Directive, processing for national security purposes and processing carried out by individuals purely for personal/household activities. \\n \\nGDPR only applies to information which relates to an identifiable living individual. Information relating to a deceased person does not constitute personal data and therefore is not subject to the GDPR.', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + questionSetId: 'safedata-lawfulbasis', + index: 1, + }, + ], + panelId: 'safedata-lawfulbasis', + }, + { + questionSets: [ + { + questionSetId: 'safedata-confidentialityavenue', + index: 1, + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safedata-confidentialityavenue', + pageId: 'safedata', + panelHeader: + 'If confidential information is being disclosed , the organisations holding this data (both the organisation disclosing the information and the recipient organisation) must also have a lawful basis to hold and use this information, and if applicable, have a condition to hold and use special categories of confidential information, and be fair and transparent about how they hold and use this data. \\n\\nIn England and Wales, if you are using section 251 of the NHS Act 2006 (s251) as a legal basis for identifiable data, you will need to ensure that you have the latest approval letter and application. 
\\n\\nFor Scotland this application will be reviewed by the Public Benefit and Privacy Panel.\\n\\nIn Northern Ireland it will be considered by the Privacy Advisory Committee. If you are using patient consent as the legal basis, you will need to provide all relevant consent forms and information leaflets. ', + navHeader: 'Confidentiality avenue', + }, + { + navHeader: 'Ethics approval', + pageId: 'safedata', + panelHeader: + 'This section details the research and ethics approval which you have obtained or sought for your project, or otherwise provides evidence as to why such approval is not necessary. \\nWhere such approval is not in place, it is important that you demonstrate why this is the case and provide assurances if approval is pending. If you need advice on whether ethics approval is necessary, you should approach your local ethics services in the first instance. Information about UK research ethics committees and ethical opinions can be found on the Health Research Authority (HRA) website.\\n', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + questionSetId: 'safedata-ethicsapproval', + index: 1, + }, + ], + panelId: 'safedata-ethicsapproval', + }, + { + questionSets: [ + { + index: 1, + questionSetId: 'safesettings-storageandprocessing', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safesettings-storageandprocessing', + pageId: 'safesettings', + panelHeader: + "This section details in what way the proposal aims to store and use data, and controls in place to minimise risks associated with this storage and use. If you have indicated that your proposal seeks to store and use data exclusively through a recognised trusted research environment, then you do not need to complete this section.\\n \\n'Processing', in relation to personal data, means any operation or set of operations which is performed on personal data or on sets of personal data (whether or not by automated means, such as collection, recording, organisation, structuring, storage, alteration, retrieval, consultation, use, disclosure, dissemination, restriction, erasure or destruction).\\n \\nPlease list all locations where processing will be undertaken; for the avoidance of doubt, storage is considered processing. 
For each separate organisation processing data which is not fully anonymous a separate partner organisation form must also be completed.\\n \\n Processing, in relation to information or data means obtaining, recording or holding the information or data or carrying out any operation or set of operations on the information or data, including—\\n a) organisation, adaptation or alteration of the information or data,\\n b) retrieval, consultation or use of the information or data,\\n c) disclosure of the information or data by transmission,\\n dissemination or otherwise making available, or\\n d) alignment, combination, blocking, erasure or destruction of the information or data.\\n\\nPlease use the file upload function if you're not able to add all organisations via the form.", + navHeader: 'Storage and processing', + }, + { + questionSets: [ + { + index: 1, + questionSetId: 'safesettings-dataflow', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + panelId: 'safesettings-dataflow', + pageId: 'safesettings', + panelHeader: '', + navHeader: 'Dataflow', + }, + { + panelId: 'safeoutputs-outputsdisseminationplans', + questionSets: [ + { + index: 1, + questionSetId: 'safeoutputs-outputsdisseminationplans', + }, + ], + questionPanelHeaderText: 'TODO: We need a description for this panel', + pageId: 'safeoutputs', + panelHeader: + 'Please include any plans for dissemination and publication of the data and results arising from your proposal. Please also specify any controls in place to minimise risks associated with publication. Dissemination can take place in a variety of ways and through many mechanisms, including through electronic media, print media or word of mouth.', + navHeader: 'Outputs dissemination plans', + }, + { + panelId: 'safeoutputs-retention', + questionPanelHeaderText: 'TODO: We need a description for this panel', + questionSets: [ + { + index: 1, + questionSetId: 'safeoutputs-retention', + }, + ], + navHeader: 'Retention', + pageId: 'safeoutputs', + panelHeader: + 'This section details how the project will treat data being processed after it has been used for the purpose of the proposal outlined, including governance in place to determine how long it will be retained, and controls to manage its subsequent disposal if required. 
Please reference any relevant policies and procedures which are in place to govern retention and disposal of data as outlined in the proposal.', + }, + ], + questionSets: [ + { + questions: [ + { + guidance: 'Please insert your full name.', + questionId: 'fullname-a218cf35b0847b14d5f6d565b01e2f8c', + question: 'Full name', + validations: [ + { + message: 'Please enter a value', + type: 'isLength', + params: [1], + }, + ], + input: { + required: true, + type: 'textInput', + }, + }, + { + guidance: 'Job Title is the name of the position the applicant holds within their organisation.', + input: { + required: true, + type: 'textInput', + }, + validations: [ + { + message: 'Please enter a value', + type: 'isLength', + params: [1], + }, + ], + question: 'Job title', + questionId: 'jobtitle-6ddd85c18e8da4ac08f376073932128f', + }, + { + guidance: 'Please include a contact telephone number that the applicant can be contacted on.', + questionId: 'telephone-7b9d3b160c86a77c842503904ffdf7e6', + input: { + type: 'textInput', + }, + question: 'Telephone', + }, + { + questionId: 'orcid-7c5167922d97afe681f4b7c388b0a70a', + input: { + type: 'textInput', + }, + question: 'ORCID', + guidance: + 'ORCID provides a persistent digital identifier (an ORCID iD) that you own and control, and that distinguishes you from every other researcher. You can create an ORCID profile at https://orcid.org/. If you have an ORCID iD please include it here. ', + }, + { + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + { + type: 'isEmail', + }, + ], + question: 'Email', + input: { + required: true, + type: 'textInput', + }, + questionId: 'email-7d977b9e170e992b5cb48d407304406d', + guidance: 'Please include an email address that the applicant can receive communications through.', + }, + { + guidance: 'Please confirm whether the applicant will be accessing the data that is being requested.', + question: 'Will you access the data requested?', + input: { + label: 'Will you access the data requested?', + type: 'radioOptionsInput', + options: [ + { + text: 'Yes', + value: 'yes', + }, + { + value: 'no', + text: 'No', + }, + ], + }, + questionId: 'willyouaccessthedatarequested-765aee4e52394857f7cb902bddeafe04', + }, + { + input: { + type: 'radioOptionsInput', + options: [ + { + text: 'Yes', + value: 'yes', + conditionalQuestions: [ + { + questionId: 'ifyespleaseprovideyouraccreditedresearchernumber-7a87ef841f884a7aad6f48252f9fc670', + input: { + type: 'textareaInput', + }, + question: 'If yes, please provide your accredited researcher number.', + }, + ], + }, + { + conditionalQuestions: [ + { + input: { + type: 'textareaInput', + }, + question: 'Please specify if you are planning to become an accredited researcher.', + questionId: 'pleasespecifyifyouareplanningtobecomeanaccreditedresearcher-d93e3edff26a69fb961a28032719960c', + }, + ], + text: 'No', + value: 'no', + }, + ], + label: 'Are you an accredited researcher under the Digital Economy Act 2017?', + }, + question: 'Are you an accredited researcher under the Digital Economy Act 2017?', + questionId: 'areyouanaccreditedresearcherunderthedigitaleconomyact2017-16c0422c22522e7e83dd0143242cbdda', + guidance: + 'Depending on the type of data you are requesting, you might be required to become an accredited researcher. Most access to data in the Secure Research Service (SRS) will be by researchers accredited under the Digital Economy Act 2017 (DEA). 
\\n\\nThe UK Statistics Authority has published further information on the criteria to be met in a Research Code of Practice and Accreditation criteria. Researchers can apply for accreditation through the Research Accreditation Service (RAS).\\n\\nFull accredited researcher status is valid for five years. Provisional accredited researcher status is valid for one year.\\n\\nMore information here: https://www.gov.uk/government/publications/digital-economy-act-2017-part-5-codes-of-practice/research-code-of-practice-and-accreditation-criteria#section-b-accreditation-of-researchers-and-peer-reviewers', + }, + { + questionId: + 'haveyouundertakenprofessionaltrainingoreducationonthetopicofinformationgovernance-0ffaac7080cdb73e7a6c2fcdd979697d', + question: 'Have you undertaken professional training or education on the topic of Information Governance?', + input: { + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + input: { + type: 'textareaInput', + }, + question: 'Please provide full details regarding the most recent training', + questionId: 'pleaseprovidefulldetailsregardingthemostrecenttraining-437c573445144812c0268b624d94dd61', + guidance: + 'Evidence of Information Governance training is an important aspect of most applications, giving assurance that individuals are aware of the privacy, confidentiality, data protection and Caldicott implications of working with personal data. \\n\\nPlease ensure you have checked with the data custodian if training is required for your application.', + }, + ], + }, + { + conditionalQuestions: [ + { + guidance: + 'Evidence of Information Governance training is an important aspect of most applications, giving assurance that individuals are aware of the privacy, confidentiality, data protection and Caldicott implications of working with personal data. \\n\\nPlease ensure you have checked with the data custodian if training is required for your application.', + input: { + type: 'textareaInput', + }, + question: 'Please provide any details of plans to attend training, if applicable', + questionId: 'pleaseprovideanydetailsofplanstoattendtrainingifapplicable-8fc8a15b9aa5f6220e2904910a8827f2', + }, + ], + text: 'No', + value: 'no', + }, + ], + type: 'radioOptionsInput', + label: 'Have you undertaken professional training or education on the topic of Information Governance?', + }, + guidance: + 'Evidence of Information Governance training is an important aspect of most applications, giving assurance that individuals are aware of the privacy, confidentiality, data protection and Caldicott implications of working with personal data. \\n\\nPlease ensure you have checked with the data custodian if training is required for your application.', + }, + { + guidance: + 'Please give the full name of the organisation on whose behalf you are making the application or within which you work in your professional capacity as an applicant. 
This should include a parent organisation, and sub-division or department if appropriate (for example University of Edinburgh, Department of Informatics).', + questionId: 'yourorganisationname-e4afdb97925cc69c23c576fce197ef55', + question: 'Your organisation name', + validations: [ + { + message: 'Please enter a value', + type: 'isLength', + params: [1], + }, + ], + input: { + required: true, + type: 'textInput', + }, + }, + { + questionId: + 'doesyourorganisationhaveacurrentdatasecurityandprotectiontoolkitdsptpublishedassessment-0b6c42f4c32b2f347c1f6f20329ad486', + question: 'Does your organisation have a current Data Security and Protection Toolkit (DSPT) published assessment?', + validations: [ + { + message: 'Please select an option', + type: 'isLength', + params: [1], + }, + { + type: 'isLength', + params: [1], + message: 'Please enter a value', + }, + ], + input: { + label: 'Does your organisation have a current Data Security and Protection Toolkit (DSPT) published assessment?', + required: true, + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + input: { + type: 'textInput', + }, + question: 'If yes, please provide the current status', + questionId: 'ifyespleaseprovidethecurrentstatus-fa877511570f68283d658d9458f36867', + }, + { + validations: [ + { + type: 'isCustomDate', + format: 'dd/MM/yyyy', + }, + ], + question: 'If yes, please provide the date published', + input: { + type: 'datePickerCustom', + }, + questionId: 'ifyespleaseprovidethedatepublished', + }, + ], + }, + { + text: 'No', + value: 'no', + }, + ], + type: 'radioOptionsInput', + }, + guidance: + 'The Data Security and Protection Toolkit (DSPT) is an online self-assessment tool that allows organisations to measure their performance against the National Data Guardian’s 10 data security standards.\\n\\nAll organisations that have access to NHS patient data and systems must use the DSPT to provide assurance that they are practising good data security and that personal information is handled correctly.\\n\\nThe DSPT is an annual assessment.\\n\\nYou can find out the status of your organisation here https://www.dsptoolkit.nhs.uk/OrganisationSearch', + }, + { + questionId: 'willyourorganisationactasdatacontroller-1caf3bd728742c4b7ff66633ab23b0d6', + input: { + label: 'Will your organisation act as data controller?', + type: 'radioOptionsInput', + options: [ + { + conditionalQuestions: [ + { + questionId: 'icoregisterednumber-6f3e1d2b5b613a9fb0a4823754a921b3', + question: 'ICO registered number', + input: { + type: 'textInput', + }, + }, + { + guidance: "Please include the organisation's business address.", + questionId: 'registeredaddressline1-cabcaab378687adbcc1b7042e8f4db9c', + input: { + type: 'textInput', + }, + question: 'Registered address (line 1)', + }, + { + guidance: "Please include the organisation's business address.", + question: 'Registered address (line 2)', + input: { + type: 'textInput', + }, + questionId: 'registeredaddressline2-6d779c3a667c3f026c668bf01c857fbe', + }, + { + questionId: 'city-8b14019051e5eb5b527d50705fe83299', + input: { + type: 'textInput', + }, + question: 'City', + guidance: 'Please specify the city where the organisation is located', + }, + { + guidance: "Please include the organisation's business address postcode", + question: 'Postcode', + input: { + type: 'textInput', + }, + questionId: 'postcode-65a92b863281a9ef883108ad6a2d96c6', + }, + { + input: { + type: 'textInput', + }, + question: 'Country', + questionId: 'country-f424a05d6890c1bfdb770e0c840359f7', + 
guidance: 'Please specify the country where the organisation is located.', + }, + { + question: 'Organisation type', + input: { + options: [ + { + text: 'Academic institution', + value: 'academicinstitution', + }, + { + text: 'National body', + value: 'nationalbody', + }, + { + text: 'Healthcare provider', + value: 'healthcareprovider', + }, + { + value: 'healthcarecomissioner', + text: 'Healthcare commissioner', + }, + { + value: 'commercialbody', + text: 'Commercial body', + }, + { + value: 'localauthority', + text: 'Local Authority', + }, + { + value: 'other', + text: 'Other', + }, + ], + type: 'checkboxOptionsInput', + }, + questionId: 'organisationtype-c34f26803e2083c3108c4b6bb967bab4', + guidance: 'Please select the type of organisation, unique purpose or role of the organisation.', + label: 'Organisation type', + }, + { + guidance: + 'If your organisation is a data controller, please detail whether it is the sole data controller or joint data controller with other organisations.', + questionId: 'pleaseprovidedetails-bd518a29d7e4cdae3e11f40e109a93f1', + input: { + type: 'textareaInput', + }, + question: 'Please provide details', + }, + ], + value: 'yes', + text: 'Yes', + }, + { + text: 'No', + value: 'no', + }, + { + text: 'Unsure', + value: 'unsure', + }, + ], + }, + question: 'Will your organisation act as data controller?', + guidance: + 'Please specify if your organisation will act as a data controller. If your organisation is not the sole data controller, please provide details of other data controllers. ', + }, + ], + questionSetHeader: 'Applicant', + questionSetId: 'applicant', + }, + { + questionSetId: 'safepeople-otherindividuals', + questions: [ + { + guidance: "Full name is the individual's first and last name", + question: 'Full name', + input: { + type: 'textInput', + }, + questionId: 'fullname-892140ec730145dc5a28b8fe139c2876', + }, + { + guidance: 'Job Title is the name of the position the individual holds within their organisation.', + questionId: 'jobtitle-ff1d692a04b4bb9a2babe9093339136f', + input: { + type: 'textInput', + }, + question: 'Job title', + }, + { + input: { + type: 'textInput', + }, + question: 'Organisation', + questionId: 'organisation-65c06905b8319ffa29919732a197d581', + guidance: "Please include the individual's organisation.", + }, + { + question: 'Role', + input: { + label: 'Role', + type: 'checkboxOptionsInput', + options: [ + { + value: 'principalinvestigator', + text: 'Principal investigator', + }, + { + text: 'Collaborator', + value: 'collaborator', + }, + { + value: 'teammember', + text: 'Team member', + }, + { + value: 'other', + text: 'Other', + conditionalQuestions: [ + { + questionId: 'ifotherpleasespecify-fa9e063fd5f253ae6dc76080db560bcc', + question: 'If other, please specify', + input: { + type: 'textareaInput', + }, + }, + ], + }, + ], + }, + questionId: 'role-22ddd99eee5c9dbc3175df5e0369082b', + guidance: + 'A role is a function that the applicant plays. It might include role types and accreditation for those that are accessing the secure data and those that are not but would see cleared outputs from the project. \\r\\n (i.e. 
project lead, deputy lead, accrediter, researcher, peer reviewer)', + }, + { + guidance: 'Please confirm whether this person will be accessing the data that is being requested.', + input: { + options: [ + { + value: 'yes', + text: 'Yes', + }, + { + value: 'no', + text: 'No', + }, + ], + type: 'radioOptionsInput', + label: 'Will this person access the data requested?', + }, + question: 'Will this person access the data requested?', + questionId: 'willthispersonaccessthedatarequested-20cba67ec4242f64c6f1f975af332b48', + }, + { + questionId: 'isthispersonanaccreditedresearcherunderthedigitaleconomyact2017-fab5a4a0cafd2f8889a40b942d7fc6c0', + question: 'Is this person an accredited researcher under the Digital Economy Act 2017?', + input: { + label: 'Is this person an accredited researcher under the Digital Economy Act 2017?', + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + input: { + type: 'textareaInput', + }, + question: 'If yes, please provide details', + questionId: 'ifyespleaseprovidedetails-8e5c491c36c07ba9a5a1a15569ba9127', + }, + ], + }, + { + text: 'No', + value: 'no', + }, + ], + type: 'radioOptionsInput', + }, + guidance: 'Please confirm whether this person is an accredited researcher under the Digital Economy Act 2017.', + }, + { + input: { + label: 'Has this person undertaken professional training or education on the topic of Information Governance?', + type: 'radioOptionsInput', + options: [ + { + conditionalQuestions: [ + { + questionId: 'pleaseprovidefulldetailsregardingthemostrecenttraining-0389fbcb937c035b4b99939e6057f6b1', + question: 'Please provide full details regarding the most recent training', + input: { + type: 'textareaInput', + }, + }, + ], + text: 'Yes', + value: 'yes', + }, + { + conditionalQuestions: [ + { + question: 'Please provide any details of plans to attend training, if applicable', + input: { + type: 'textareaInput', + }, + questionId: 'pleaseprovideanydetailsofplanstoattendtrainingifapplicable-d7904ad9d3975890f74466dfd9e03249', + }, + ], + value: 'no', + text: 'No', + }, + ], + }, + question: 'Has this person undertaken professional training or education on the topic of Information Governance?', + questionId: + 'hasthispersonundertakenprofessionaltrainingoreducationonthetopicofinformationgovernance-eb6cc4e210247305a5f8d49a7f9725dd', + guidance: + 'Please confirm whether this person has undertaken professional training or education on the topic of Information Governance.', + }, + ], + questionSetHeader: 'Other individuals', + }, + { + questionSetId: 'safeproject-aboutthisapplication', + questionSetHeader: 'About this application', + questions: [ + { + questionId: 'thisapplicationis-41ad7589e5f28c20d0714799174cfb54', + question: 'This application is...', + input: { + options: [ + { + value: 'anewapplication', + text: 'A new application', + }, + { + text: 'An amendment to an existing application', + value: 'anamendmenttoanexistingapplication', + conditionalQuestions: [ + { + input: { + type: 'textareaInput', + }, + question: 'Reference or details of previous application', + questionId: 'referenceordetailsofpreviousapplication-9566d8b4523d9357623c0dc13750dca4', + }, + ], + }, + { + conditionalQuestions: [ + { + question: 'Reference or details of previous application', + input: { + type: 'textareaInput', + }, + questionId: 'referenceordetailsofpreviousapplication-68ecab62047e082c600be559026b765d', + }, + ], + value: 'anextensionofanexistingapproval', + text: 'An extension of an existing approval', + }, + { + 
conditionalQuestions: [ + { + question: 'Reference or details of previous application', + input: { + type: 'textareaInput', + }, + questionId: 'referenceordetailsofpreviousapplication-fc163f4f2ca56168ad26b467969ef3ab', + }, + ], + value: 'arenewalofanexistingapproval', + text: 'A renewal of an existing approval', + }, + { + conditionalQuestions: [ + { + questionId: 'referenceordetailsofpreviousapplication-d027df857d58f165a9be86c20ab2a6b4', + question: 'Reference or details of previous application', + input: { + type: 'textareaInput', + }, + }, + ], + text: 'Related to a previous application (approved or not)', + value: 'relatedtoapreviousapplicationapprovedornot', + }, + ], + type: 'radioOptionsInput', + label: 'This application is...', + }, + guidance: + 'The application could be a new application, an extension, a renewal or an amendment. For extensions or amendments, you must highlight the specific information within this form that has been updated, provide an original application number and approval date, any subsequent amendment approval dates, a summary of changes and rationale for the change to your original application, and updated approvals signatures in order for the request to be processed.', + }, + ], + }, + { + questionSetId: 'safeproject-projectdetails', + questions: [ + { + guidance: + 'The title should identify the main area of your research so that another researcher could understand if it might be relevant to their area of study. \\n \\nThe titles of all Accredited Research projects are published on the UK Statistics Authority website as part of the public record of DEA Accredited Researchers.', + questionId: 'titleofproject-d1871bb2722198a540054857521fdc0b', + question: 'Title of project', + validations: [ + { + type: 'isLength', + params: [3, 300], + }, + ], + input: { + required: true, + type: 'textInput', + }, + }, + { + questionId: 'whatisthetypeofproject-f177460d399000fa2f5cca4540bd4754', + input: { + options: [ + { + value: 'research', + text: 'Research', + }, + { + value: 'clinicaudit', + text: 'Clinic audit', + }, + { + text: 'Service evaluation', + value: 'serviceevaluation', + }, + { + conditionalQuestions: [ + { + questionId: 'ifotherpleasespecify-453820818cc5378e47db48b940dd206a', + input: { + type: 'textInput', + }, + question: 'If other, please specify', + }, + ], + text: 'Other', + value: 'other', + }, + ], + type: 'radioOptionsInput', + required: true, + label: 'What is the type of project?', + }, + question: 'What is the type of project?', + validations: [ + { + message: 'Please select an option', + params: [1], + type: 'isLength', + }, + ], + guidance: + 'A research project is a discrete scientific endeavor to answer a research question or a set of research questions. \\n\\nA clinic audit project is designed and conducted to produce information to inform delivery of best care. It aims to find out if healthcare is being provided in line with standards to inform care providers and patients about where a service is doing well, and where there could be improvements.\\n\\nA service evaluation project is designed and conducted solely to define or judge current care. 
It seeks to assess how well a current service is achieving its intended aims.', + }, + { + questionId: 'isthisanewstudyorsupportinganexistingstudy-fd29fdb85107f798615a6a7d94a63212', + input: { + label: 'Is this a new study or supporting an existing study?', + type: 'radioOptionsInput', + options: [ + { + text: 'New study', + value: 'newstudy', + }, + { + value: 'existingstudy', + text: 'Existing study', + conditionalQuestions: [ + { + question: 'Evidence of existing outputs', + input: { + options: [ + { + text: 'I have enclosed evidence of existing outputs', + value: 'ihaveenclosedevidenceofexistingoutputs', + }, + ], + type: 'checkboxOptionsInput', + }, + questionId: 'evidenceofexistingoutputs-cb279f6601396c68f41f98d81948fd69', + label: 'Evidence of existing outputs', + }, + ], + }, + ], + }, + question: 'Is this a new study or supporting an existing study?', + }, + { + question: 'Please provide a lay summary of the project (300 words)', + validations: [ + { + type: 'isLength', + params: [10, 2000], + }, + ], + input: { + required: true, + type: 'textareaInput', + }, + questionId: 'pleaseprovidealaysummaryoftheproject300words-18d6084cf053be12331aa7c4463012e8', + guidance: + 'Please provide a summary of the study in language suitable for non-experts in the field and ensure that all abbreviations in technical terminology are explained.\\n \\nThe summary must make clear what the specific purpose is, who will be using the data (organisations rather than individual names), what will happen to the data, whether the expected outputs are in record level form, what is known to date about your chosen project including any preliminary/related analysis and background literature reviews. Please include any potential disclosure risks and how these will be addressed.', + }, + { + guidance: + 'This is required to allow us to know when and for how long the data will be required. If the project has already begun but this data is only required for a future element, use the date from when it will be required here.', + questionId: 'What is the anticipated start date of the project?', + question: 'What is the anticipated start date of the project?', + validations: [ + { + format: 'dd/MM/yyyy', + type: 'isCustomDate', + }, + ], + input: { + type: 'datePickerCustom', + }, + }, + { + questionId: 'Please provide anticipated end date of the project?', + input: { + type: 'datePickerCustom', + }, + question: 'Please provide anticipated end date of the project?', + validations: [ + { + type: 'isCustomDate', + format: 'dd/MM/yyyy', + }, + ], + }, + { + questionId: 'whataretheprojectaimsobjectivesandrationale-215c3780393fadf7584659eab0e834fc', + input: { + required: true, + type: 'textareaInput', + }, + question: 'What are the project aims, objectives and rationale?', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + ], + guidance: + 'Please include the background to the project by describing why you are conducting the study, the specific aims and the hypotheses that you hope to test. Summarise how the data requested are required to help address these aims. Please include whether the project has used peer review and, if applicable, the nature of that review. ', + }, + { + guidance: + 'Provide full details of your research methodology. This must include justification of sample size, analyses proposed, statistical methods, additional data sources such as linked data and any plans for collaborative work. 
\\n\\nThis information will be key to assessing whether your proposal will be feasible, deliver clear public good and be an appropriate use of data. \\n\\nEnsure you: \\n\\nSpecify the method(s) of analysis you plan to use (such as regression);\\n\\nAs far as possible, try to articulate the outcome or dependent variable(s). \\n\\nIndicate the starting point for the modelling process - acknowledging that the model may evolve.\\n\\nExplain (where relevant) how any potential selection/causal bias will be addressed (e.g. by including a control group with information on how this control group will be created); \\n\\nProvide methodology references, if a non-standard methodology is proposed;\\n\\nInclude information about any contribution to the field of research methodology that you believe may result from your research;\\n\\nInclude an explanation of how your methodological approach will answer the research question(s) set out in the project when employing methods not covered by any of the above (e.g. correlation or basic descriptive analysis will only be used, noting that such analysis might be more applicable for exploratory research).', + questionId: 'howwillthedatarequestedbeusedtoachievetheprojectobjectives-66b542d3041790be46cbd0093a1c0157', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + ], + question: 'How will the data requested be used to achieve the project objectives?', + input: { + type: 'textareaInput', + required: true, + }, + }, + { + guidance: + 'Use this section to give the background and justification of your proposal, to demonstrate how your project will benefit the public, as well as show your understanding of the Information Governance issues specific and inherent to your project. Please make it clear how the data requested will contribute. \\n\\nPlease also show that you have considered how to balance the privacy risks and public benefits when designing the study. The requirement for the datasets requested should be fully justified in the light of the aims and objectives of the proposal.', + input: { + required: true, + type: 'textareaInput', + }, + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + ], + question: 'How will your project benefit the public and what is the anticipated impact?', + questionId: 'howwillyourprojectbenefitthepublicandwhatistheanticipatedimpact-cbde5d9d5237f8309c3238af111a1f9e', + }, + { + guidance: + 'Provide full details of proposed public engagement plans for patient and/or user group involvement. 
If you have no plans, please elaborate why there will not be public engagement.', + questionId: + 'canyouprovideanoutlineofthepublicandpatientinvolvementandengagementppiestrategiesofthestudyorabriefexplanationofwhytheyarenotplanned-31372d627ddf8fc499dbc3b833a79abf', + question: + 'Can you provide an outline of the public and patient involvement and engagement (PPIE*) strategies of the study or a brief explanation of why they are not planned?', + input: { + type: 'textareaInput', + }, + }, + ], + questionSetHeader: 'Project details', + }, + { + questionSetId: 'safeproject-funderinformation', + questions: [ + { + guidance: 'Please confirm if your project has a funder.', + questionId: 'doesyourprojecthaveafunder-5f2de3ab7973f34fc94979b3604f9835', + input: { + required: true, + label: 'Does your project have a funder?', + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + questionId: 'ifyespleaseprovidetheorganisationname-6b6d945420c4064bfcc00100f1f964ce', + question: 'If yes, please provide the organisation name', + input: { + type: 'textInput', + }, + guidance: 'Please confirm funder organisation name.', + }, + { + questionId: 'addFunderDetails', + input: { + type: 'buttonInput', + action: 'addRepeatableQuestions', + questionIds: ['ifyespleaseprovidetheorganisationname-6b6d945420c4064bfcc00100f1f964ce'], + text: '+ Add another organisation', + class: 'btn btn-primary addButton', + separatorText: 'Additional organisation details', + }, + guidance: + "If there are other orgnisations to be specified as part of this application, click 'Add another organisation' as required.", + }, + ], + }, + { + conditionalQuestions: [ + { + question: 'If no, please provide details of how you intend to fund the study', + input: { + type: 'textareaInput', + }, + questionId: 'ifnopleaseprovidedetailsofhowyouintendtofundthestudy-69b1e8d4a8eabfd64ec006d711377095', + }, + { + questionId: 'pleaseprovideevidenceofindependentpeerreview-7285fe5f3380956072993c4d44fa99a9', + input: { + options: [ + { + text: 'I confirm I have provided evidence of independent peer review.', + value: 'iconfirmihaveprovidedevidenceofindependentpeerreview', + }, + ], + type: 'checkboxOptionsInput', + }, + question: 'Please provide evidence of independent peer review', + label: 'Please provide evidence of independent peer review', + }, + ], + text: 'No', + value: 'no', + }, + ], + type: 'radioOptionsInput', + }, + question: 'Does your project have a funder?', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please select an option', + }, + ], + }, + ], + questionSetHeader: 'Funder information', + }, + { + questionSetHeader: 'Sponsor information', + questions: [ + { + questionId: 'doesyourprojecthaveasponsor-6222cfdc0db4de7b26ed18cb8ff2de1f', + input: { + required: true, + label: 'Does your project have a sponsor?', + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + guidance: 'Please provide legal name; to appear on legal documents.', + input: { + type: 'textInput', + }, + question: 'Organisation name', + questionId: 'organisationname-e798ec22cc019a90c667d0879434d746', + }, + { + questionId: 'registeredaddressline1-768e71b8a621031ff767738336404c75', + input: { + type: 'textInput', + }, + question: 'Registered address (line 1)', + guidance: 'Please confirm sponsor organisation address.', + }, + { + guidance: 'Please confirm sponsor organisation address.', + input: { + type: 'textInput', + }, + question: 'Registered address (line 2)', + questionId: 
'registeredaddressline2-ad9cde7d35d303607780655af5d6fe1d', + }, + { + guidance: 'Please confirm sponsor organisation city.', + question: 'City', + input: { + type: 'textInput', + }, + questionId: 'city-d1d39f3ce1e8680603b6bd46bbbd2fa5', + }, + { + question: 'Postcode', + input: { + type: 'textInput', + }, + questionId: 'postcode-9fff28c663f524a18fc57d07d49142b6', + guidance: 'Please confirm sponsor organisation postcode.', + }, + { + guidance: "Please confirm sponsor organisation's country.", + questionId: 'country-7da3f75bb002e9f444282fe6eba9bfa2', + question: 'Country', + input: { + type: 'textInput', + }, + }, + { + questionId: 'sector-1e0611964625066c52da1ff3dfb5c5e9', + question: 'Sector', + input: { + type: 'textInput', + }, + guidance: "Please provide details of the sponsor's sector e.g. NHS, Academia, Charity, Industry.", + }, + { + guidance: 'Please specify the size of the organisation (small, medium, large).', + questionId: 'size-fb6e4688e5e99e365c5299baff249239', + input: { + type: 'textInput', + }, + question: 'Size', + }, + { + guidance: 'Please provide additional details, if applicable.', + input: { + type: 'textInput', + }, + question: 'Additional details', + questionId: 'additionaldetails-0c91e78a36ebf985fc0be9a420e58d14', + }, + { + questionId: 'contactemailaddress-9dffde669046f6c2225eda75786ce5ca', + input: { + type: 'textInput', + }, + question: 'Contact email address', + guidance: 'Please provide a contact email address for the sponsor organisation', + }, + { + questionId: 'addSponsorDetails', + input: { + type: 'buttonInput', + action: 'addRepeatableQuestions', + questionIds: [ + 'organisationname-e798ec22cc019a90c667d0879434d746', + 'registeredaddressline1-768e71b8a621031ff767738336404c75', + 'registeredaddressline2-ad9cde7d35d303607780655af5d6fe1d', + 'city-d1d39f3ce1e8680603b6bd46bbbd2fa5', + 'postcode-9fff28c663f524a18fc57d07d49142b6', + 'country-7da3f75bb002e9f444282fe6eba9bfa2', + 'sector-1e0611964625066c52da1ff3dfb5c5e9', + 'size-fb6e4688e5e99e365c5299baff249239', + 'additionaldetails-0c91e78a36ebf985fc0be9a420e58d14', + 'contactemailaddress-9dffde669046f6c2225eda75786ce5ca', + ], + text: '+ Add another sponsor', + class: 'btn btn-primary addButton', + separatorText: 'Additional sponsor details', + }, + guidance: + "If there are other sponsors to be specified as part of this application, click 'Add another sponsor' as required.", + }, + ], + }, + { + value: 'no', + text: 'No', + }, + ], + type: 'radioOptionsInput', + }, + question: 'Does your project have a sponsor?', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please select an option', + }, + ], + guidance: 'Please confirm if your project has a sponsor.', + }, + ], + questionSetId: 'safeproject-sponsorinformation', + }, + { + questionSetHeader: 'Declaration of interest', + questions: [ + { + guidance: 'Please indicate if there is any commercial aspect or dimension to the project or its outcomes.', + questionId: 'isthereacommercialinterestinthisproject-aeca9152dcaa8f68b5dd1d81456093bf', + validations: [ + { + message: 'Please select an option', + type: 'isLength', + params: [1], + }, + ], + question: 'Is there a commercial interest in this project?', + input: { + type: 'radioOptionsInput', + options: [ + { + text: 'Yes', + value: 'yes', + conditionalQuestions: [ + { + input: { + type: 'textInput', + }, + question: 'Organisation name', + questionId: 'organisationname-580cfbc664ccadc36159bb1c9f6c52ca', + guidance: 'Please confirm organisation name.', + }, + { + guidance: 'Please confirm 
organisation address.', + questionId: 'registeredaddressline1-30c1a89c33f1dd35421730c16b6a4a47', + input: { + type: 'textInput', + }, + question: 'Registered address (line 1)', + }, + { + questionId: 'registeredaddressline2-7fc23fb3bb766f3ca9cb007ab0a1571e', + input: { + type: 'textInput', + }, + question: 'Registered address (line 2)', + }, + { + questionId: 'postcode-e5c2e97319d3412ec904227dabf06605', + input: { + type: 'textInput', + }, + question: 'Postcode', + guidance: 'Please confirm organisation postcode.', + }, + { + question: 'City', + input: { + type: 'textInput', + }, + questionId: 'city-47356a357f172612cffdd4082dc6f705', + guidance: 'Please confirm organisation city.', + }, + { + guidance: 'Please confirm organisation country.', + question: 'Country', + input: { + type: 'textInput', + }, + questionId: 'country-42d004957eba55e8f5e3c1408c5874e6', + }, + { + input: { + type: 'textareaInput', + }, + question: 'Describe the nature of interest', + questionId: 'describethenatureofinterest-006fb10a759eb7caaba1d8c6f1797478', + }, + { + questionId: 'publicinterest-b7bebe7b9ed48dd6aee5e13c1dfa0220', + input: { + options: [ + { + value: 'iconfirmthatanycommercialinterestispublicinterestrelated', + text: 'I confirm that any commercial interest is public interest related.', + }, + ], + type: 'checkboxOptionsInput', + }, + question: 'Public interest', + label: 'Public interest', + }, + { + questionId: 'addDeclarationOfInterestDetails', + input: { + type: 'buttonInput', + action: 'addRepeatableQuestions', + questionIds: [ + 'organisationname-580cfbc664ccadc36159bb1c9f6c52ca', + 'registeredaddressline1-30c1a89c33f1dd35421730c16b6a4a47', + 'registeredaddressline2-7fc23fb3bb766f3ca9cb007ab0a1571e', + 'postcode-e5c2e97319d3412ec904227dabf06605', + 'city-47356a357f172612cffdd4082dc6f705', + 'country-42d004957eba55e8f5e3c1408c5874e6', + 'describethenatureofinterest-006fb10a759eb7caaba1d8c6f1797478', + 'publicinterest-b7bebe7b9ed48dd6aee5e13c1dfa0220', + ], + text: '+ Add another organisation', + class: 'btn btn-primary addButton', + separatorText: 'Additional organisation details', + }, + guidance: + "If there are other orgnisations to be specified as part of this application, click 'Add another organisation' as required.", + }, + ], + }, + { + text: 'No', + value: 'no', + }, + ], + label: 'Is there a commercial interest in this project?', + required: true, + }, + }, + ], + questionSetId: 'safeproject-declarationofinterest', + }, + { + questionSetId: 'safeproject-intellectualproperty', + questions: [ + { + guidance: + 'Intellectual Property is the tangible output of any intellectual activity that is new or previously undescribed. It has an owner; it can be bought, sold or licensed and must be adequately protected. It can include inventions, industrial processes, software, data, written work, designs and images. 
\\nAny research which could potentially lead to intellectual property rights for you or your employer should be discussed with your employer and your R&D office as early as possible in the planning of the research.\\n', + questionId: + 'pleaseindicateiftheresearchcouldleadtothedevelopmentofanewproductprocessorthegenerationofintellectualproperty-4bd0161d3097f24464f6a4cf8b8a0b0e', + question: + 'Please indicate if the research could lead to the development of a new product/process or the generation of intellectual property.', + input: { + type: 'textareaInput', + }, + }, + ], + questionSetHeader: 'Intellectual property', + }, + { + questionSetId: 'safedata-datafields', + questions: [ + { + input: { + type: 'textareaInput', + }, + question: + 'Please indicate the data necessary to conduct the study, the data fields required and the justifications for each field.', + questionId: + 'pleaseindicatethedatanecessarytoconductthestudythedatafieldsrequiredandthejustificationsforeachfield-b3e68aa6ffddc36c1919be39b09049ef', + guidance: + 'NHS Digital will require information about each dataset that you would like to have access to, including field names and justifications for each field.\\n\\nPlease contact NHS Digital to find out which data items are available within your selected datasets.\\n\\nYou can submit your application initially without the data items listed and NHS Digital will contact you to discuss the specific data items you require as part of the continued application process.', + }, + { + question: 'Data fields indicated via file upload', + input: { + options: [ + { + value: 'iconfirmthatihaveenclosedalistofdatasetsfieldsandvariablesrequiredforthestudyaswellasjustificationforeachfield', + text: + 'I confirm that I have enclosed a list of datasets, fields and variables required for the study as well as justification for each field.', + }, + ], + type: 'checkboxOptionsInput', + label: 'Data fields indicated via file upload', + }, + questionId: 'datafieldsindicatedviafileupload-0b02aa93afe7ba3a34d4fcffea9106fd', + guidance: + 'A description of precisely the criteria which define the patients to be included and to be excluded from the data extract you are requesting should be provided. \\n\\nThis should include precise date parameters for the start and end of the range requested (dd/mm/yy) and explain which dated project field will be used to define the requested cohort (e.g. date of admission or date of operation).', + }, + { + questionId: 'inclusionandexclusioncriteriaincludingdateparameters-481e42b8bdf56c82c14f3cb38d3facb4', + validations: [ + { + type: 'isLength', + params: [1], + message: 'Please enter a value', + }, + ], + question: 'Inclusion and exclusion criteria (including date parameters)', + input: { + type: 'textareaInput', + required: true, + }, + guidance: + 'A description of precisely the criteria which define the patients to be included and to be excluded from the data extract you are requesting should be provided. \\n\\nThis should include precise date parameters for the start and end of the range requested (dd/mm/yy) and explain which dated project field will be used to define the requested cohort (e.g. 
date of admission or date of operation).', + }, + { + questionId: 'willyourequireperiodicrefreshesofthedata-66b38ed7294bb5e1fb54e269987a6ee4', + question: 'Will you require periodic refreshes of the data?', + validations: [ + { + message: 'Please select an option', + params: [1], + type: 'isLength', + }, + ], + input: { + label: 'Will you require periodic refreshes of the data?', + required: true, + options: [ + { + conditionalQuestions: [ + { + questionId: 'howoftenwillthedatarefreshesbeneeded-4c7982108251204fcf91838c189a8dd6', + question: 'How often will the data refreshes be needed?', + input: { + label: 'How often will the data refreshes be needed?', + options: [ + { + text: 'Every month', + value: 'everymonth', + }, + { + text: 'Every 3 months', + value: 'every3months', + }, + { + text: 'Every 6 months', + value: 'every6months', + }, + { + text: 'Every 12 months', + value: 'every12months', + }, + { + value: 'other', + text: 'Other', + conditionalQuestions: [ + { + input: { + type: 'textInput', + }, + question: 'If other, please specify', + questionId: 'ifotherpleasespecify-faac222bc9033318dceb5ba458b1ab5e', + }, + ], + }, + ], + type: 'radioOptionsInput', + }, + guidance: 'Please indicate how often data refreshes will be needed. ', + }, + ], + text: 'Yes', + value: 'yes', + }, + { + value: 'no', + text: 'No', + }, + ], + type: 'radioOptionsInput', + }, + guidance: 'Please indicate if data refreshes will be required.', + }, + { + question: 'Do you require aggregated or record level data?', + input: { + type: 'textareaInput', + }, + questionId: 'doyourequireaggregatedorrecordleveldata-2d17aa88363da07c028b8bae68f04e9e', + guidance: + "Record level data typically relates to a single individual. There may be one or many records per individual. Such data would usually carry a risk of re-identification, and use of such data would be subject to strict controls.\\n\\nAggregate data would typically be 'counts' of an event - for example how many people had a particular operation over a specific time period. Aggregate data is not always anonymous data, and therefore may also be subject to specific controls.\\n\\n\\n\\n", + }, + ], + questionSetHeader: 'Data fields', + }, + { + questionSetId: 'safedata-otherdatasetsintentiontolinkdata', + questionSetHeader: 'Other datasets - Intention to link data', + questions: [ + { + guidance: + 'Please specify if you intend for the datasets to be linked with any additional datasets. 
Please also provide relevant information on the organisations undertaking linkages and provide a data flow diagram where applicable.', + questionId: + 'doyouintendforthedatasetsrequestedtobelinkedwithanyadditionaldatasetsotherthanthedatasetslistedinthisapplication-9015996e05b57f8588ffdb9306cd6140', + input: { + required: true, + label: + 'Do you intend for the datasets requested to be linked with any additional datasets, other than the datasets listed in this application?', + type: 'radioOptionsInput', + options: [ + { + conditionalQuestions: [ + { + questionId: + 'specifyalldatasetsorganisationswhichwillperformthelinkageandhowthelinkagewilltakeplace-ca84d4e177000cbf6269ba17e816061a', + question: 'Specify all datasets, organisations which will perform the linkage and how the linkage will take place.', + input: { + type: 'textareaInput', + }, + guidance: 'Please include details of the organisations undertaking the process of linkage.', + }, + ], + value: 'yes', + text: 'Yes', + }, + { + text: 'No', + value: 'no', + }, + ], + }, + question: + 'Do you intend for the datasets requested to be linked with any additional datasets, other than the datasets listed in this application?', + validations: [ + { + type: 'isLength', + params: [1], + message: 'Please select an option', + }, + ], + }, + ], + }, + { + questionSetId: 'safedata-lawfulbasis', + questionSetHeader: 'Lawful basis', + questions: [ + { + guidance: + 'The lawful bases for processing are set out in Article 6 of the GDPR. At least one of legal basis must apply whenever you process personal data. Please select appropriate Article 6 lawful basis. Processing shall be lawful only if and to the extent that at least one of the following applies.', + questionId: 'article6lawfulbasis-7d69315e9144498ac04a342d15b86102', + input: { + required: true, + label: 'Article 6 lawful basis', + type: 'radioOptionsInput', + options: [ + { + text: 'Not applicable', + value: 'notapplicable', + }, + { + text: + '(a) the data subject has given consent to the processing of his or her personal data for one or more specific purposes;', + value: 'athedatasubjecthasgivenconsenttotheprocessingofhisorherpersonaldataforoneormorespecificpurposes', + }, + { + value: + 'bprocessingisnecessaryfortheperformanceofacontracttowhichthedatasubjectispartyorinordertotakestepsattherequestofthedatasubjectpriortoenteringintoacontract', + text: + '(b) processing is necessary for the performance of a contract to which the data subject is party or in order to take steps at the request of the data subject prior to entering into a contract;', + }, + { + text: '(c) processing is necessary for compliance with a legal obligation to which the controller is subject;', + value: 'cprocessingisnecessaryforcompliancewithalegalobligationtowhichthecontrollerissubject', + }, + { + value: 'dprocessingisnecessaryinordertoprotectthevitalinterestsofthedatasubjectorofanothernaturalperson', + text: + '(d) processing is necessary in order to protect the vital interests of the data subject or of another natural person;', + }, + { + value: + 'eprocessingisnecessaryfortheperformanceofataskcarriedoutinthepublicinterestorintheexerciseofofficialauthorityvestedinthecontroller', + text: + '(e) processing is necessary for the performance of a task carried out in the public interest or in the exercise of official authority vested in the controller;', + }, + { + value: + 
'fprocessingisnecessaryforthepurposesofthelegitimateinterestspursuedbythecontrollerorbyathirdpartyexceptwheresuchinterestsareoverriddenbytheinterestsorfundamentalrightsandfreedomsofthedatasubjectwhichrequireprotectionofpersonaldatainparticularwherethedatasubjectisachild', + text: + '(f) processing is necessary for the purposes of the legitimate interests pursued by the controller or by a third party, except where such interests are overridden by the interests or fundamental rights and freedoms of the data subject which require protection of personal data, in particular where the data subject is a child.', + }, + ], + }, + validations: [ + { + message: 'Please select an option', + type: 'isLength', + params: [1], + }, + ], + question: 'Article 6 lawful basis', + }, + { + guidance: 'Please provide justification for selected Article 6 lawful basis.', + questionId: 'article6legalbasisjustification-4a99eddf3110298ab536b1f13429b8b6', + validations: [ + { + type: 'isLength', + params: [1], + message: 'Please enter a value', + }, + ], + question: 'Article 6 legal basis justification', + input: { + type: 'textareaInput', + required: true, + }, + }, + { + guidance: + "Please select appropriate Article 9 conditions. \\n \\nProcessing of personal data revealing racial or ethnic origin, political opinions, religious or philosophical beliefs, or trade union membership, and the processing of genetic data, biometric data for the purpose of uniquely identifying a natural person, data concerning health or data concerning a natural person's sex life or sexual orientation shall be prohibited. This does shall not apply if one of the following applies.", + question: 'Article 9 conditions', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please select an option', + }, + ], + input: { + type: 'radioOptionsInput', + options: [ + { + value: 'notapplicable', + text: 'Not applicable', + }, + { + text: + '(a) the data subject has given explicit consent to the processing of those personal data for one or more specified purposes, except where Union or Member State law provide that the prohibition referred to in paragraph 1 may not be lifted by the data subject;', + value: + 'athedatasubjecthasgivenexplicitconsenttotheprocessingofthosepersonaldataforoneormorespecifiedpurposesexceptwhereunionormemberstatelawprovidethattheprohibitionreferredtoinparagraph1maynotbeliftedbythedatasubject', + }, + { + value: + 'bprocessingisnecessaryforthepurposesofcarryingouttheobligationsandexercisingspecificrightsofthecontrollerorofthedatasubjectinthefieldofemploymentandsocialsecurityandsocialprotectionlawinsofarasitisauthorisedbyunionormemberstatelaworacollectiveagreementpursuanttomemberstatelawprovidingforappropriatesafeguardsforthefundamentalrightsandtheinterestsofthedatasubject', + text: + '(b) processing is necessary for the purposes of carrying out the obligations and exercising specific rights of the controller or of the data subject in the field of employment and social security and social protection law in so far as it is authorised by Union or Member State law or a collective agreement pursuant to Member State law providing for appropriate safeguards for the fundamental rights and the interests of the data subject;', + }, + { + value: + 'cprocessingisnecessarytoprotectthevitalinterestsofthedatasubjectorofanothernaturalpersonwherethedatasubjectisphysicallyorlegallyincapableofgivingconsent', + text: + '(c) processing is necessary to protect the vital interests of the data subject or of another natural person 
where the data subject is physically or legally incapable of giving consent;', + }, + { + value: + 'dprocessingiscarriedoutinthecourseofitslegitimateactivitieswithappropriatesafeguardsbyafoundationassociationoranyothernotforprofitbodywithapoliticalphilosophicalreligiousortradeunionaimandonconditionthattheprocessingrelatessolelytothemembersortoformermembersofthebodyortopersonswhohaveregularcontactwithitinconnectionwithitspurposesandthatthepersonaldataarenotdisclosedoutsidethatbodywithouttheconsentofthedatasubjects', + text: + '(d) processing is carried out in the course of its legitimate activities with appropriate safeguards by a foundation, association or any other not-for-profit body with a political, philosophical, religious or trade union aim and on condition that the processing relates solely to the members or to former members of the body or to persons who have regular contact with it in connection with its purposes and that the personal data are not disclosed outside that body without the consent of the data subjects;', + }, + { + value: 'eprocessingrelatestopersonaldatawhicharemanifestlymadepublicbythedatasubject', + text: '(e) processing relates to personal data which are manifestly made public by the data subject;', + }, + { + value: + 'fprocessingisnecessaryfortheestablishmentexerciseordefenceoflegalclaimsorwhenevercourtsareactingintheirjudicialcapacity', + text: + '(f) processing is necessary for the establishment, exercise or defence of legal claims or whenever courts are acting in their judicial capacity;', + }, + { + value: + 'gprocessingisnecessaryforreasonsofsubstantialpublicinterestonthebasisofunionormemberstatelawwhichshallbeproportionatetotheaimpursuedrespecttheessenceoftherighttodataprotectionandprovideforsuitableandspecificmeasurestosafeguardthefundamentalrightsandtheinterestsofthedatasubject', + text: + '(g) processing is necessary for reasons of substantial public interest, on the basis of Union or Member State law which shall be proportionate to the aim pursued, respect the essence of the right to data protection and provide for suitable and specific measures to safeguard the fundamental rights and the interests of the data subject;', + }, + { + value: + 'hprocessingisnecessaryforthepurposesofpreventiveoroccupationalmedicinefortheassessmentoftheworkingcapacityoftheemployeemedicaldiagnosistheprovisionofhealthorsocialcareortreatmentorthemanagementofhealthorsocialcaresystemsandservicesonthebasisofunionormemberstatelaworpursuanttocontractwithahealthprofessionalandsubjecttotheconditionsandsafeguardsreferredtoinparagraph3', + text: + '(h) processing is necessary for the purposes of preventive or occupational medicine, for the assessment of the working capacity of the employee, medical diagnosis, the provision of health or social care or treatment or the management of health or social care systems and services on the basis of Union or Member State law or pursuant to contract with a health professional and subject to the conditions and safeguards referred to in paragraph 3;', + }, + { + value: + 'iprocessingisnecessaryforreasonsofpublicinterestintheareaofpublichealthsuchasprotectingagainstseriouscrossborderthreatstohealthorensuringhighstandardsofqualityandsafetyofhealthcareandofmedicinalproductsormedicaldevicesonthebasisofunionormemberstatelawwhichprovidesforsuitableandspecificmeasurestosafeguardtherightsandfreedomsofthedatasubjectinparticularprofessionalsecrecy', + text: + '(i) processing is necessary for reasons of public interest in the area of public health, such as 
protecting against serious cross-border threats to health or ensuring high standards of quality and safety of health care and of medicinal products or medical devices, on the basis of Union or Member State law which provides for suitable and specific measures to safeguard the rights and freedoms of the data subject, in particular professional secrecy;', + }, + { + value: + 'jprocessingisnecessaryforarchivingpurposesinthepublicinterestscientificorhistoricalresearchpurposesorstatisticalpurposesinaccordancewitharticle891basedonunionormemberstatelawwhichshallbeproportionatetotheaimpursuedrespecttheessenceoftherighttodataprotectionandprovideforsuitableandspecificmeasurestosafeguardthefundamentalrightsandtheinterestsofthedatasubject', + text: + '(j) processing is necessary for archiving purposes in the public interest, scientific or historical research purposes or statistical purposes in accordance with Article 89(1) based on Union or Member State law which shall be proportionate to the aim pursued, respect the essence of the right to data protection and provide for suitable and specific measures to safeguard the fundamental rights and the interests of the data subject.', + }, + ], + label: 'Article 9 conditions', + required: true, + }, + questionId: 'article9conditions-92f54ef199bb231b800dff4df8f8c87f', + }, + { + question: 'Article 9 legal basis justification', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + ], + input: { + required: true, + type: 'textareaInput', + }, + questionId: 'article9legalbasisjustification-2f94a840058a4b37483daa89e92bbb19', + }, + ], + }, + { + questionSetId: 'safedata-confidentialityavenue', + questions: [ + { + guidance: + 'Please confirm if consent is in place for all disclosures of confidential information, if you have Section 251 exemption, or any other legal basis that you require for the project.\\n\\nFor England and Wales, please specify if Section 251 exemption is currently being sought and if so, please provide a Confidentiality Advisory group reference code.\\n\\nIn Scotland applications are required for the consented and unconsented use of data.', + input: { + type: 'radioOptionsInput', + options: [ + { + text: 'Not applicable', + value: 'notapplicable', + }, + { + text: 'Informed consent', + value: 'informedconsent', + conditionalQuestions: [ + { + questionId: 'informedconsentevidence-8c66fdb84ae00669e7e4b3131035f17c', + input: { + type: 'checkboxOptionsInput', + options: [ + { + value: + 'ihaveenclosedablankcopyofthepatientconsentformsandallrelatedinformationsheetsrelevanttothetimeperiodinthedatarequested', + text: + 'I have enclosed a blank copy of the patient consent form(s) and all related information sheets relevant to the time period in the data requested', + }, + ], + }, + question: 'Informed consent evidence', + label: 'Informed consent evidence', + guidance: 'Please ensure a copy of the consent form(s) and patient information sheet have been provided.', + }, + ], + }, + { + value: 'section251support', + text: 'Section 251 support', + conditionalQuestions: [ + { + question: 'Section 251 exemption evidence', + input: { + type: 'checkboxOptionsInput', + options: [ + { + value: 'ihaveenclosedacopyofthes251approvedamendmentsandanyrenewalletters', + text: 'I have enclosed a copy of the S251 approved amendments and any renewal letters', + }, + ], + }, + questionId: 'section251exemptionevidence-2ca3ab72621e1d05d2a27aa541669955', + label: 'Section 251 exemption evidence', + guidance: 'Please ensure a copy 
of the Section 251 exemption has been provided.', + }, + { + question: 'CAG reference', + input: { + type: 'textInput', + }, + questionId: 'cagreference-1d557ba11b79eef6790edda8cfa084ab', + }, + { + input: { + options: [ + { + text: 'Hold/receive personal data', + value: 'holdreceivepersonaldata', + }, + { + value: 'transferaccesspersonaldata', + text: 'Transfer/access personal data', + }, + { + value: 'operateonandlinkpersonaldata', + text: 'Operate on and link personal data', + }, + { + conditionalQuestions: [ + { + questionId: 'ifotherpleasespecify-dde51ff375a7e792fe6109dbbd71897e', + question: 'If other, please specify', + input: { + type: 'textInput', + }, + }, + ], + text: 'Other', + value: 'other', + }, + ], + type: 'checkboxOptionsInput', + }, + question: 'The section 251 approval enables the applicant to', + questionId: 'thesection251approvalenablestheapplicantto-15c79dfc700083e333915ad4b9d91038', + label: 'The section 251 approval enables the applicant to', + guidance: 'Please indicate what the Section 251 exemption permits you to do as part of your project.', + }, + ], + }, + { + conditionalQuestions: [ + { + question: 'If other, please specify', + input: { + type: 'textInput', + }, + questionId: 'ifotherpleasespecify-d556c56c393b1a086c94b6ce8a8b412c', + }, + ], + text: 'Other', + value: 'other', + }, + ], + required: true, + label: 'Please provide the legal basis to process confidential information', + }, + question: 'Please provide the legal basis to process confidential information', + validations: [ + { + message: 'Please select an option', + params: [1], + type: 'isLength', + }, + ], + questionId: 'pleaseprovidethelegalbasistoprocessconfidentialinformation-31315424250097e6266f1ebd6ddc619a', + }, + ], + questionSetHeader: 'Confidentiality avenue', + }, + { + questionSetHeader: 'Ethics approval', + questions: [ + { + questionId: 'hasethicsapprovalbeenobtained-700c42da3de684b16420d1556d6ca86b', + input: { + label: 'Has ethics approval been obtained?', + required: true, + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + input: { + type: 'textInput', + }, + question: 'Approval - REC committee name', + questionId: 'approvalreccommitteename-1d5c260746636251354fd8c81288f8ce', + guidance: 'Please provide REC or other committee details.', + }, + { + question: 'Approval - REC reference number', + input: { + type: 'textInput', + }, + questionId: 'approvalrecreferencenumber-3ebc7100dd0ecd3bdf24003e1157dbf4', + }, + { + question: 'Approval - Other committee', + input: { + type: 'textInput', + }, + questionId: 'approvalothercommittee-7eddf6d37ff8154c372f51634e0c05c6', + }, + { + guidance: 'Please confirm a copy of the REC referenced above has been enclosed.', + label: 'Evidence of REC approval', + questionId: 'evidenceofrecapproval-de13946031ff7802a6aeb563174185df', + question: 'Evidence of REC approval', + input: { + options: [ + { + value: 'ihaveenclosedacopyofthefinalrecapprovalletterandlettersdocumentinganyrecapprovedamendments', + text: + 'I have enclosed a copy of the final REC approval letter and letters documenting any REC approved amendments', + }, + ], + type: 'checkboxOptionsInput', + }, + }, + ], + }, + { + text: 'No', + value: 'no', + conditionalQuestions: [ + { + question: 'If not, please provide more details', + input: { + type: 'textInput', + }, + questionId: 'ifnotpleaseprovidemoredetails-6703121361cf22ec74a3246bbca827ca', + }, + ], + }, + { + text: 'Approval pending', + value: 'approvalpending', + conditionalQuestions: [ + { + guidance: 'If 
approval is pending, please provide details.', + input: { + type: 'textInput', + }, + question: 'If approval is pending, please provide more details', + questionId: 'ifapprovalispendingpleaseprovidemoredetails-368bfcd052089018e98adc36d5e31b21', + }, + ], + }, + { + conditionalQuestions: [ + { + guidance: 'If ethics approval is not required, please explain why this is the case.', + questionId: 'ifnotrequiredpleaseprovidedetails-1e70f424ee6e2584b00b4115a4c32437', + question: 'If not required, please provide details', + input: { + type: 'textInput', + }, + }, + ], + value: 'notrequired', + text: 'Not required', + }, + ], + type: 'radioOptionsInput', + }, + question: 'Has ethics approval been obtained?', + validations: [ + { + message: 'Please select an option', + type: 'isLength', + params: [1], + }, + ], + guidance: + 'Please confirm if ethics approval has been obtained. Request for research purposes must include enclose evidence of ethics approval or evidence that this is not required.', + }, + ], + questionSetId: 'safedata-ethicsapproval', + }, + { + questionSetId: 'safesettings-storageandprocessing', + questions: [ + { + guidance: 'Please specify if the data will be accessed within a Trusted Research Environment. ', + questionId: 'willthedatabeaccessedwithinatrustedresearchenvironment-dd7ac65a76e29f3df9d741a04176df38', + question: 'Will the data be accessed within a trusted research environment?', + input: { + label: 'Will the data be accessed within a trusted research environment?', + type: 'radioOptionsInput', + options: [ + { + value: 'yes', + text: 'Yes', + conditionalQuestions: [ + { + input: { + type: 'radioOptionsInput', + options: [ + { + text: 'Secure e-Research Platform (SeRP)', + value: 'secureeresearchplatformserp', + }, + { + value: 'nihonestbrokerservicenihbs', + text: 'NI Honest Broker Service (NI HBS)', + }, + { + value: 'scottishnationalsafehavensnsh', + text: 'Scottish National Safe Haven (SNSH)', + }, + { + text: 'NHS Digital', + value: 'nhsdigital', + conditionalQuestions: [ + { + questionId: + 'doestheapplicantorganisationhaveadsptoolkitifsopleaseprovidedetailsincludingcodescoreandversioncompleted-f45752ad4e835f9f1e222349b8e3ebca', + input: { + type: 'textInput', + }, + question: + 'Does the applicant organisation have a DSP Toolkit? 
If so, please provide details including code, score and version completed.', + }, + ], + }, + { + text: 'SAIL Databank', + value: 'saildatabank', + }, + { + value: 'onssecureresearchservicesrs', + text: 'ONS Secure Research Service (SRS)', + }, + { + conditionalQuestions: [ + { + guidance: "If you have selected 'Other', please specify the Trusted Research Environment.", + question: 'If other, please specify', + input: { + type: 'textInput', + }, + questionId: 'ifotherpleasespecify-59df0d64b73cc54a6a59749c24394302', + }, + ], + text: 'Other', + value: 'other', + }, + ], + label: 'In which Trusted Research Environment will the data be accessed?', + }, + question: 'In which Trusted Research Environment will the data be accessed?', + questionId: 'inwhichtrustedresearchenvironmentwillthedatabeaccessed-8f40c649e9dfc6d286b85b0eb041aa7d', + guidance: 'Please indicate the Trusted Research Environment where the data will be accessed.', + }, + ], + }, + { + conditionalQuestions: [ + { + input: { + type: 'textInput', + }, + question: 'Registered name of organisation', + questionId: 'registerednameoforganisation-7529d2095115a155f4cbb1ca0a140ba0', + }, + { + guidance: 'Please provide ICO registration details.', + input: { + type: 'textInput', + }, + question: 'Registered number', + questionId: 'registerednumber-684ff1488ac289ab88dbef33ec45f806', + }, + { + label: 'Will this organisation be storing or processing the data?', + input: { + options: [ + { + text: 'Storage', + value: 'storage', + }, + { + text: 'Processing', + value: 'processing', + }, + ], + type: 'checkboxOptionsInput', + }, + question: 'Will this organisation be storing or processing the data?', + questionId: 'willthisorganisationbestoringorprocessingthedata-0b0117822908ebcf01930d05a2015cf9', + }, + { + questionId: 'whattypeofsecurityassurancedoesthisorganisationhaveinplace-e1e2552299764eaa1c27809efb8abc6c', + input: { + options: [ + { + guidance: + 'Adequate security assurance must be provided for all processing locations. Each organisation processing data that is not fully anonymous as part of this project must demonstrate that they have appropriate security arrangements are in place. Please confirm whether the applicant organisation has a compliant Data Security and Protection Toolkit.', + value: 'datasecurityandprotectiontoolkitdsptoolkit', + text: 'Data security and Protection Toolkit (DSP Toolkit)', + conditionalQuestions: [ + { + guidance: + "If you have selected 'Other', please specify the type of security assurance the organisation has put in place.", + questionId: 'dsptoolkitorganisationcode-c521d50968cda9e7fba09e9eb4611379', + input: { + type: 'textInput', + }, + question: 'DSP Toolkit organisation code', + }, + { + questionId: 'dsptoolkitscore-1785f9ff139c7a563da26fd4a85b6cdc', + question: 'DSP Toolkit score', + input: { + type: 'textInput', + }, + guidance: + "As a data controller, the applicant's organisation should be registered with the Information Commissioner's Office (ICO). 
Please provide Security and Protection Toolkit (DSP Toolkit) details.", + }, + { + questionId: 'dsptoolkitversioncompleted-e8c56687e00c8ed3d4c64b06ee5fd1e4', + question: 'DSP Toolkit version completed', + input: { + type: 'textInput', + }, + }, + ], + }, + { + conditionalQuestions: [ + { + question: 'Evidence of ISO 27001', + input: { + options: [ + { + guidance: 'Please confirm that you have enclosed a copy of your ISO 27001 certificate.', + value: 'ihaveenclosedacopyofmycertificate', + text: 'I have enclosed a copy of my certificate', + }, + ], + type: 'checkboxOptionsInput', + }, + questionId: 'evidenceofiso27001-44e1246414a73510f3083c988c43e0de', + label: 'Evidence of ISO 27001', + }, + ], + value: 'iso27001', + text: 'ISO 27001', + }, + { + value: 'slsp', + text: 'SLSP', + conditionalQuestions: [ + { + label: 'Evidence of SLSP', + questionId: 'evidenceofslsp-8a275e840a90dafa50b2f0399069da64', + input: { + type: 'checkboxOptionsInput', + options: [ + { + text: 'I have enclosed a completed system level security policy for ODR review', + value: 'ihaveenclosedacompletedsystemlevelsecuritypolicyforodrreview', + }, + ], + }, + question: 'Evidence of SLSP', + }, + ], + }, + { + conditionalQuestions: [ + { + questionId: 'ifotherpleasespecify-c0aa313639a78028e2079d8857527cff', + input: { + type: 'textInput', + }, + question: 'If other, please specify', + }, + ], + text: 'Other', + value: 'other', + }, + ], + type: 'checkboxOptionsInput', + }, + question: 'What type of security assurance does this organisation have in place?', + label: 'What type of security assurance does this organisation have in place?', + }, + { + questionId: 'addStorageAndProcessingDetails', + input: { + type: 'buttonInput', + action: 'addRepeatableQuestions', + questionIds: [ + 'registerednameoforganisation-7529d2095115a155f4cbb1ca0a140ba0', + 'registerednumber-684ff1488ac289ab88dbef33ec45f806', + 'willthisorganisationbestoringorprocessingthedata-0b0117822908ebcf01930d05a2015cf9', + 'whattypeofsecurityassurancedoesthisorganisationhaveinplace-e1e2552299764eaa1c27809efb8abc6c', + ], + text: '+ Add another organisation', + class: 'btn btn-primary addButton', + separatorText: 'Additional organisation details', + }, + guidance: + "If there are other orgnisations to be specified as part of this application, click 'Add another organisation' as required.", + }, + ], + text: 'No (Please provide details of the processing/storage organisations below)', + value: 'nopleaseprovidedetailsoftheprocessingstorageorganisationsbelow', + }, + ], + }, + }, + ], + questionSetHeader: 'Storage and processing', + }, + { + questionSetId: 'safesettings-dataflow', + questions: [ + { + guidance: + 'Jurisdiction (coverage) is defined as the location of the healthcare services who originated / initially provided the extract of data you are requesting. 
\\n A description of the following must be provided:\\n - All locations where data is processed\\n - All transfers that take place between locations and organisations\\n - Data linkages to other data sets.', + questionId: 'willthedatabetransferredoutsideoftheunitedkingdom-7a69ac1db4747aac21f40d1440372ee8', + question: 'Will the data be transferred outside of the United Kingdom?', + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please select an option', + }, + ], + input: { + label: 'Will the data be transferred outside of the United Kingdom?', + required: true, + options: [ + { + text: 'Yes', + value: 'yes', + conditionalQuestions: [ + { + input: { + type: 'textareaInput', + }, + question: 'If yes, please provide more details', + questionId: 'ifyespleaseprovidemoredetails-e13d902d5669329df88e6bace3c58398', + }, + ], + }, + { + text: 'No', + value: 'no', + }, + ], + type: 'radioOptionsInput', + }, + }, + { + questionId: 'pleasespecifytheregionswheredatawillbeprocessed-9e52bf589375cddbce25a7d51ae2e67d', + question: 'Please specify the regions where data will be processed.', + input: { + label: 'Please specify the regions where data will be processed.', + type: 'checkboxOptionsInput', + options: [ + { + value: 'englandwales', + text: 'England/Wales', + }, + { + value: 'unitedkingdom', + text: 'United Kingdom', + }, + { + value: 'europeaneconomicarea', + text: 'European Economic Area', + }, + { + text: 'Other', + value: 'other', + }, + ], + }, + guidance: + 'Please indicate if data will be transferred outside of the European Economic Area, it must be stated where to and details given of how that will be in compliance with the Data Protection Act 2018.\\n\\n If data are to be stored or processed outside of England/Wales, it may be that you will need to provide further assurance to support your application', + }, + { + question: 'Please provide detailed information on data flows', + input: { + type: 'textareaInput', + }, + questionId: 'pleaseprovidedetailedinformationondataflows-f3334c7e743fad5b3d148bea4bf2f0f2', + guidance: + 'In this section you should confirm that you have enclosed a data flow diagram in your application. Please send us flow diagram via email. A data flow diagram is helpful in showing planned data flows of how data will move through the project, whether the data are identifiable or pseudonymised, who has access to, who is responsible for the data at any point, the permissions/consent in place and how it will be kept secure at every stage.\\n \\n The data flow should describe which organisation (if more than one) will be receiving the data and in what form (anonymised/limited access de-identified/personal). 
Your data flow should include:\\n \\n - All locations where the data will be housed/stored\\n \\n - All transfers of data that will take place between organisations (and premises if an organisation has more than one remises where the data will be housed/stored)\\n \\n - The format of the data as part of each transfer (anonymised/limited access de-identified/personal)\\n \\n - If applicable, where the data will undergo any linkages to other data sets\\n \\n Please display only the data requested and any linked datasets, and not the entire project data flow.', + }, + { + questionId: + 'pleaseincludeadataflowdiagramfortherequesteddataandanyadditionaldatasetsintendedtobelinked-406fb1e63a0c77760222bd32cdf055e5', + input: { + label: 'Please include a data flow diagram for the requested data and any additional datasets intended to be linked.', + type: 'checkboxOptionsInput', + options: [ + { + value: 'ihaveenclosedacopyofthedataflow', + text: 'I have enclosed a copy of the dataflow', + }, + ], + }, + question: 'Please include a data flow diagram for the requested data and any additional datasets intended to be linked.', + }, + ], + questionSetHeader: 'Dataflow', + }, + { + questionSetId: 'safeoutputs-outputsdisseminationplans', + questions: [ + { + guidance: + 'Please describe how you plan to disseminate the results from your proposal. \\n\\nAs the public might not read scientific literature or attend conferences, please consider how the results or findings will be disseminated to the wider public and how this fits with the public benefit of the proposal. \\n\\nPlease indicate if you plan to publish your findings in an open access journal. Reference should also be made to policy documents, service frameworks or strategies that are relevant.', + question: 'How will proposal findings be disseminated, to what audience and in what format?', + validations: [ + { + message: 'Please enter a value', + type: 'isLength', + params: [1], + }, + ], + input: { + required: true, + type: 'textareaInput', + }, + questionId: 'howwillproposalfindingsbedisseminatedtowhataudienceandinwhatformat-cb1bb8cc88200378b5711fc3e4504e4c', + }, + { + guidance: + 'Provide an outline of your plan, on what data, and how this will be done, with the anticipated outcomes and outputs and whether the outputs are in record level form.', + input: { + type: 'textareaInput', + required: true, + }, + validations: [ + { + params: [1], + type: 'isLength', + message: 'Please enter a value', + }, + ], + question: 'Please include any milestones for outputs dissemination.', + questionId: 'pleaseincludeanymilestonesforoutputsdissemination-fb1d923bcad197405b8c98256588f1cd', + }, + { + input: { + type: 'textareaInput', + required: true, + }, + validations: [ + { + message: 'Please enter a value', + params: [1], + type: 'isLength', + }, + ], + question: + 'What steps will be taken to ensure that individuals cannot be identified? Please describe what disclosure control policy will be applied.', + questionId: + 'whatstepswillbetakentoensurethatindividualscannotbeidentifiedpleasedescribewhatdisclosurecontrolpolicywillbeapplied-9268ebc3ed695f413cf4f09af52e2c95', + guidance: + 'Please describe the steps you will take to ensure the confidentiality of the data when disseminating or publishing your findings. 
This may include the application of disclosure control procedures, aggregation of data or other approaches.', + }, + ], + questionSetHeader: 'Outputs dissemination plans', + }, + { + questionSetId: 'safeoutputs-retention', + questionSetHeader: 'Retention', + questions: [ + { + validations: [ + { + message: 'Please enter a value', + type: 'isLength', + format: 'dd/MM/yyyy', + params: [1], + }, + ], + question: 'Please state the date until which you will retain the data', + input: { + type: 'datePickerCustom', + required: true, + }, + questionId: 'Please state the date until which you will retain the data', + guidance: 'Please confirm how long you intend to retain the data relating to your proposal.', + }, + { + question: 'Please indicate the reason for this date', + input: { + type: 'textareaInput', + }, + questionId: 'pleaseindicatethereasonforthisdate-f95628ddfa8a84e9d078fd81daeb7df9', + }, + { + question: + 'Please provide details of any permissions that will need to apply for an extension to during this period in order to retain a legal basis to hold the data (e.g. section 251)', + input: { + type: 'textareaInput', + }, + questionId: + 'pleaseprovidedetailsofanypermissionsthatwillneedtoapplyforanextensiontoduringthisperiodinordertoretainalegalbasistoholdthedataegsection251-aef776a8ab341880e6413bd0125c476f', + }, + ], + }, + { + questionSetId: 'add-safepeople-otherindividuals', + questions: [ + { + questionId: 'add-safepeople-otherindividual', + input: { + type: 'buttonInput', + action: 'addRepeatableSection', + panelId: 'safepeople-otherindividuals', + text: '+ Add another individual', + class: 'btn btn-primary addButton', + }, + guidance: + "If there are other individuals to be specified as part of this application, click 'Add another individual' as required.", + }, + ], + }, + ], + }, +]; diff --git a/src/resources/utilities/dynamicForms/__tests__/dynamicForm.util.test.js b/src/resources/utilities/dynamicForms/__tests__/dynamicForm.util.test.js new file mode 100644 index 00000000..bd776b6c --- /dev/null +++ b/src/resources/utilities/dynamicForms/__tests__/dynamicForm.util.test.js @@ -0,0 +1,128 @@ +import constants from '../../../utilities/constants.util'; +import dynamicFormUtil from '../dynamicForm.util'; + +import _ from 'lodash'; + +const dataSchema = require('../__mocks__/formSchema'); + +describe('findQuestionSet', () => { + // Arrange + let data = _.cloneDeep(dataSchema[0]); + const cases = [ + ['invalidId', data, {}], + ['', data, {}], + ['', {}, {}], + ['applicant', data, data.questionSets[0]], + ['safeproject-aboutthisapplication', data, data.questionSets[2]], + ['safeproject-funderinformation', data, data.questionSets[4]], + ['safeproject-declarationofinterest', data, data.questionSets[6]], + ['safedata-datafields', data, data.questionSets[8]] + ]; + test.each(cases)( + 'given a valid question set identifier and json schema, then the correct question set is returned', + (questionSetId, schema, expectedResult) => { + // Act + const result = dynamicFormUtil.findQuestionSet(questionSetId, schema); + // Assert + expect(result).toEqual(expectedResult); + } + ); +}); + +describe('findQuestionPanel', () => { + // Arrange + let data = _.cloneDeep(dataSchema[0].questionPanels); + const cases = [ + ['invalidId', data, {}], + ['', data, {}], + ['', {}, {}], + ['applicant', data, data[0]], + ['safepeople-otherindividuals', data, data[1]], + ['safeproject-aboutthisapplication', data, data[2]], + ['safeproject-projectdetails', data, data[3]], + ['safeproject-funderinformation', data, 
data[4]] + ]; + test.each(cases)( + 'given a valid panel identifier and panel array, then the correct panel is returned', + (panelId, questionPanels, expectedResult) => { + // Act + const result = dynamicFormUtil.findQuestionPanel(panelId, questionPanels); + // Assert + expect(result).toEqual(expectedResult); + } + ); +}); + +describe('findQuestion', () => { + // Arrange + let data = _.cloneDeep(dataSchema[0].questionSets); + const cases = [ + ['invalidId', data, {}], + ['', data, {}], + ['', {}, {}], + ['fullname-a218cf35b0847b14d5f6d565b01e2f8c', data[0], data[0].questions[0]], + ['jobtitle-6ddd85c18e8da4ac08f376073932128f', data[0], data[0].questions[1]], + ['orcid-7c5167922d97afe681f4b7c388b0a70a', data[0], data[0].questions[3]], + ['willyouaccessthedatarequested-765aee4e52394857f7cb902bddeafe04', data[0], data[0].questions[5]], + ['areyouanaccreditedresearcherunderthedigitaleconomyact2017-16c0422c22522e7e83dd0143242cbdda', data[0], data[0].questions[6]] + ]; + test.each(cases)( + 'given a valid question identifier and parent question set, then the correct question is returned', + (questionId, questionSet, expectedResult) => { + // Act + const result = dynamicFormUtil.findQuestion(questionId, questionSet); + // Assert + expect(result).toEqual(expectedResult); + } + ); +}); + +describe('findQuestionRecursive', () => { + // Arrange + let data = _.cloneDeep(dataSchema[0].questionSets); + const cases = [ + ['invalidId', data[0].questions, undefined], + ['', data[0].questions, undefined], + ['', {}, undefined], + ['fullname-a218cf35b0847b14d5f6d565b01e2f8c', data[0].questions, data[0].questions[0]], + ['jobtitle-6ddd85c18e8da4ac08f376073932128f', data[0].questions, data[0].questions[1]], + ['orcid-7c5167922d97afe681f4b7c388b0a70a', data[0].questions, data[0].questions[3]], + ['willyouaccessthedatarequested-765aee4e52394857f7cb902bddeafe04', data[0].questions, data[0].questions[5]], + ['areyouanaccreditedresearcherunderthedigitaleconomyact2017-16c0422c22522e7e83dd0143242cbdda', data[0].questions, data[0].questions[6]], + ['ifyespleaseprovideyouraccreditedresearchernumber-7a87ef841f884a7aad6f48252f9fc670', data[0].questions, data[0].questions[6].input.options[0].conditionalQuestions[0]], + ['pleasespecifyifyouareplanningtobecomeanaccreditedresearcher-d93e3edff26a69fb961a28032719960c', data[0].questions, data[0].questions[6].input.options[1].conditionalQuestions[0]], + ['ifotherpleasespecify-fa9e063fd5f253ae6dc76080db560bcc', data[1].questions, data[1].questions[3].input.options[3].conditionalQuestions[0]], + ['ifyespleaseprovidedetails-8e5c491c36c07ba9a5a1a15569ba9127', data[1].questions, data[1].questions[5].input.options[0].conditionalQuestions[0]], + ['ifotherpleasespecify-faac222bc9033318dceb5ba458b1ab5e', data[8].questions, data[8].questions[3].input.options[0].conditionalQuestions[0].input.options[4].conditionalQuestions[0]] + ]; + test.each(cases)( + 'given a valid question identifier and parent question set, then the correct question is returned', + (questionId, questionsArr, expectedResult) => { + // Act + const result = dynamicFormUtil.findQuestionRecursive(questionsArr, questionId); + // Assert + expect(result).toEqual(expectedResult); + } + ); +}); + +describe('insertQuestionSeparator', () => { + // Arrange + let data = _.cloneDeep(dataSchema[0].questionSets); + const cases = [ + [undefined, undefined, []], + ['', [], []], + ['Additional organisation details', undefined, []], + ['Additional organisation details', data[0].questions, [{ ...data[0].questions[0], question : 
`\nAdditional organisation details\n\nFull name`}, ...data[0].questions.slice(1)]] + ]; + test.each(cases)( + 'given a question array, and a string of text, the title in first question of the array is modified to include the separator and new line formatting', + (separatorText, questionsArr, expectedResult) => { + // Act + const result = dynamicFormUtil.insertQuestionSeparator(questionsArr, separatorText); + // Assert + expect(result).toEqual(expectedResult); + } + ); +}); + diff --git a/src/resources/utilities/dynamicForms/dynamicForm.util.js b/src/resources/utilities/dynamicForms/dynamicForm.util.js new file mode 100644 index 00000000..af55e5b4 --- /dev/null +++ b/src/resources/utilities/dynamicForms/dynamicForm.util.js @@ -0,0 +1,393 @@ +import randomstring from 'randomstring'; +import _ from 'lodash'; + +let findQuestion = (questionId = '', questionSet = []) => { + if (!_.isEmpty(questionId) && !_.isEmpty(questionSet)) { + let { questions } = questionSet; + if (!_.isEmpty(questions)) { + return questions.find(q => q.questionId === questionId) || {}; + } + } + return {}; +}; + +let findQuestionRecursive = (questionsArr, questionId) => { + let child; + + if (!questionsArr || _.isEmpty(questionsArr)) return; + + for (const questionObj of questionsArr) { + if (questionObj.questionId === questionId) + { + return questionObj; + } + + if (typeof questionObj.input === 'object' && typeof questionObj.input.options !== 'undefined') { + questionObj.input.options + .filter(option => { + return typeof option.conditionalQuestions !== 'undefined' && option.conditionalQuestions.length > 0; + }) + .forEach(option => { + if(!child) { + child = findQuestionRecursive(option.conditionalQuestions, questionId); + } + }); + } + + if (child) return child; + } +}; + +let findQuestionSet = (questionSetId = '', schema = {}) => { + if (!_.isEmpty(questionSetId) && !_.isEmpty(schema)) { + let { questionSets } = schema; + return [...questionSets].find(q => q.questionSetId === questionSetId) || {}; + } + return {}; +}; + +let findQuestionPanel = (panelId = '', questionPanels = []) => { + if (!_.isEmpty(panelId) && !_.isEmpty(questionPanels)) { + return [...questionPanels].find(qp => qp.panelId === panelId) || {}; + } + return {}; +}; + +let duplicateQuestionSet = (questionSetId, schema) => { + let { questionSets } = schema; + // 1. find questionSet + let qSet = findQuestionSet(questionSetId, schema); + if (!_.isEmpty(qSet)) { + // 2. find the questionSet to duplicate for the qSet + let { + questions: [question], + } = { ...qSet }; + // 3. duplicate questionSet ensure we take a copy + let qSetDuplicate = [...questionSets].find(q => q.questionSetId === question.input.panelId); + // 4. modify the questions array questionIds + let qSetModified = modifyQuestionSetIds(qSetDuplicate); + // 5. return the modified questionSet + return qSetModified; + } + return {}; +}; + +let duplicateQuestions = (questionSetId, questionIdsToDuplicate, separatorText = '', schema) => { + // 1. find question set containing questions to duplicate + let qSet = findQuestionSet(questionSetId, schema); + // 2. map array of questions to duplicate + let duplicatedQuestions = questionIdsToDuplicate.map((questionId) => { + // 3. find each question within question set + let question = findQuestionRecursive(qSet.questions, questionId); + if(question) { + return question; + } + }); + // 4. modify question ids with unique values + let modifiedQuestions = modifyQuestionIds(questionSetId, duplicatedQuestions); + // 5. 
insert separator text before new duplicated questions + if(!_.isEmpty(separatorText)) { + modifiedQuestions = insertQuestionSeparator(modifiedQuestions, separatorText); + } + // 6. return array of questions + return modifiedQuestions; +}; + +let modifyQuestionSetIds = questionSet => { + let { questionSetId, questions } = { ...questionSet }; + let uniqueId = randomstring.generate(5); + questionSetId = `${questionSetId}_${uniqueId}`; + // 1.loop over each qObj and if questionId update + let questionsModified = [...questions].reduce((arr, qValue) => { + // 2. ensure we copy the original question deep + let question = _.cloneDeep(qValue); + // 3. if there is a questionId update + if (typeof question.questionId !== 'undefined') { + question.questionId = `${qValue.questionId.toLowerCase()}_${uniqueId}`; + } + // 4. if qObj has input and input.options meaning potential nest, loop over nested options + if (typeof question.input === 'object' && typeof question.input.options !== 'undefined') { + modifyNestedQuestionIds([...question.input.options], uniqueId); + } + return [...arr, question]; + }, []); + + questionsModified = [ + ...questionsModified, + { + input: { + type: 'buttonInput', + action: 'removeRepeatableSection', + panelId: questionSetId, + text: 'Remove', + class: 'btn btn-light', + }, + question: '', + questionId: `remove${questionSetId}_${uniqueId}`, + }, + ]; + return { + ...questionSet, + questionSetId: questionSetId, + questions: questionsModified, + }; +}; + +let modifyQuestionIds = (questionSetId, questions) => { + let uniqueId = randomstring.generate(5); + // 1.loop over each qObj and if questionId update + let questionsModified = [...questions].reduce((arr, qValue) => { + // 2. ensure we copy the original question deep + let question = _.cloneDeep(qValue); + // 3. if there is a questionId update + if (typeof question.questionId !== 'undefined') { + question.questionId = `${qValue.questionId.toLowerCase()}_${uniqueId}`; + } + // 4. if qObj has input and input.options meaning potential nest, loop over nested options + if (typeof question.input === 'object' && typeof question.input.options !== 'undefined') { + modifyNestedQuestionIds([...question.input.options], uniqueId); + } + return [...arr, question]; + }, []); + // 5. append remove button for repeated questions + questionsModified = [ + ...questionsModified, + { + input: { + type: 'buttonInput', + action: 'removeRepeatableQuestions', + questionIds: questions.map((q) => { return `${q.questionId.toLowerCase()}_${uniqueId}` }), + text: 'Remove', + class: 'btn btn-light', + }, + question: '', + questionId: `remove${questionSetId}_${uniqueId}`, + }, + ]; + // 6. return the updated questions array + return questionsModified; +}; + +let modifyNestedQuestionIds = (questionsArr, uniqueId) => { + let child; + let qArr = [...questionsArr]; + + if (!questionsArr) return; + + for (let questionObj of qArr) { + // 1. test each option obj if have conditionals and a length + if (typeof questionObj.conditionalQuestions !== 'undefined' && questionObj.conditionalQuestions.length > 0) { + // 2. for each option in conditional questions loop + questionObj.conditionalQuestions.forEach(option => { + // 3. test if option has a questionId and if so modify + if (typeof option.questionId !== 'undefined') { + option['questionId'] = `${option.questionId.toLowerCase()}_${uniqueId}`; + } + // 4. 
test the input for options and if options defined means it is another recursive loop call + if (typeof questionObj.input === 'object' && typeof questionObj.input.options !== 'undefined') { + child = modifyNestedQuestionIds(option.conditionalQuestions, uniqueId); + } + }); + } + // 5. return recursive call + if (child) return child; + } +}; + +let insertQuestionSeparator = (questionsArr = [], separatorText = '') => { + // 1. guard for empty questions array and empty separator + if(_.isEmpty(questionsArr) || _.isEmpty(separatorText)) { + return questionsArr; + } + // 2. locate and update the first duplicate question + questionsArr[0].question = `\n${separatorText}\n\n${questionsArr[0].question}`; + // 3 return mutated questions with separator pre-pended + return questionsArr; +} + +let insertQuestionSet = (questionSetId, duplicateQuestionSet, schema) => { + let { questionPanels, questionSets } = { ...schema }; + // 1. update the questionSets with our new duplicatedQuestion + questionSets = [...questionSets, duplicateQuestionSet]; + + let qSet = findQuestionSet(questionSetId, schema); + + if (!_.isEmpty(qSet)) { + // 2. find the questionSet to duplicate for the qSet + let { + questions: [question], + } = qSet; + // 3. get the questionSetId that we need to insert into our questionPanel + if (typeof question.input.panelId !== 'undefined') { + let { + input: { panelId }, + } = question; + // 4. find question panel + let questionPanel = findQuestionPanel(panelId, questionPanels) || {}; + if (!_.isEmpty(questionPanel)) { + let { questionSets } = questionPanel; + // 5. new questionSet to be pushed + let questionSet = { + index: 5, + questionSetId: duplicateQuestionSet.questionSetId, + }; + let idx = questionSets.length - 1; + // 6. push into preliminary position + questionSets.splice(idx, 0, questionSet); + } + return { + ...schema, + questionSets, + questionPanels, + }; + } + } + return { ...schema }; +}; + +let insertQuestions = (questionSetId, targetQuestionId, duplicatedQuestions, schema) => { + // 1. find question set index in schema + let qSetIdx = schema.questionSets.findIndex(q => q.questionSetId === questionSetId); + // locate and update parent of target questionId (Id of the button that invoked this action) with the duplicated questions + let found = false; + // 2. Recursive function to iterate through each level of questions + schema.questionSets[qSetIdx].questions.forEach(function iter(currentQuestion, index, currentArray) { + // 3. Prevent unnecessary computation by exiting loop if question was found + if (found) { + return; + } + // 4. If the current question matches the target question, replace with updated question + if (currentQuestion.questionId === targetQuestionId) { + currentArray.splice(currentArray.length - 1, 0, ...duplicatedQuestions); + found = true; + return; + } + // 5. If target question has not been identified, recall function with child questions + if (_.has(currentQuestion, 'input.options')) { + currentQuestion.input.options.forEach(option => { + if (_.has(option, 'conditionalQuestions')) { + Array.isArray(option.conditionalQuestions) && option.conditionalQuestions.forEach(iter); + } + }); + } + }); + // 7. return updated schema + return schema; +}; + +let removeQuestionSetReferences = (questionSetId, questionId, schema) => { + let questionSet, question; + let { questionPanels, questionSets } = { ...schema }; + // 1. find questionSet in questionSets + questionSet = findQuestionSet(questionSetId, schema); + // 2. 
find the question in questionSet + question = findQuestion(questionId, questionSet); + if (!_.isEmpty(question)) { + // 3. extract panelId + let { + input: { panelId }, + } = question; + // 4. remove from questionSet + questionSets = questionSets.filter(qs => { + return qs.questionSetId !== questionSetId; + }); + // 5. remove from questionPanel + questionPanels = questionPanels.map(questionSetObj => { + return removeQuestionSet(questionSetObj, panelId, questionSetId); + }); + // 6. return new schema + return { + ...schema, + questionPanels, + questionSets, + }; + } + return schema; +}; + +let removeQuestionSet = (questionSetObj = {}, panelId = '', questionSetId = '') => { + if (questionSetObj.panelId === panelId) { + const items = questionSetObj.questionSets.filter(qs => { + return qs.questionSetId !== questionSetId; + }); + questionSetObj.questionSets = items; + + return questionSetObj; + } + + return questionSetObj; +}; + +let removeQuestionSetAnswers = (questionId = '', questionAnswers = {}) => { + if (!_.isEmpty(questionId) && !_.isEmpty(questionAnswers)) { + let id = _.last(questionId.split('_')); + if (typeof id != 'undefined') { + Object.keys(questionAnswers).forEach(key => { + if (key.includes(id)) { + questionAnswers[key] = ''; + } + }); + } + } + return questionAnswers; +}; + +let removeQuestionReferences = (questionSetId, questionIdsToRemove = [], schema) => { + // 1. guard clause to return unmodified schema if no questions passed for removal + if(_.isEmpty(questionIdsToRemove)) { + return schema; + } + // 2. find question set index in schema + let qSetIdx = schema.questionSets.findIndex(q => q.questionSetId === questionSetId); + // 3. iterate through each question id and delete from schema + questionIdsToRemove.forEach(questionIdToRemove => { + let found = false; + schema.questionSets[qSetIdx].questions.forEach(function iter(currentQuestion, index, currentArray) { + if(found) return; + // 4. If the current question is found in the questions to remove, then remove it + if (currentQuestion.questionId === questionIdToRemove) { + currentArray.splice(index, 1); + found = true; + } + // 5. If target question has not been identified, recall function with child questions + if (_.has(currentQuestion, 'input.options')) { + currentQuestion.input.options.forEach(option => { + if (_.has(option, 'conditionalQuestions')) { + Array.isArray(option.conditionalQuestions) && option.conditionalQuestions.forEach(iter); + } + }); + } + }); + }); + // 6. return modified schema + return schema; +}; + +let removeQuestionAnswers = (questionIds = [], questionAnswers = {}) => { + // 1. guard for empty question ids array + if(_.isEmpty(questionIds)) { + return questionAnswers; + } + // 2. delete each question answer from the answers object + questionIds.forEach(questionId => { + delete questionAnswers[questionId]; + }) + // 3. 
return the updated question answers object + return questionAnswers; +}; + +export default { + findQuestion: findQuestion, + findQuestionRecursive: findQuestionRecursive, + findQuestionSet: findQuestionSet, + findQuestionPanel: findQuestionPanel, + duplicateQuestionSet: duplicateQuestionSet, + duplicateQuestions: duplicateQuestions, + insertQuestionSet: insertQuestionSet, + insertQuestions: insertQuestions, + insertQuestionSeparator: insertQuestionSeparator, + removeQuestionSetReferences: removeQuestionSetReferences, + removeQuestionSetAnswers: removeQuestionSetAnswers, + removeQuestionReferences: removeQuestionReferences, + removeQuestionAnswers: removeQuestionAnswers +}; \ No newline at end of file diff --git a/src/resources/utilities/emailBuilder.js b/src/resources/utilities/emailBuilder.js index 6f7f1196..0d3d13b9 100644 --- a/src/resources/utilities/emailBuilder.js +++ b/src/resources/utilities/emailBuilder.js @@ -1,3 +1,5 @@ +import constants from './constants.util'; + const setMessageProperties = (emailRecipientType, body, user) => { const { @@ -14,11 +16,10 @@ const setMessageProperties = (emailRecipientType, body, user) => { title, custodianEmail } = body; - const hdrukEmail = `enquiry@healthdatagateway.org`; const dataCustodianEmail = process.env.DATA_CUSTODIAN_EMAIL || custodianEmail; let msg = { - from: `${hdrukEmail}`, + from: `${constants.hdrukEmail}`, subject: `Enquires for ${title} dataset healthdatagateway.org`, html: ` An enquiry to access the ${title} dataset has been made. Please see the details of the enquiry below:


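For context on how the new dynamicForm.util.js helpers above are intended to compose with the form schema, here is a minimal usage sketch (not part of the diff) of the '+ Add another organisation' flow. Only the exported functions, their signatures, and the schema identifiers ('safesettings-storageandprocessing', 'addStorageAndProcessingDetails') are taken from this changeset; the handler shape, import path, and variable names are illustrative assumptions.

// Sketch only: a handler for the 'addRepeatableQuestions' action fired by the
// '+ Add another organisation' button defined in the schema above.
// Import path is relative to the caller and is an assumption.
import dynamicFormUtil from '../dynamicForms/dynamicForm.util';

const addRepeatedOrganisation = schema => {
	const questionSetId = 'safesettings-storageandprocessing';
	const buttonId = 'addStorageAndProcessingDetails';
	// the button definition carries the ids of the questions to repeat and the separator text
	const questionSet = dynamicFormUtil.findQuestionSet(questionSetId, schema);
	const addButton = dynamicFormUtil.findQuestionRecursive(questionSet.questions, buttonId);
	const { questionIds, separatorText } = addButton.input;
	// clone the target questions, lower-case each questionId and append a random 5-character suffix,
	// append a generated 'Remove' button, and prepend the separator text to the first clone
	const duplicatedQuestions = dynamicFormUtil.duplicateQuestions(questionSetId, questionIds, separatorText, schema);
	// splice the clones into the question set; in this schema they land directly before the add button
	return dynamicFormUtil.insertQuestions(questionSetId, buttonId, duplicatedQuestions, schema);
};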
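A companion sketch, under the same assumptions and import as the previous sketch, for the inverse path: the 'Remove' button that modifyQuestionIds appends (action 'removeRepeatableQuestions') lists the suffixed ids of the questions it controls, which removeQuestionReferences and removeQuestionAnswers use to prune the schema and the stored answers. The removeButton argument and the '_ab1cd' suffix shown are hypothetical.

// Sketch only: a handler for the generated 'removeRepeatableQuestions' action.
// 'removeButton' would be the generated button object, e.g. with questionId
// 'removesafesettings-storageandprocessing_ab1cd' (suffix is hypothetical).
const removeRepeatedOrganisation = (schema, questionAnswers, removeButton) => {
	const questionSetId = 'safesettings-storageandprocessing';
	// the generated button carries the suffixed ids of the repeated questions
	const { questionIds } = removeButton.input;
	// drop the repeated questions from the schema...
	const updatedSchema = dynamicFormUtil.removeQuestionReferences(questionSetId, questionIds, schema);
	// ...and clear any answers captured against those ids
	const updatedAnswers = dynamicFormUtil.removeQuestionAnswers(questionIds, questionAnswers);
	return { updatedSchema, updatedAnswers };
};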
diff --git a/src/resources/utilities/emailGenerator.util.js b/src/resources/utilities/emailGenerator.util.js index 306cf953..be6224e1 100644 --- a/src/resources/utilities/emailGenerator.util.js +++ b/src/resources/utilities/emailGenerator.util.js @@ -8,9 +8,21 @@ import constants from '../utilities/constants.util'; const sgMail = require('@sendgrid/mail'); let parent, qsId; let questionList = []; -let excludedQuestionSetIds = ['addApplicant', 'removeApplicant']; +let excludedQuestionSetIds = ['addRepeatableSection', 'removeRepeatableSection']; let autoCompleteLookups = { fullname: ['email'] }; +const _getStepReviewers = (reviewers = []) => { + if (!_.isEmpty(reviewers)) return [...reviewers].map(reviewer => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); + + return ''; +}; + +const _getStepSections = (sections = []) => { + if (!_.isEmpty(sections)) return [...sections].map(section => constants.darPanelMapper[section]).join(', '); + + return ''; +}; + /** * [_unNestQuestionPanels] * @@ -18,25 +30,19 @@ let autoCompleteLookups = { fullname: ['email'] }; * @param {Array} [{panelId, pageId, questionSets, ...}] * @return {Array} [{panel}, {}] */ -const _unNestQuestionPanels = (panels) => { +const _unNestQuestionPanels = panels => { return [...panels].reduce((arr, panel) => { // deconstruct questionPanel:[{panel}] - let { - panelId, - pageId, - questionSets, - questionPanelHeaderText, - navHeader, - } = panel; + let { panelId, pageId, questionSets, questionPanelHeaderText, navHeader } = panel; if (typeof questionSets !== 'undefined') { if (questionSets.length > 1) { // filters excluded questionSetIds - let filtered = [...questionSets].filter((item) => { + let filtered = [...questionSets].filter(item => { let [questionId, uniqueId] = item.questionSetId.split('_'); return !excludedQuestionSetIds.includes(questionId); }); // builds new array of [{panelId, pageId, etc}] - let newPanels = filtered.map((set) => { + let newPanels = filtered.map(set => { return { panelId, pageId, @@ -83,13 +89,9 @@ const _initalQuestionSpread = (questions, pages, questionPanels) => { let [qSId, uniqueQsId] = questionSetId.split('_'); // question set full Id ie: applicant_hUad8 - let qsFullId = - typeof uniqueQsId !== 'undefined' ? `${qSId}_${uniqueQsId}` : qSId; + let qsFullId = typeof uniqueQsId !== 'undefined' ? 
`${qSId}_${uniqueQsId}` : qSId; // remove out unwanted buttons or elements - if ( - !excludedQuestionSetIds.includes(qSId) && - questionSet.hasOwnProperty('questions') - ) { + if (!excludedQuestionSetIds.includes(qSId) && questionSet.hasOwnProperty('questions')) { for (let question of questionSet.questions) { //deconstruct quesitonId from question let { questionId } = question; @@ -98,28 +100,25 @@ const _initalQuestionSpread = (questions, pages, questionPanels) => { let [qId, uniqueQId] = questionId.split('_'); // pass in questionPanels - let questionPanel = [...questionPanels].find((i) => i.panelId === qSId); - // find page it belongs too - if(questionPanel) { - let page = [...pages].find((i) => i.pageId === questionPanel.pageId); - - // if page not found skip and the questionId isnt excluded - if ( - typeof page !== 'undefined' && - !excludedQuestionSetIds.includes(qId) - ) { - // if it is a generated field ie ui driven add back on uniqueId - let obj = { - page: page.title, - section: questionPanel.navHeader, - questionSetId: qsFullId, - questionSetHeader, - ...question, - }; - // update flatQuestionList array, spread previous add new object - flatQuestionList = [...flatQuestionList, obj]; - } - } + let questionPanel = [...questionPanels].find(i => i.panelId === qSId); + // find page it belongs too + if (questionPanel) { + let page = [...pages].find(i => i.pageId === questionPanel.pageId); + + // if page not found skip and the questionId isnt excluded + if (typeof page !== 'undefined' && !excludedQuestionSetIds.includes(qId)) { + // if it is a generated field ie ui driven add back on uniqueId + let obj = { + page: page.title, + section: questionPanel.navHeader, + questionSetId: qsFullId, + questionSetHeader, + ...question, + }; + // update flatQuestionList array, spread previous add new object + flatQuestionList = [...flatQuestionList, obj]; + } + } } } } @@ -131,16 +130,13 @@ const _initalQuestionSpread = (questions, pages, questionPanels) => { * * @return {Array} [{questionId, question}] */ -const _getAllQuestionsFlattened = (allQuestions) => { +const _getAllQuestionsFlattened = allQuestions => { let child; if (!allQuestions) return; for (let questionObj of allQuestions) { if (questionObj.hasOwnProperty('questionId')) { - if ( - questionObj.hasOwnProperty('page') && - questionObj.hasOwnProperty('section') - ) { + if (questionObj.hasOwnProperty('page') && questionObj.hasOwnProperty('section')) { let { page, section, questionSetId, questionSetHeader } = questionObj; if (typeof questionSetId !== 'undefined') qsId = questionSetId; // set the parent page and parent section as nested wont have reference to its parent @@ -150,8 +146,7 @@ const _getAllQuestionsFlattened = (allQuestions) => { // split up questionId let [qId, uniqueId] = questionId.split('_'); // actual quesitonId - let questionTitle = - typeof uniqueId !== 'undefined' ? `${qId}_${uniqueId}` : qId; + let questionTitle = typeof uniqueId !== 'undefined' ? 
`${qId}_${uniqueId}` : qId; // if not in exclude list if (!excludedQuestionSetIds.includes(questionTitle)) { questionList = [ @@ -168,18 +163,12 @@ const _getAllQuestionsFlattened = (allQuestions) => { } } - if ( - typeof questionObj.input === 'object' && - typeof questionObj.input.options !== 'undefined' - ) { + if (typeof questionObj.input === 'object' && typeof questionObj.input.options !== 'undefined') { questionObj.input.options - .filter((option) => { - return ( - typeof option.conditionalQuestions !== 'undefined' && - option.conditionalQuestions.length > 0 - ); + .filter(option => { + return typeof option.conditionalQuestions !== 'undefined' && option.conditionalQuestions.length > 0; }) - .forEach((option) => { + .forEach(option => { child = _getAllQuestionsFlattened(option.conditionalQuestions); }); } @@ -190,23 +179,23 @@ const _getAllQuestionsFlattened = (allQuestions) => { } }; -const _formatSectionTitle = (value) => { +const _formatSectionTitle = value => { let [questionId] = value.split('_'); return _.capitalize(questionId); }; const _buildSubjectTitle = (user, title, submissionType) => { - let subject = ''; + let subject = ''; if (user.toUpperCase() === 'DATACUSTODIAN') { subject = `Someone has submitted an application to access ${title} dataset. Please let the applicant know as soon as there is progress in the review of their submission.`; } else { - if(submissionType === constants.submissionTypes.INITIAL) { - subject = `You have requested access to ${title}. The custodian will be in contact about the application.`; - } else { - subject = `You have made updates to your Data Access Request for ${title}. The custodian will be in contact about the application.`; - } - } - return subject; + if (submissionType === constants.submissionTypes.INITIAL) { + subject = `You have requested access to ${title}. The custodian will be in contact about the application.`; + } else { + subject = `You have made updates to your Data Access Request for ${title}. The custodian will be in contact about the application.`; + } + } + return subject; }; /** @@ -220,11 +209,14 @@ const _buildSubjectTitle = (user, title, submissionType) => { */ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) => { let parent; - let { userType, userName, userEmail, datasetTitles, submissionType } = options; - let dateSubmitted = moment().format('D MMM YYYY'); - let { projectName = 'No project name set', isNationalCoreStudies = false, nationalCoreStudiesProjectId = '' } = aboutApplication; - let linkNationalCoreStudies = nationalCoreStudiesProjectId === '' ? '' : `${process.env.homeURL}/project/${nationalCoreStudiesProjectId}`; - let heading = submissionType === constants.submissionTypes.INITIAL ? `New data access request application` : `Existing data access request application with new updates`; + let { userType, userName, userEmail, datasetTitles, submissionType } = options; + let dateSubmitted = moment().format('D MMM YYYY'); + let { projectName = 'No project name set', isNationalCoreStudies = false, nationalCoreStudiesProjectId = '' } = aboutApplication; + let linkNationalCoreStudies = nationalCoreStudiesProjectId === '' ? '' : `${process.env.homeURL}/project/${nationalCoreStudiesProjectId}`; + let heading = + submissionType === constants.submissionTypes.INITIAL + ? 
`New data access request application` + : `Existing data access request application with new updates`; let subject = _buildSubjectTitle(userType, datasetTitles, submissionType); let questionTree = { ...fullQuestions }; let answers = { ...questionAnswers }; @@ -254,15 +246,17 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) - + - - - + + + @@ -284,7 +278,7 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) // Create json content payload for attaching to email const jsonContent = { - applicationDetails: { projectName, linkNationalCoreStudies, datasetTitles, dateSubmitted, applicantName: userName }, + applicationDetails: { projectName, linkNationalCoreStudies, datasetTitles, dateSubmitted, applicantName: userName }, questions: { ...fullQuestions }, answers: { ...questionAnswers }, }; @@ -308,8 +302,7 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) // Safe People = [Applicant, Principle Investigator, ...] // Safe People to order array for applicant let sectionKeys; - if (page.toUpperCase() === 'SAFE PEOPLE') - sectionKeys = Object.keys({ ...parent }).sort(); + if (page.toUpperCase() === 'SAFE PEOPLE') sectionKeys = Object.keys({ ...parent }).sort(); else sectionKeys = Object.keys({ ...parent }); // styling for last child @@ -348,15 +341,15 @@ const _buildEmail = (aboutApplication, fullQuestions, questionAnswers, options) * @desc This function will group all the questions into the correct format for emailBuilder * @return {Object} {Safe People: {Applicant: [], Applicant_U8ad: []}, Safe Project: {}} */ -const _groupByPageSection = (allQuestions) => { +const _groupByPageSection = allQuestions => { // group by page [Safe People, Safe Project] - let groupedByPage = _.groupBy(allQuestions, (item) => { + let groupedByPage = _.groupBy(allQuestions, item => { return item.page; }); // within grouped [Safe People: {Applicant, Applicant1, Something}] let grouped = _.forEach(groupedByPage, (value, key) => { - groupedByPage[key] = _.groupBy(groupedByPage[key], (item) => { + groupedByPage[key] = _.groupBy(groupedByPage[key], item => { return item.questionSetId; }); }); @@ -401,9 +394,7 @@ const _actualQuestionAnswers = async (questionAnswers, options) => { // show full email for custodian or redacted for non custodians let validEmail = _displayCorrectEmailAddress(email, userType); // check if uniqueId and set email field - typeof uniqueId !== 'undefined' - ? (obj[`email_${uniqueId}`] = validEmail) - : (obj[`email`] = validEmail); + typeof uniqueId !== 'undefined' ? (obj[`email_${uniqueId}`] = validEmail) : (obj[`email`] = validEmail); break; default: obj[key] = value; @@ -423,9 +414,7 @@ const _actualQuestionAnswers = async (questionAnswers, options) => { * @return {String} 'r********@**********m' */ const _displayCorrectEmailAddress = (email, userType) => { - return userType.toUpperCase() === 'DATACUSTODIAN' - ? email - : helper.censorEmail(email); + return userType.toUpperCase() === 'DATACUSTODIAN' ? 
email : helper.censorEmail(email); }; /** @@ -435,7 +424,7 @@ const _displayCorrectEmailAddress = (email, userType) => { * @param {Int} 98767876 * @return {Object} {fullname: 'James Swallow', email: 'james@gmail.com'} */ -const _getUserDetails = async (userObj) => { +const _getUserDetails = async userObj => { return new Promise(async (resolve, reject) => { try { let { id } = userObj; @@ -448,53 +437,27 @@ const _getUserDetails = async (userObj) => { }); }; -const _generateEmail = async ( - aboutApplication, - questions, - pages, - questionPanels, - questionAnswers, - options -) => { +const _generateEmail = async (aboutApplication, questions, pages, questionPanels, questionAnswers, options) => { // reset questionList arr questionList = []; // set questionAnswers - let flatQuestionAnswers = await _actualQuestionAnswers( - questionAnswers, - options - ); + let flatQuestionAnswers = await _actualQuestionAnswers(questionAnswers, options); // unnest each questionPanel if questionSets let flatQuestionPanels = _unNestQuestionPanels(questionPanels); // unnest question flat - let unNestedQuestions = _initalQuestionSpread( - questions, - pages, - flatQuestionPanels - ); + let unNestedQuestions = _initalQuestionSpread(questions, pages, flatQuestionPanels); // assigns to questionList let fullQuestionSet = _getAllQuestionsFlattened(unNestedQuestions); // fullQuestions [SafePeople: {Applicant: {}, Applicant_aca: {}}, SafeProject:{}] let fullQuestions = _groupByPageSection([...questionList]); // build up email with values - let { html, jsonContent } = _buildEmail( - aboutApplication, - fullQuestions, - flatQuestionAnswers, - options - ); + let { html, jsonContent } = _buildEmail(aboutApplication, fullQuestions, flatQuestionAnswers, options); // return email return { html, jsonContent }; }; -const _displayConditionalStatusDesc = ( - applicationStatus, - applicationStatusDesc -) => { - if ( - (applicationStatusDesc && - applicationStatus === 'approved with conditions') || - applicationStatus === 'rejected' - ) { +const _displayConditionalStatusDesc = (applicationStatus, applicationStatusDesc) => { + if ((applicationStatusDesc && applicationStatus === 'approved with conditions') || applicationStatus === 'rejected') { let conditionalTitle = ''; switch (applicationStatus) { case 'approved with conditions': @@ -512,14 +475,14 @@ const _displayConditionalStatusDesc = ( return ''; }; -const _displayDARLink = (accessId) => { +const _displayDARLink = accessId => { if (!accessId) return ''; let darLink = `${process.env.homeURL}/data-access-request/${accessId}`; return `View application`; }; -const _generateDARStatusChangedEmail = (options) => { +const _generateDARStatusChangedEmail = options => { let { id, applicationStatus, @@ -587,35 +550,76 @@ const _generateDARStatusChangedEmail = (options) => {
Project ${projectName}
Related NCS project${isNationalCoreStudies ? `View NCS project` : 'no'}
${ + isNationalCoreStudies ? `View NCS project` : 'no' + }
Dataset(s) ${datasetTitles}
- ${_displayConditionalStatusDesc( - applicationStatus, - applicationStatusDesc - )} + ${_displayConditionalStatusDesc(applicationStatus, applicationStatusDesc)} ${_displayDARLink(id)}
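// Illustrative calls for _displayConditionalStatusDesc as interpolated above (the block titles are set in
// unchanged lines outside this hunk, so only the branching is sketched here):
//   _displayConditionalStatusDesc('approved with conditions', 'Conditions apply') -> titled description block
//   _displayConditionalStatusDesc('rejected', 'Out of scope')                     -> titled description block
//   _displayConditionalStatusDesc('approved', 'Anything')                         -> '' (nothing rendered)
// Note that the condition `(applicationStatusDesc && applicationStatus === 'approved with conditions') || applicationStatus === 'rejected'`
// is also satisfied by a rejection that has no description supplied.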
`; return body; }; -const _generateContributorEmail = (options) => { - let { - id, - datasetTitles, - projectName, - projectId, - change, - actioner, - applicants, - } = options; - let header = `You've been ${ - change === 'added' ? 'added to' : 'removed from' - } a data access request application`; - let subheader = `${actioner} ${change} you as a contributor ${ - change === 'added' ? 'to' : 'from' - } a data access request application. ${ - change == 'added' - ? 'Contributors can exchange private notes, make edits, invite others and submit the application.' - : '' +const _generateDARReturnedEmail = options => { + let { id, projectName, publisher, datasetTitles, dateSubmitted, applicants } = options; + let body = `
+ + + + + + + + + + + + + + +
+ You’ve been requested to update a data access request application +
+ ${publisher} has requested you update answers provided in a submitted data access request application. +
+ + + + + + + + + + + + + + + + + +
Project${ + projectName || 'No project name set' + }
Dataset(s)${datasetTitles}
Submitted${moment( + dateSubmitted + ).format('D MMM YYYY')}
Applicants${applicants}
+
+
+ ${_displayDARLink(id)} +
+
`; + return body; +}; + +const _generateContributorEmail = options => { + let { id, datasetTitles, projectName, projectId, change, actioner, applicants } = options; + let header = `You've been ${change === 'added' ? 'added to' : 'removed from'} a data access request application`; + let subheader = `${actioner} ${change} you as a contributor ${change === 'added' ? 'to' : 'from'} a data access request application. ${ + change == 'added' ? 'Contributors can exchange private notes, make edits, invite others and submit the application.' : '' }`; let body = `
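// Illustrative sketch of _displayDARLink, which the templates above and below interpolate. The helper (earlier in
// this file) returns '' when no id is supplied, otherwise a 'View application' link pointing at
// `${process.env.homeURL}/data-access-request/${accessId}`; the exact anchor tag and styling below are an
// assumption, since the markup is not preserved in this rendering of the diff.
const displayDARLink = accessId => {
	if (!accessId) return '';
	const darLink = `${process.env.homeURL}/data-access-request/${accessId}`;
	return `<a href='${darLink}'>View application</a>`;
};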
@@ -680,7 +684,7 @@ const _generateContributorEmail = (options) => { return body; }; -const _generateStepOverrideEmail = (options) => { +const _generateStepOverrideEmail = options => { let { id, projectName, @@ -814,14 +818,12 @@ const _generateStepOverrideEmail = (options) => { -
${_displayDARLink( - id - )}
+
${_displayDARLink(id)}
`; return body; }; -const _generateNewReviewPhaseEmail = (options) => { +const _generateNewReviewPhaseEmail = options => { let { id, projectName, @@ -876,7 +878,9 @@ const _generateNewReviewPhaseEmail = (options) => { Dataset(s) - ${datasetTitles} + ${ + datasetTitles || 'No dataset titles' + } Applicants @@ -928,7 +932,163 @@ const _generateNewReviewPhaseEmail = (options) => { return body; }; -const _generateReviewDeadlineWarning = (options) => { +const _generateWorkflowCreated = options => { + let { workflowName, steps, createdAt, actioner } = options; + + let table = `
+ + + + + + + + + + `; + + for (let step of steps) { + let { reviewers = [], sections = [], stepName = '' } = step; + let stepReviewers = _getStepReviewers(reviewers); + let stepSections = _getStepSections(sections); + table += ` + + `; + } + + table += ` +
+ A new Workflow has been created. +
+ ${actioner} has created ${workflowName} on ${moment(createdAt).format('D MMM YYYY')} +
+ + + + + + + + + + + + +
+

${stepName}

+
Review Sections${stepSections}
Reviewers${stepReviewers}
+
+
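// Illustrative shape of a `step` consumed by the loop above (and by _generateWorkflowAssigned below); field names
// follow the StepSchema in workflow.model.js at the end of this diff and the reviewer fields selected when the
// controller populates 'steps.reviewers'. Values are examples only.
const exampleStep = {
	stepName: 'Safe People review',
	reviewers: [{ firstname: 'Jane', lastname: 'Doe', email: 'jane.doe@example.com' }],
	sections: ['safepeople'],
	deadline: 5, // days from the step starting until its review deadline
	reminderOffset: 3, // days before the deadline that SLA reminders are triggered
};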
`; + + return table; +}; + +const _generateWorkflowAssigned = options => { + let { id, projectId, workflowName, projectName, applicants, steps, actioner, datasetTitles, dateSubmitted } = options; + + let table = `
+ + + + + + + + + + + + + `; + + for (let step of steps) { + let { reviewers = [], sections = [], stepName = '' } = step; + let stepReviewers = _getStepReviewers(reviewers); + let stepSections = _getStepSections(sections); + table += ` + + `; + } + + table += ` +
+ Workflow has been assigned. +
+ ${actioner} has assigned ${workflowName} to a Data Access Request +
+ + + + + + + + + + + + + + + + + + + + + + + + +
+

Application Details

+
Project${ + projectName || 'No project name' + }
Project Id${ + projectId || id + }
Dataset Titles${ + datasetTitles || 'No dataset titles' + }
Applicants${applicants}
Submitted${moment( + dateSubmitted + ).format('D MMM YYYY')}
+
+ + + + + + + + + + + + +
+

${stepName}

+
Review Sections${stepSections}
Reviewers${stepReviewers}
+
+
+ ${_displayDARLink(id)} +
+
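// Condensed sketch of how _generateWorkflowCreated above is driven, taken from the WORKFLOWCREATED branch of
// createNotifications in workflow.controller.js later in this diff: only the custodian team's managers are
// notified and emailed. Imports are assumed from the modules referenced elsewhere in this diff.
const notifyWorkflowCreated = async ({ publisherObj, workflow, actioner }) => {
	const custodianManagers = teamController.getTeamMembersByRole(publisherObj, constants.roleTypes.MANAGER);
	const managerUserIds = custodianManagers.map(user => user.id);
	const { workflowName, _id, steps, createdAt } = workflow;
	// In-app notification for managers only
	await notificationBuilder.triggerNotificationMessage(managerUserIds, `A new workflow of ${workflowName} has been created`, 'workflow', _id);
	// Email body built by the generator above, sent without an unsubscribe footer
	const html = await emailGenerator.generateWorkflowCreated({ actioner, workflowName, _id, steps, createdAt });
	await emailGenerator.sendEmail(custodianManagers, constants.hdrukEmail, 'A Workflow has been created', html, false);
};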
`; + + return table; +}; + +const _generateReviewDeadlineWarning = options => { let { id, projectName, @@ -958,9 +1118,9 @@ const _generateReviewDeadlineWarning = (options) => { - The following data access request application is approaching the review deadline of ${moment( - dateDeadline - ).format('D MMM YYYY')}. + The following data access request application is approaching the review deadline of ${moment(dateDeadline).format( + 'D MMM YYYY' + )}. @@ -1024,7 +1184,7 @@ const _generateReviewDeadlineWarning = (options) => { return body; }; -const _generateReviewDeadlinePassed = (options) => { +const _generateReviewDeadlinePassed = options => { let { id, projectName, @@ -1118,7 +1278,7 @@ const _generateReviewDeadlinePassed = (options) => { return body; }; -const _generateFinalDecisionRequiredEmail = (options) => { +const _generateFinalDecisionRequiredEmail = options => { let { id, projectName, @@ -1236,14 +1396,12 @@ const _generateFinalDecisionRequiredEmail = (options) => { -
${_displayDARLink( - id - )}
+
${_displayDARLink(id)}
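// Minimal sketch of the recipient de-duplication used by _sendEmail further down this file: recipients are keyed
// by email address in a Map, so each address is emailed once and the last user object supplied for a repeated
// address wins.
const to = [
	{ email: 'reviewer@example.com', firstname: 'First' },
	{ email: 'reviewer@example.com', firstname: 'Duplicate' },
	{ email: 'manager@example.com', firstname: 'Manager' },
];
const recipients = [...new Map(to.map(item => [item['email'], item])).values()];
// recipients.length === 2; recipients[0].firstname === 'Duplicate'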
`; return body; }; -const _generateRemovedFromTeam = (options) => { +const _generateRemovedFromTeam = options => { let { teamName } = options; let header = `You've been removed from the ${teamName} team on the HDR Innovation Gateway`; let subheader = `You will no longer be able to access Data Access Requests, messages or the profile area relating to this team.`; @@ -1287,13 +1445,13 @@ const _generateRemovedFromTeam = (options) => { return body; }; -const _generateAddedToTeam = (options) => { +const _generateAddedToTeam = options => { let { teamName, role } = options; let header = `You've been added to the ${teamName} team as a ${role} on the HDR Innovation Gateway`; let subheader = ``; - if (role === teamController.roleTypes.MANAGER) { + if (role === constants.roleTypes.MANAGER) { subheader = `You will now be able to create and manage Data Access Request workflows, process applications, send messages, and manage the profile area relating to this team, including the ability to add and remove new members.`; - } else if (role === teamController.roleTypes.REVIEWER) { + } else if (role === constants.roleTypes.REVIEWER) { subheader = `You will now be able to review assigned Data Access Requests, send messages and visit the profile area relating to this team.`; } let body = `
@@ -1341,21 +1499,12 @@ const _generateAddedToTeam = (options) => { * @desc Send an email to an array of users using Twilio SendGrid * @param {Object} context */ -const _sendEmail = async ( - to, - from, - subject, - html, - allowUnsubscribe = true, - attachments = [] -) => { +const _sendEmail = async (to, from, subject, html, allowUnsubscribe = true, attachments = []) => { // 1. Apply SendGrid API key from environment variable sgMail.setApiKey(process.env.SENDGRID_API_KEY); // 2. Ensure any duplicates recieve only a single email - const recipients = [ - ...new Map(to.map((item) => [item['email'], item])).values(), - ]; + const recipients = [...new Map(to.map(item => [item['email'], item])).values()]; // 3. Build each email object for SendGrid extracting email addresses from user object with unique unsubscribe link (to) for (let recipient of recipients) { @@ -1419,7 +1568,7 @@ const _generateEmailFooter = (recipient, allowUnsubscribe) => { }; const _generateAttachment = (filename, content, type) => { - return { + return { content, filename, type, @@ -1429,6 +1578,7 @@ const _generateAttachment = (filename, content, type) => { export default { generateEmail: _generateEmail, + generateDARReturnedEmail: _generateDARReturnedEmail, generateDARStatusChangedEmail: _generateDARStatusChangedEmail, generateContributorEmail: _generateContributorEmail, generateStepOverrideEmail: _generateStepOverrideEmail, @@ -1441,4 +1591,6 @@ export default { sendEmail: _sendEmail, generateEmailFooter: _generateEmailFooter, generateAttachment: _generateAttachment, + generateWorkflowAssigned: _generateWorkflowAssigned, + generateWorkflowCreated: _generateWorkflowCreated, }; diff --git a/src/resources/utilities/helper.util.js b/src/resources/utilities/helper.util.js index a783d60a..7110d184 100644 --- a/src/resources/utilities/helper.util.js +++ b/src/resources/utilities/helper.util.js @@ -1,8 +1,15 @@ -const _censorWord = (str) => { - return str[0] + '*'.repeat(str.length - 2) + str.slice(-1); +import crypto from 'crypto'; + +const _censorWord = str => { + if(str.length === 1) + return '*'; + else if(str.length === 2) + return `${str[0]}*`; + else + return str[0] + '*'.repeat(str.length - 2) + str.slice(-1); }; -const _censorEmail = (email) => { +const _censorEmail = email => { let arr = email.split('@'); return _censorWord(arr[0]) + '@' + _censorWord(arr[1]); }; @@ -18,50 +25,54 @@ const _arraysEqual = (a, b) => { return true; }; -const _generateFriendlyId = (id) => { +const _generateFriendlyId = id => { return id - .toString() - .toUpperCase() + .toString() + .toUpperCase() .match(/.{1,4}/g) .join('-'); }; const _generatedNumericId = () => { return parseInt(Math.random().toString().replace('0.', '')); -} +}; -const _hidePrivateProfileDetails = (persons) => { +const _generateAlphaNumericString = (length) => { + return crypto.randomBytes(length).toString('hex').substring(length); +}; - return persons.map(person => { +const _hidePrivateProfileDetails = persons => { + return persons.map(person => { let personWithPrivateDetailsRemoved = person; - personWithPrivateDetailsRemoved.bio = person.showBio ? person.bio : ""; - personWithPrivateDetailsRemoved.organisation = person.showOrganisation ? person.organisation : ""; - personWithPrivateDetailsRemoved.sector = person.showSector ? person.sector : ""; - personWithPrivateDetailsRemoved.domain = person.showDomain ? person.domain : ""; - personWithPrivateDetailsRemoved.link = person.showLink ? person.link : ""; - personWithPrivateDetailsRemoved.orcid = person.showOrcid ? 
person.orcid : ""; + personWithPrivateDetailsRemoved.bio = person.showBio ? person.bio : ''; + personWithPrivateDetailsRemoved.organisation = person.showOrganisation ? person.organisation : ''; + personWithPrivateDetailsRemoved.sector = person.showSector ? person.sector : ''; + personWithPrivateDetailsRemoved.domain = person.showDomain ? person.domain : ''; + personWithPrivateDetailsRemoved.link = person.showLink ? person.link : ''; + personWithPrivateDetailsRemoved.orcid = person.showOrcid ? person.orcid : ''; return personWithPrivateDetailsRemoved; }); -} +}; const _getEnvironment = () => { - let environment = 'local'; + let environment = 'local'; - if (process.env.environment === 'www') environment = 'prod'; - else if (process.env.environment === 'uat') environment = 'uat'; - else if (process.env.environment === 'uatbeta') environment = 'uatbeta'; - else if (process.env.environment === 'latest') environment = 'latest'; + if (process.env.environment === 'www') environment = 'prod'; + else if (process.env.environment === 'uat') environment = 'uat'; + else if (process.env.environment === 'uatbeta') environment = 'uatbeta'; + else if (process.env.environment === 'latest') environment = 'latest'; - return environment; -} + return environment; +}; export default { censorEmail: _censorEmail, - arraysEqual: _arraysEqual, + arraysEqual: _arraysEqual, generateFriendlyId: _generateFriendlyId, generatedNumericId: _generatedNumericId, - hidePrivateProfileDetails: _hidePrivateProfileDetails, - getEnvironment: _getEnvironment + generateAlphaNumericString: _generateAlphaNumericString, + hidePrivateProfileDetails: _hidePrivateProfileDetails, + getEnvironment: _getEnvironment, }; diff --git a/src/resources/utilities/inputSanitizer.js b/src/resources/utilities/inputSanitizer.js index 4e7e3605..6aaf8c20 100644 --- a/src/resources/utilities/inputSanitizer.js +++ b/src/resources/utilities/inputSanitizer.js @@ -1,17 +1,18 @@ //takes either a String or an array of Strings and removes non-breaking spaces -const removeNonBreakingSpaces = (str) => { - let sanitizedValues = []; +const removeNonBreakingSpaces = str => { + let sanitizedValues = []; - if(Array.isArray(str) && str !== []){ - str.forEach((s) => {sanitizedValues.push(removeNonBreakingSpaces(s))}); - } - else if(!Array.isArray(str) && typeof(str) !== 'object'){ - var re = / /g - return (!str || !isNaN(str)) ? str : str.replace(re ," "); - } - return sanitizedValues; -} + if (Array.isArray(str) && str !== []) { + str.forEach(s => { + sanitizedValues.push(removeNonBreakingSpaces(s)); + }); + } else if (!Array.isArray(str) && typeof str !== 'object') { + var re = / /g; + return !str || !isNaN(str) ? 
str : str.replace(re, ' '); + } + return sanitizedValues; +}; module.exports = { - removeNonBreakingSpaces: removeNonBreakingSpaces -} \ No newline at end of file + removeNonBreakingSpaces: removeNonBreakingSpaces, +}; diff --git a/src/resources/utilities/notificationBuilder.js b/src/resources/utilities/notificationBuilder.js index fad53327..5350a58f 100644 --- a/src/resources/utilities/notificationBuilder.js +++ b/src/resources/utilities/notificationBuilder.js @@ -19,5 +19,8 @@ const triggerNotificationMessage = (messageRecipients, messageDescription, messa } }); }); -} -module.exports.triggerNotificationMessage = triggerNotificationMessage; +}; + +export default { + triggerNotificationMessage: triggerNotificationMessage +}; diff --git a/src/resources/utilities/urlValidator.js b/src/resources/utilities/urlValidator.js index 335c928d..5ba88e98 100644 --- a/src/resources/utilities/urlValidator.js +++ b/src/resources/utilities/urlValidator.js @@ -1,18 +1,23 @@ -const validateURL = (link) => { - if (link && !/^https?:\/\//i.test(link)) { - link = 'https://' + link; - } - return link; - } +const validateURL = link => { + if (link && !/^https?:\/\//i.test(link)) { + link = 'https://' + link; + } + return link; +}; - const validateOrcidURL = (link) => { - if (!/^https?:\/\/orcid.org\//i.test(link)) { - link = 'https://orcid.org/' + link; - } - return link; - } +const validateOrcidURL = link => { + if (!/^https?:\/\/orcid.org\//i.test(link)) { + link = 'https://orcid.org/' + link; + } + return link; +}; - module.exports = { - validateURL: validateURL, - validateOrcidURL: validateOrcidURL - } \ No newline at end of file +const _isDOILink = link => { + return /^(?:(http)(s)?(:\/\/))?(dx.)?doi.org\/([\w.\/-]*)/i.test(link); +}; + +module.exports = { + validateURL: validateURL, + validateOrcidURL: validateOrcidURL, + isDOILink: _isDOILink, +}; diff --git a/src/resources/workflow/__mocks__/workflow.js b/src/resources/workflow/__mocks__/workflow.js index 7ec33a8c..23f55be8 100644 --- a/src/resources/workflow/__mocks__/workflow.js +++ b/src/resources/workflow/__mocks__/workflow.js @@ -2,27 +2,22 @@ import mongoose from 'mongoose'; module.exports = { steps: [ - { - name: 'Step 1', - active: true, - completed: false, - reviewers: [ - new mongoose.Types.ObjectId(), - new mongoose.Types.ObjectId() - ], - sections: [ - 'safepeople' - ] - }, - { - name: 'Step 2', - active: false, - completed: false - }, - { - name: 'Step 3', - active: true, - completed: false - } - ] + { + name: 'Step 1', + active: true, + completed: false, + reviewers: [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()], + sections: ['safepeople'], + }, + { + name: 'Step 2', + active: false, + completed: false, + }, + { + name: 'Step 3', + active: true, + completed: false, + }, + ], }; diff --git a/src/resources/workflow/__tests__/workflow.test.js b/src/resources/workflow/__tests__/workflow.test.js index a9449006..70780092 100755 --- a/src/resources/workflow/__tests__/workflow.test.js +++ b/src/resources/workflow/__tests__/workflow.test.js @@ -2,22 +2,22 @@ const workflowModel = require('../workflow.model'); const workflow = require('../__mocks__/workflow'); describe('minSteps', () => { - test('model validation requires at least one step', () => { + test('model validation requires at least one step', () => { expect(workflowModel.minSteps(workflow.steps)).toEqual(true); expect(workflowModel.minSteps([])).toEqual(false); }); }); describe('minReviewers', () => { - test('model validation requires at least one reviewer in a step', () 
=> { + test('model validation requires at least one reviewer in a step', () => { expect(workflowModel.minReviewers(workflow.steps[0].reviewers)).toEqual(true); expect(workflowModel.minReviewers([])).toEqual(false); }); }); describe('minSections', () => { - test('model validation requires at least one section in a step', () => { + test('model validation requires at least one section in a step', () => { expect(workflowModel.minSections(workflow.steps[0].sections)).toEqual(true); expect(workflowModel.minSections([])).toEqual(false); }); -}); \ No newline at end of file +}); diff --git a/src/resources/workflow/workflow.controller.js b/src/resources/workflow/workflow.controller.js index a8f0be9f..e3eddd52 100644 --- a/src/resources/workflow/workflow.controller.js +++ b/src/resources/workflow/workflow.controller.js @@ -4,149 +4,230 @@ import { WorkflowModel } from './workflow.model'; import teamController from '../team/team.controller'; import helper from '../utilities/helper.util'; import constants from '../utilities/constants.util'; +import emailGenerator from '../utilities/emailGenerator.util'; +import notificationBuilder from '../utilities/notificationBuilder'; import moment from 'moment'; import _ from 'lodash'; import mongoose from 'mongoose'; - // GET api/v1/workflows/:id - const getWorkflowById = async (req, res) => { - try { - // 1. Get the workflow from the database including the team members to check authorisation and the number of in-flight applications - const workflow = await WorkflowModel.findOne({ - _id: req.params.id, - }).populate([ - { - path: 'publisher', - select: 'team', - populate: { - path: 'team', - select: 'members -_id', - }, - }, - { - path: 'steps.reviewers', - model: 'User', - select: '_id id firstname lastname', - }, - { - path: 'applications', - select: 'aboutApplication', - match: { applicationStatus: 'inReview' }, +// GET api/v1/workflows/:id +const getWorkflowById = async (req, res) => { + try { + // 1. Get the workflow from the database including the team members to check authorisation and the number of in-flight applications + const workflow = await WorkflowModel.findOne({ + _id: req.params.id, + }).populate([ + { + path: 'publisher', + select: 'team', + populate: { + path: 'team', + select: 'members -_id', }, - ]); - if (!workflow) { - return res.status(404).json({ success: false }); - } - // 2. Check the requesting user is a manager of the custodian team - let { _id: userId } = req.user; - let authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - workflow.publisher.team.toObject(), - userId - ); - // 3. If not return unauthorised - if (!authorised) { - return res.status(401).json({ success: false }); + }, + { + path: 'steps.reviewers', + model: 'User', + select: '_id id firstname lastname', + }, + { + path: 'applications', + select: 'aboutApplication', + match: { applicationStatus: 'inReview' }, + }, + ]); + if (!workflow) { + return res.status(404).json({ success: false }); + } + // 2. Check the requesting user is a manager of the custodian team + let { _id: userId } = req.user; + let authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, workflow.publisher.team.toObject(), userId); + // 3. If not return unauthorised + if (!authorised) { + return res.status(401).json({ success: false }); + } + // 4. 
Build workflow response + let { active, _id, id, workflowName, version, steps, applications = [] } = workflow.toObject(); + applications = applications.map(app => { + let { aboutApplication, _id } = app; + if (typeof aboutApplication === 'string') { + aboutApplication = JSON.parse(aboutApplication) || {}; } - // 4. Build workflow response - let { + let { projectName = 'No project name' } = aboutApplication; + return { projectName, _id }; + }); + // Set operation permissions + let canDelete = applications.length === 0, + canEdit = applications.length === 0; + // 5. Return payload + return res.status(200).json({ + success: true, + workflow: { active, _id, id, workflowName, version, steps, - applications = [], - } = workflow.toObject(); - applications = applications.map((app) => { - let { aboutApplication, _id } = app; - if(typeof aboutApplication === 'string') { - aboutApplication = JSON.parse(aboutApplication) || {}; - } - let { projectName = 'No project name' } = aboutApplication; - return { projectName, _id }; - }); - // Set operation permissions - let canDelete = applications.length === 0, - canEdit = applications.length === 0; - // 5. Return payload - return res.status(200).json({ - success: true, - workflow: { - active, - _id, - id, - workflowName, - version, - steps, - applications, - appCount: applications.length, - canDelete, - canEdit, - }, + applications, + appCount: applications.length, + canDelete, + canEdit, + }, + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred searching for the specified workflow', + }); + } +}; + +// POST api/v1/workflows +const createWorkflow = async (req, res) => { + try { + const { _id: userId, firstname, lastname } = req.user; + // 1. Look at the payload for the publisher passed + const { workflowName = '', publisher = '', steps = [] } = req.body; + if (_.isEmpty(workflowName.trim()) || _.isEmpty(publisher.trim()) || _.isEmpty(steps)) { + return res.status(400).json({ + success: false, + message: 'You must supply a workflow name, publisher, and at least one step definition to create a workflow', }); - } catch (err) { - console.error(err.message); - return res.status(500).json({ + } + // 2. Look up publisher and team + const publisherObj = await PublisherModel.findOne({ + _id: publisher, + }).populate({ + path: 'team members', + populate: { + path: 'users', + select: '_id id email firstname lastname', + }, + }); + + if (!publisherObj) { + return res.status(400).json({ success: false, - message: 'An error occurred searching for the specified workflow', + message: 'You must supply a valid publisher to create the workflow against', }); } - }; + // 3. Check the requesting user is a manager of the custodian team + let authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, publisherObj.team.toObject(), userId); - // POST api/v1/workflows - const createWorkflow = async (req, res) => { - try { - const { _id: userId } = req.user; - // 1. Look at the payload for the publisher passed - const { workflowName = '', publisher = '', steps = [] } = req.body; - if ( - _.isEmpty(workflowName.trim()) || - _.isEmpty(publisher.trim()) || - _.isEmpty(steps) - ) { - return res.status(400).json({ - success: false, - message: - 'You must supply a workflow name, publisher, and at least one step definition to create a workflow', - }); - } - // 2. 
Look up publisher and team - const publisherObj = await PublisherModel.findOne({ - _id: publisher, - }).populate('team', 'members'); - if (!publisherObj) { + // 4. Refuse access if not authorised + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 5. Create new workflow model + const id = helper.generatedNumericId(); + // 6. set workflow obj for saving + let workflow = new WorkflowModel({ + id, + workflowName, + publisher, + steps, + createdBy: new mongoose.Types.ObjectId(userId), + }); + // 7. save new workflow to db + workflow.save(function (err) { + if (err) { return res.status(400).json({ success: false, - message: - 'You must supply a valid publisher to create the workflow against', + message: err.message, }); } - // 3. Check the requesting user is a manager of the custodian team - let authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - publisherObj.team.toObject(), - userId + // 8. populate the workflow with the needed fiedls for our new notification and email + workflow.populate( + { + path: 'steps.reviewers', + select: 'firstname lastname email -_id', + }, + (err, doc) => { + if (err) { + // 9. if issue + return res.status(400).json({ + success: false, + message: err.message, + }); + } + // 10. set context + let context = { + publisherObj: publisherObj.team.toObject(), + actioner: `${firstname} ${lastname}`, + workflow: doc.toObject(), + }; + // 11. Generate new notifications / emails for managers of the team only on creation of a workflow + createNotifications(context, constants.notificationTypes.WORKFLOWCREATED); + // 12. full complete return + return res.status(201).json({ + success: true, + workflow, + }); + } ); + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred creating the workflow', + }); + } +}; - // 4. Refuse access if not authorised - if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); - } - // 5. Create new workflow model - const id = helper.generatedNumericId(); - let workflow = new WorkflowModel({ - id, - workflowName, - publisher, - steps, - createdBy: new mongoose.Types.ObjectId(userId), +// PUT api/v1/workflows/:id +const updateWorkflow = async (req, res) => { + try { + const { _id: userId } = req.user; + const { id: workflowId } = req.params; + // 1. Look up workflow + let workflow = await WorkflowModel.findOne({ + _id: req.params.id, + }).populate({ + path: 'publisher steps.reviewers', + select: 'team', + populate: { + path: 'team', + select: 'members -_id', + }, + }); + if (!workflow) { + return res.status(404).json({ success: false }); + } + // 2. Check the requesting user is a manager of the custodian team + let authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, workflow.publisher.team.toObject(), userId); + // 3. Refuse access if not authorised + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 4. Ensure there are no in-review DARs with this workflow + const applications = await DataRequestModel.countDocuments({ + workflowId, + applicationStatus: 'inReview', + }); + if (applications > 0) { + return res.status(400).json({ + success: false, + message: 'A workflow which is attached to applications currently in review cannot be edited', }); - // 6. Submit save - workflow.save(function (err) { + } + // 5. 
Edit workflow + const { workflowName = '', steps = [] } = req.body; + let isDirty = false; + // Check if workflow name updated + if (!_.isEmpty(workflowName)) { + workflow.workflowName = workflowName; + isDirty = true; + } // Check if steps updated + if (!_.isEmpty(steps)) { + workflow.steps = steps; + isDirty = true; + } // Perform save if changes have been made + if (isDirty) { + workflow.save(async err => { if (err) { console.error(err); return res.status(400).json({ @@ -155,544 +236,467 @@ import mongoose from 'mongoose'; }); } else { // 7. Return workflow payload - return res.status(201).json({ + return res.status(204).json({ success: true, workflow, }); } }); - } catch (err) { - console.error(err.message); - return res.status(500).json({ - success: false, - message: 'An error occurred creating the workflow', + } else { + return res.status(200).json({ + success: true, }); } - }; + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred editing the workflow', + }); + } +}; - // PUT api/v1/workflows/:id - const updateWorkflow = async (req, res) => { - try { - const { _id: userId } = req.user; - const { id: workflowId } = req.params; - // 1. Look up workflow - let workflow = await WorkflowModel.findOne({ - _id: req.params.id, - }).populate({ - path: 'publisher steps.reviewers', - select: 'team', - populate: { - path: 'team', - select: 'members -_id', - }, - }); - if (!workflow) { - return res.status(404).json({ success: false }); - } - // 2. Check the requesting user is a manager of the custodian team - let authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - workflow.publisher.team.toObject(), - userId - ); - // 3. Refuse access if not authorised - if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); - } - // 4. Ensure there are no in-review DARs with this workflow - const applications = await DataRequestModel.countDocuments({ - workflowId, - applicationStatus: 'inReview', +// DELETE api/v1/workflows/:id +const deleteWorkflow = async (req, res) => { + try { + const { _id: userId } = req.user; + const { id: workflowId } = req.params; + // 1. Look up workflow + const workflow = await WorkflowModel.findOne({ + _id: req.params.id, + }).populate({ + path: 'publisher steps.reviewers', + select: 'team', + populate: { + path: 'team', + select: 'members -_id', + }, + }); + if (!workflow) { + return res.status(404).json({ success: false }); + } + // 2. Check the requesting user is a manager of the custodian team + let authorised = teamController.checkTeamPermissions(constants.roleTypes.MANAGER, workflow.publisher.team.toObject(), userId); + // 3. Refuse access if not authorised + if (!authorised) { + return res.status(401).json({ status: 'failure', message: 'Unauthorised' }); + } + // 4. Ensure there are no in-review DARs with this workflow + const applications = await DataRequestModel.countDocuments({ + workflowId, + applicationStatus: 'inReview', + }); + if (applications > 0) { + return res.status(400).json({ + success: false, + message: 'A workflow which is attached to applications currently in review cannot be deleted', }); - if (applications > 0) { + } + // 5. Delete workflow + WorkflowModel.deleteOne({ _id: workflowId }, function (err) { + if (err) { + console.error(err); return res.status(400).json({ success: false, - message: - 'A workflow which is attached to applications currently in review cannot be edited', - }); - } - // 5. 
Edit workflow - const { workflowName = '', steps = [] } = req.body; - let isDirty = false; - // Check if workflow name updated - if (!_.isEmpty(workflowName)) { - workflow.workflowName = workflowName; - isDirty = true; - } // Check if steps updated - if (!_.isEmpty(steps)) { - workflow.steps = steps; - isDirty = true; - } // Perform save if changes have been made - if (isDirty) { - workflow.save(async (err) => { - if (err) { - console.error(err); - return res.status(400).json({ - success: false, - message: err.message, - }); - } else { - // 7. Return workflow payload - return res.status(204).json({ - success: true, - workflow - }); - } + message: 'An error occurred deleting the workflow', }); } else { - return res.status(200).json({ - success: true + // 7. Return workflow payload + return res.status(204).json({ + success: true, }); } - } catch (err) { - console.error(err.message); - return res.status(500).json({ - success: false, - message: 'An error occurred editing the workflow', - }); + }); + } catch (err) { + console.error(err.message); + return res.status(500).json({ + success: false, + message: 'An error occurred deleting the workflow', + }); + } +}; + +const createNotifications = async (context, type = '') => { + if (!_.isEmpty(type)) { + // local variables set here + let custodianManagers = [], + managerUserIds = [], + options = {}, + html = ''; + + // deconstruct context + let { publisherObj, workflow = {}, actioner = '' } = context; + + // switch over types + switch (type) { + case constants.notificationTypes.WORKFLOWCREATED: + // 1. Get managers for publisher + custodianManagers = teamController.getTeamMembersByRole(publisherObj, constants.roleTypes.MANAGER); + // 2. Get managerIds for notifications + managerUserIds = custodianManagers.map(user => user.id); + // 3. deconstruct workflow + let { workflowName = 'Workflow Title', _id, steps, createdAt } = workflow; + // 4. setup options + options = { + actioner, + workflowName, + _id, + steps, + createdAt, + }; + // 4. Create notifications for the managers only + await notificationBuilder.triggerNotificationMessage( + managerUserIds, + `A new workflow of ${workflowName} has been created`, + 'workflow', + _id + ); + // 5. Generate the email + html = await emailGenerator.generateWorkflowCreated(options); + // 6. Send email to custodian managers only within the team + await emailGenerator.sendEmail(custodianManagers, constants.hdrukEmail, `A Workflow has been created`, html, false); + break; } - }; + } +}; - // DELETE api/v1/workflows/:id - const deleteWorkflow = async (req, res) => { - try { - const { _id: userId } = req.user; - const { id: workflowId } = req.params; - // 1. Look up workflow - const workflow = await WorkflowModel.findOne({ - _id: req.params.id, - }).populate({ - path: 'publisher steps.reviewers', - select: 'team', - populate: { - path: 'team', - select: 'members -_id', - }, - }); - if (!workflow) { - return res.status(404).json({ success: false }); - } - // 2. Check the requesting user is a manager of the custodian team - let authorised = teamController.checkTeamPermissions( - constants.roleTypes.MANAGER, - workflow.publisher.team.toObject(), - userId - ); - // 3. Refuse access if not authorised - if (!authorised) { - return res - .status(401) - .json({ status: 'failure', message: 'Unauthorised' }); - } - // 4. 
Ensure there are no in-review DARs with this workflow - const applications = await DataRequestModel.countDocuments({ - workflowId, - applicationStatus: 'inReview', - }); - if (applications > 0) { - return res.status(400).json({ - success: false, - message: - 'A workflow which is attached to applications currently in review cannot be deleted', - }); +const calculateStepDeadlineReminderDate = step => { + // Extract deadline and reminder offset in days from step definition + let { deadline, reminderOffset } = step; + // Subtract SLA reminder offset + let reminderPeriod = +deadline - +reminderOffset; + return `P${reminderPeriod}D`; +}; + +const workflowStepContainsManager = (reviewers, team) => { + let managerExists = false; + // 1. Extract team members + let { members } = team; + // 2. Iterate through each reviewer to check if they are a manager of the team + reviewers.forEach(reviewer => { + // 3. Find the current user + let userMember = members.find(member => member.memberid.toString() === reviewer.toString()); + // 4. If the user was found check if they are a manager + if (userMember) { + let { roles } = userMember; + if (roles.includes(constants.roleTypes.MANAGER)) { + managerExists = true; } - // 5. Delete workflow - WorkflowModel.deleteOne({ _id: workflowId }, function (err) { - if (err) { - console.error(err); - return res.status(400).json({ - success: false, - message: 'An error occurred deleting the workflow', - }); - } else { - // 7. Return workflow payload - return res.status(204).json({ - success: true, - }); - } - }); - } catch (err) { - console.error(err.message); - return res.status(500).json({ - success: false, - message: 'An error occurred deleting the workflow', - }); } - }; + }); + return managerExists; +}; - const calculateStepDeadlineReminderDate = (step) => { - // Extract deadline and reminder offset in days from step definition - let { deadline, reminderOffset } = step; - // Subtract SLA reminder offset - let reminderPeriod = +deadline - +reminderOffset; - return `P${reminderPeriod}D`; +const buildNextStep = (userId, application, activeStepIndex, override) => { + // Check the current position of the application within its assigned workflow + const finalStep = activeStepIndex === application.workflow.steps.length - 1; + const requiredReviews = application.workflow.steps[activeStepIndex].reviewers.length; + const completedReviews = application.workflow.steps[activeStepIndex].recommendations.length; + const stepComplete = completedReviews === requiredReviews; + // Establish base payload for Camunda + // (1) phaseApproved is passed as true when the manager is overriding the current step/phase + // this short circuits the review process in the workflow and closes any remaining user tasks + // i.e. 
reviewers within the active step OR when the last reviewer in the step submits a vote + // (2) managerApproved is passed as true when the manager is approving the entire application + // from any point within the review process + // (3) finalPhaseApproved is passed as true when the final step is completed naturally through all + // reviewers casting their votes + let bpmContext = { + businessKey: application._id, + dataRequestUserId: userId.toString(), + managerApproved: override, + phaseApproved: (override && !finalStep) || stepComplete, + finalPhaseApproved: finalStep, + stepComplete, }; + if (!finalStep) { + // Extract the information for the next step defintion + let { name: dataRequestPublisher } = application.publisherObj; + let nextStep = application.workflow.steps[activeStepIndex + 1]; + let reviewerList = nextStep.reviewers.map(reviewer => reviewer._id.toString()); + let { stepName: dataRequestStepName } = nextStep; + // Update Camunda payload with the next step information + bpmContext = { + ...bpmContext, + dataRequestPublisher, + dataRequestStepName, + notifyReviewerSLA: calculateStepDeadlineReminderDate(nextStep), + reviewerList, + }; + } + return bpmContext; +}; - const workflowStepContainsManager = (reviewers, team) => { - let managerExists = false; - // 1. Extract team members - let { members } = team; - // 2. Iterate through each reviewer to check if they are a manager of the team - reviewers.forEach(reviewer => { - // 3. Find the current user - let userMember = members.find( - (member) => member.memberid.toString() === reviewer.toString() - ); - // 4. If the user was found check if they are a manager - if (userMember) { - let { roles } = userMember; - if (roles.includes(constants.roleTypes.MANAGER)) { - managerExists = true; - } - } - }) - return managerExists; - }; +const getWorkflowCompleted = (workflow = {}) => { + let workflowCompleted = false; + if (!_.isEmpty(workflow)) { + let { steps } = workflow; + workflowCompleted = steps.every(step => step.completed); + } + return workflowCompleted; +}; - const buildNextStep = (userId, application, activeStepIndex, override) => { - // Check the current position of the application within its assigned workflow - const finalStep = activeStepIndex === application.workflow.steps.length -1; - const requiredReviews = application.workflow.steps[activeStepIndex].reviewers.length; - const completedReviews = application.workflow.steps[activeStepIndex].recommendations.length; - const stepComplete = completedReviews === requiredReviews; - // Establish base payload for Camunda - // (1) phaseApproved is passed as true when the manager is overriding the current step/phase - // this short circuits the review process in the workflow and closes any remaining user tasks - // i.e. 
reviewers within the active step OR when the last reviewer in the step submits a vote - // (2) managerApproved is passed as true when the manager is approving the entire application - // from any point within the review process - // (3) finalPhaseApproved is passed as true when the final step is completed naturally through all - // reviewers casting their votes - let bpmContext = { - businessKey: application._id, - dataRequestUserId: userId.toString(), - managerApproved: override, - phaseApproved: (override && !finalStep) || stepComplete, - finalPhaseApproved: finalStep, - stepComplete - } - if(!finalStep) { - // Extract the information for the next step defintion - let { name: dataRequestPublisher } = application.publisherObj; - let nextStep = application.workflow.steps[activeStepIndex+1]; - let reviewerList = nextStep.reviewers.map((reviewer) => reviewer._id.toString()); - let { stepName: dataRequestStepName } = nextStep; - // Update Camunda payload with the next step information - bpmContext = { - ...bpmContext, - dataRequestPublisher, - dataRequestStepName, - notifyReviewerSLA: calculateStepDeadlineReminderDate( - nextStep - ), - reviewerList - }; - } - return bpmContext; - }; +const getActiveWorkflowStep = (workflow = {}) => { + let activeStep = {}; + if (!_.isEmpty(workflow)) { + let { steps } = workflow; + activeStep = steps.find(step => { + return step.active; + }); + } + return activeStep; +}; - const getWorkflowCompleted = (workflow = {}) => { - let workflowCompleted = false; - if (!_.isEmpty(workflow)) { - let { steps } = workflow; - workflowCompleted = steps.every((step) => step.completed); +const getStepReviewers = (step = {}) => { + let stepReviewers = []; + // Attempt to get step reviewers if workflow passed + if (!_.isEmpty(step)) { + // Get active reviewers + if (step) { + ({ reviewers: stepReviewers } = step); } - return workflowCompleted; - }; + } + return stepReviewers; +}; - const getActiveWorkflowStep = (workflow = {}) => { - let activeStep = {}; - if (!_.isEmpty(workflow)) { - let { steps } = workflow; - activeStep = steps.find((step) => { - return step.active; - }); - } - return activeStep; - }; +const getRemainingReviewers = (Step = {}, users) => { + let { reviewers = [], recommendations = [] } = Step; + let remainingActioners = reviewers.filter(reviewer => !recommendations.some(rec => rec.reviewer.toString() === reviewer._id.toString())); + remainingActioners = [...users].filter(user => remainingActioners.some(actioner => actioner._id.toString() === user._id.toString())); - const getStepReviewers = (step = {}) => { - let stepReviewers = []; - // Attempt to get step reviewers if workflow passed - if (!_.isEmpty(step)) { - // Get active reviewers - if(step) { - ({ reviewers: stepReviewers } = step); - } - } - return stepReviewers; - }; + return remainingActioners; +}; - const getRemainingReviewers = (Step = {}, users) => { - let { reviewers = [], recommendations = []} = Step; - let remainingActioners = reviewers.filter( - (reviewer) => - !recommendations.some( - (rec) => rec.reviewer.toString() === reviewer._id.toString() - ) - ); - remainingActioners = [...users] - .filter((user) => - remainingActioners.some( - (actioner) => actioner._id.toString() === user._id.toString() - ) - ); +const getActiveStepStatus = (activeStep, users = [], userId = '') => { + let reviewStatus = '', + deadlinePassed = false, + remainingActioners = [], + decisionMade = false, + decisionComments = '', + decisionApproved = false, + decisionDate = '', + decisionStatus = ''; + let { stepName, 
deadline, startDateTime, reviewers = [], recommendations = [], sections = [] } = activeStep; + let deadlineDate = moment(startDateTime).add(deadline, 'days'); + let diff = parseInt(deadlineDate.diff(new Date(), 'days')); + if (diff > 0) { + reviewStatus = `Deadline in ${diff} days`; + } else if (diff < 0) { + reviewStatus = `Deadline was ${Math.abs(diff)} days ago`; + deadlinePassed = true; + } else { + reviewStatus = `Deadline is today`; + } + remainingActioners = reviewers.filter(reviewer => !recommendations.some(rec => rec.reviewer.toString() === reviewer._id.toString())); + remainingActioners = users + .filter(user => remainingActioners.some(actioner => actioner._id.toString() === user._id.toString())) + .map(user => { + let isCurrentUser = user._id.toString() === userId.toString(); + return `${user.firstname} ${user.lastname}${isCurrentUser ? ` (you)` : ``}`; + }); + + let isReviewer = reviewers.some(reviewer => reviewer._id.toString() === userId.toString()); + let hasRecommended = recommendations.some(rec => rec.reviewer.toString() === userId.toString()); + + decisionMade = isReviewer && hasRecommended; - return remainingActioners; + if (decisionMade) { + decisionStatus = 'Decision made for this phase'; + } else if (isReviewer) { + decisionStatus = 'Decision required'; + } else { + decisionStatus = ''; } - const getActiveStepStatus = (activeStep, users = [], userId = '') => { - let reviewStatus = '', - deadlinePassed = false, - remainingActioners = [], - decisionMade = false, - decisionComments = '', - decisionApproved = false, - decisionDate = '', - decisionStatus = ''; - let { - stepName, - deadline, - startDateTime, - reviewers = [], - recommendations = [], - sections = [], - } = activeStep; - let deadlineDate = moment(startDateTime).add(deadline, 'days'); - let diff = parseInt(deadlineDate.diff(new Date(), 'days')); - if (diff > 0) { - reviewStatus = `Deadline in ${diff} days`; - } else if (diff < 0) { - reviewStatus = `Deadline was ${Math.abs(diff)} days ago`; - deadlinePassed = true; - } else { - reviewStatus = `Deadline is today`; - } - remainingActioners = reviewers.filter( - (reviewer) => - !recommendations.some( - (rec) => rec.reviewer.toString() === reviewer._id.toString() - ) - ); - remainingActioners = users - .filter((user) => - remainingActioners.some( - (actioner) => actioner._id.toString() === user._id.toString() - ) - ) - .map((user) => { - let isCurrentUser = user._id.toString() === userId.toString(); - return `${user.firstname} ${user.lastname}${isCurrentUser ? 
` (you)`:``}`; - }); - - let isReviewer = reviewers.some( - (reviewer) => reviewer._id.toString() === userId.toString() - ); - let hasRecommended = recommendations.some( - (rec) => rec.reviewer.toString() === userId.toString() - ); - - decisionMade = isReviewer && hasRecommended; - - if (decisionMade) { - decisionStatus = 'Decision made for this phase'; - } else if (isReviewer) { - decisionStatus = 'Decision required'; - } else { - decisionStatus = ''; - } - - if (hasRecommended) { - let recommendation = recommendations.find( - (rec) => rec.reviewer.toString() === userId.toString() - ); - ({ - comments: decisionComments, - approved: decisionApproved, - createdDate: decisionDate, - } = recommendation); - } - - let reviewPanels = sections - .map((section) => constants.darPanelMapper[section]) - .join(', '); - - return { - stepName, - remainingActioners: remainingActioners.join(', '), - deadlinePassed, - isReviewer, - reviewStatus, - decisionMade, - decisionApproved, - decisionDate, - decisionStatus, - decisionComments, - reviewPanels, - }; + if (hasRecommended) { + let recommendation = recommendations.find(rec => rec.reviewer.toString() === userId.toString()); + ({ comments: decisionComments, approved: decisionApproved, createdDate: decisionDate } = recommendation); + } + + let reviewPanels = sections.map(section => constants.darPanelMapper[section]).join(', '); + + return { + stepName, + remainingActioners: remainingActioners.join(', '), + deadlinePassed, + isReviewer, + reviewStatus, + decisionMade, + decisionApproved, + decisionDate, + decisionStatus, + decisionComments, + reviewPanels, }; - - const getWorkflowStatus = (application) => { - let workflowStatus = {}; - let { workflow = {} } = application; - if (!_.isEmpty(workflow)) { - let { workflowName, steps } = workflow; - // Find the active step in steps - let activeStep = getActiveWorkflowStep(workflow); - let activeStepIndex = steps.findIndex((step) => { - return step.active === true; - }); - if (activeStep) { - let { - reviewStatus, - deadlinePassed, - } = getActiveStepStatus(activeStep); - //Update active step with review status - steps[activeStepIndex] = { - ...steps[activeStepIndex], - reviewStatus, - deadlinePassed, - }; - } - //Update steps with user friendly review sections - let formattedSteps = [...steps].reduce((arr, item) => { - let step = { - ...item, - sections: [...item.sections].map( - (section) => constants.darPanelMapper[section] - ), - }; - arr.push(step); - return arr; - }, []); - - workflowStatus = { - workflowName, - steps: formattedSteps, - isCompleted: getWorkflowCompleted(workflow), +}; + +const getWorkflowStatus = application => { + let workflowStatus = {}; + let { workflow = {} } = application; + if (!_.isEmpty(workflow)) { + let { workflowName, steps } = workflow; + // Find the active step in steps + let activeStep = getActiveWorkflowStep(workflow); + let activeStepIndex = steps.findIndex(step => { + return step.active === true; + }); + if (activeStep) { + let { reviewStatus, deadlinePassed } = getActiveStepStatus(activeStep); + //Update active step with review status + steps[activeStepIndex] = { + ...steps[activeStepIndex], + reviewStatus, + deadlinePassed, }; } - return workflowStatus; - }; + //Update steps with user friendly review sections + let formattedSteps = [...steps].reduce((arr, item) => { + let step = { + ...item, + sections: [...item.sections].map(section => constants.darPanelMapper[section]), + }; + arr.push(step); + return arr; + }, []); - const getReviewStatus = (application, userId) => { - 
let inReviewMode = false, - reviewSections = [], - isActiveStepReviewer = false, - hasRecommended = false; - // Get current application status - let { applicationStatus } = application; - // Check if the current user is a reviewer on the current step of an attached workflow - let { workflow = {} } = application; - if (!_.isEmpty(workflow)) { - let { steps } = workflow; - let activeStep = steps.find((step) => { - return step.active === true; - }); - if (activeStep) { - isActiveStepReviewer = activeStep.reviewers.some( - (reviewer) => reviewer._id.toString() === userId.toString() - ); - reviewSections = [...activeStep.sections]; - - let { recommendations = [] } = activeStep; - if (!_.isEmpty(recommendations)) { - hasRecommended = recommendations.some( - (rec) => rec.reviewer.toString() === userId.toString() - ); - } + workflowStatus = { + workflowName, + steps: formattedSteps, + isCompleted: getWorkflowCompleted(workflow), + }; + } + return workflowStatus; +}; + +const getReviewStatus = (application, userId) => { + let inReviewMode = false, + reviewSections = [], + isActiveStepReviewer = false, + hasRecommended = false; + // Get current application status + let { applicationStatus } = application; + // Check if the current user is a reviewer on the current step of an attached workflow + let { workflow = {} } = application; + if (!_.isEmpty(workflow)) { + let { steps } = workflow; + let activeStep = steps.find(step => { + return step.active === true; + }); + if (activeStep) { + isActiveStepReviewer = activeStep.reviewers.some(reviewer => reviewer._id.toString() === userId.toString()); + reviewSections = [...activeStep.sections]; + + let { recommendations = [] } = activeStep; + if (!_.isEmpty(recommendations)) { + hasRecommended = recommendations.some(rec => rec.reviewer.toString() === userId.toString()); } } - // Return active review mode if conditions apply - if (applicationStatus === 'inReview' && isActiveStepReviewer) { - inReviewMode = true; + } + // Return active review mode if conditions apply + if (applicationStatus === 'inReview' && isActiveStepReviewer) { + inReviewMode = true; + } + + return { inReviewMode, reviewSections, hasRecommended }; +}; + +const getWorkflowEmailContext = (accessRecord, workflow, relatedStepIndex) => { + // Extract workflow email variables + const { dateReviewStart = '' } = accessRecord; + const { workflowName, steps } = workflow; + const { stepName, startDateTime = '', endDateTime = '', completed = false, deadline: stepDeadline = 0, reminderOffset = 0 } = steps[ + relatedStepIndex + ]; + const stepReviewers = getStepReviewers(steps[relatedStepIndex]); + const reviewerNames = [...stepReviewers].map(reviewer => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); + const reviewSections = [...steps[relatedStepIndex].sections].map(section => constants.darPanelMapper[section]).join(', '); + const stepReviewerUserIds = [...stepReviewers].map(user => user.id); + const currentDeadline = stepDeadline === 0 ? 
'No deadline specified' : moment().add(stepDeadline, 'days'); + let nextStepName = '', + nextReviewerNames = '', + nextReviewSections = '', + duration = '', + totalDuration = '', + nextDeadline = '', + dateDeadline = '', + deadlineElapsed = false, + deadlineApproaching = false, + remainingReviewers = [], + remainingReviewerUserIds = []; + + // Calculate duration for step if it is completed + if (completed) { + if (!_.isEmpty(startDateTime.toString()) && !_.isEmpty(endDateTime.toString())) { + duration = moment(endDateTime).diff(moment(startDateTime), 'days'); + duration = duration === 0 ? `Same day` : duration === 1 ? `1 day` : `${duration} days`; } - - return { inReviewMode, reviewSections, hasRecommended }; - }; - - const getWorkflowEmailContext = (accessRecord, workflow, relatedStepIndex) => { - // Extract workflow email variables - const { dateReviewStart = '' } = accessRecord; - const { workflowName, steps } = workflow; - const { stepName, startDateTime = '', endDateTime = '', completed = false, deadline: stepDeadline = 0, reminderOffset = 0 } = steps[relatedStepIndex]; - const stepReviewers = getStepReviewers(steps[relatedStepIndex]); - const reviewerNames = [...stepReviewers].map((reviewer) => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); - const reviewSections = [...steps[relatedStepIndex].sections].map((section) => constants.darPanelMapper[section]).join(', '); - const stepReviewerUserIds = [...stepReviewers].map((user) => user.id); - const currentDeadline = stepDeadline === 0 ? 'No deadline specified' : moment().add(stepDeadline, 'days'); - let nextStepName = '', nextReviewerNames = '', nextReviewSections = '', duration = '', totalDuration = '', nextDeadline = '', dateDeadline = '', deadlineElapsed = false, deadlineApproaching = false, remainingReviewers = [], remainingReviewerUserIds = []; - - // Calculate duration for step if it is completed - if(completed) { - if(!_.isEmpty(startDateTime.toString()) && !_.isEmpty(endDateTime.toString())) { - duration = moment(endDateTime).diff(moment(startDateTime), 'days'); - duration = duration === 0 ? `Same day` : duration === 1 ? 
`1 day` : `${duration} days`; - } - } else { - //If related step is not completed, check if deadline has elapsed or is approaching - if(!_.isEmpty(startDateTime.toString()) && stepDeadline != 0) { - dateDeadline = moment(startDateTime).add(stepDeadline, 'days'); - deadlineElapsed = moment().isAfter(dateDeadline, 'second'); - - // If deadline is not elapsed, check if it is within SLA period - if(!deadlineElapsed && reminderOffset !== 0) { - let deadlineReminderDate = moment(dateDeadline).subtract(reminderOffset, 'days'); - deadlineApproaching = moment().isAfter(deadlineReminderDate, 'second'); - } - } - // Find reviewers of the current incomplete phase - let accessRecordObj = accessRecord.toObject(); - if(_.has(accessRecordObj, 'publisherObj.team.users')){ - let { publisherObj: { team: { users = [] } } } = accessRecordObj; - remainingReviewers = getRemainingReviewers(steps[relatedStepIndex], users); - remainingReviewerUserIds = [...remainingReviewers].map((user) => user.id); + } else { + //If related step is not completed, check if deadline has elapsed or is approaching + if (!_.isEmpty(startDateTime.toString()) && stepDeadline != 0) { + dateDeadline = moment(startDateTime).add(stepDeadline, 'days'); + deadlineElapsed = moment().isAfter(dateDeadline, 'second'); + + // If deadline is not elapsed, check if it is within SLA period + if (!deadlineElapsed && reminderOffset !== 0) { + let deadlineReminderDate = moment(dateDeadline).subtract(reminderOffset, 'days'); + deadlineApproaching = moment().isAfter(deadlineReminderDate, 'second'); } } + // Find reviewers of the current incomplete phase + let accessRecordObj = accessRecord.toObject(); + if (_.has(accessRecordObj, 'publisherObj.team.users')) { + let { + publisherObj: { + team: { users = [] }, + }, + } = accessRecordObj; + remainingReviewers = getRemainingReviewers(steps[relatedStepIndex], users); + remainingReviewerUserIds = [...remainingReviewers].map(user => user.id); + } + } - // Check if there is another step after the current related step - if(relatedStepIndex + 1 === steps.length) { - // If workflow completed - nextStepName = 'No next step'; - // Calculate total duration for workflow - if(steps[relatedStepIndex].completed && !_.isEmpty(dateReviewStart.toString())){ - totalDuration = moment().diff(moment(dateReviewStart), 'days'); - totalDuration = totalDuration === 0 ? `Same day` : duration === 1 ? `1 day` : `${duration} days`; - } - } else { - // Get details of next step if this is not the final step - ({ stepName: nextStepName } = steps[relatedStepIndex + 1]); - let nextStepReviewers = getStepReviewers(steps[relatedStepIndex + 1]); - nextReviewerNames = [...nextStepReviewers].map((reviewer) => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); - nextReviewSections = [...steps[relatedStepIndex + 1].sections].map((section) => constants.darPanelMapper[section]).join(', '); - let { deadline = 0 } = steps[relatedStepIndex + 1]; - nextDeadline = deadline === 0 ? 'No deadline specified' : moment().add(deadline, 'days'); + // Check if there is another step after the current related step + if (relatedStepIndex + 1 === steps.length) { + // If workflow completed + nextStepName = 'No next step'; + // Calculate total duration for workflow + if (steps[relatedStepIndex].completed && !_.isEmpty(dateReviewStart.toString())) { + totalDuration = moment().diff(moment(dateReviewStart), 'days'); + totalDuration = totalDuration === 0 ? `Same day` : duration === 1 ? 
`1 day` : `${duration} days`; } - return { - workflowName, - stepName, - startDateTime, - endDateTime, - stepReviewers, - duration, - totalDuration, - reviewerNames, - stepReviewerUserIds, - reviewSections, - currentDeadline, - nextStepName, - nextReviewerNames, - nextReviewSections, - nextDeadline, - dateDeadline, - deadlineElapsed, - deadlineApproaching, - remainingReviewers, - remainingReviewerUserIds - }; + } else { + // Get details of next step if this is not the final step + ({ stepName: nextStepName } = steps[relatedStepIndex + 1]); + let nextStepReviewers = getStepReviewers(steps[relatedStepIndex + 1]); + nextReviewerNames = [...nextStepReviewers].map(reviewer => `${reviewer.firstname} ${reviewer.lastname}`).join(', '); + nextReviewSections = [...steps[relatedStepIndex + 1].sections].map(section => constants.darPanelMapper[section]).join(', '); + let { deadline = 0 } = steps[relatedStepIndex + 1]; + nextDeadline = deadline === 0 ? 'No deadline specified' : moment().add(deadline, 'days'); + } + return { + workflowName, + steps, + stepName, + startDateTime, + endDateTime, + stepReviewers, + duration, + totalDuration, + reviewerNames, + stepReviewerUserIds, + reviewSections, + currentDeadline, + nextStepName, + nextReviewerNames, + nextReviewSections, + nextDeadline, + dateDeadline, + deadlineElapsed, + deadlineApproaching, + remainingReviewers, + remainingReviewerUserIds, }; +}; export default { getWorkflowById: getWorkflowById, @@ -708,5 +712,6 @@ export default { getActiveStepStatus: getActiveStepStatus, getWorkflowStatus: getWorkflowStatus, getReviewStatus: getReviewStatus, - getWorkflowEmailContext: getWorkflowEmailContext -}; \ No newline at end of file + getWorkflowEmailContext: getWorkflowEmailContext, + createNotifications: createNotifications, +}; diff --git a/src/resources/workflow/workflow.model.js b/src/resources/workflow/workflow.model.js index 42434181..eb1c8f1a 100644 --- a/src/resources/workflow/workflow.model.js +++ b/src/resources/workflow/workflow.model.js @@ -1,58 +1,66 @@ import { model, Schema } from 'mongoose'; -export const minReviewers = (val) => { - return val.length > 0; -} +export const minReviewers = val => { + return val.length > 0; +}; -export const minSteps = (val) => { - return val.length > 0; -} +export const minSteps = val => { + return val.length > 0; +}; -export const minSections = (val) => { - return val.length > 0; -} +export const minSections = val => { + return val.length > 0; +}; const StepSchema = new Schema({ - stepName: { type: String, required: true }, - reviewers: { type: [{ type : Schema.Types.ObjectId, ref: 'User' }], validate:[minReviewers, 'There must be at least one reviewer per phase'] }, - sections: { type: [String], validate:[minSections, 'There must be at least one section assigned to a phase'] }, - deadline: { type: Number, required: true }, // Number of days from step starting that a deadline is reached - reminderOffset: { type: Number, required: true, default: 3 }, // Number of days before deadline that SLAs are triggered by Camunda - // Items below not required for step definition - active: { type: Boolean, default: false }, - completed: { type: Boolean, default: false }, - startDateTime: { type: Date }, - endDateTime: { type: Date }, - recommendations: [{ - reviewer: { type : Schema.Types.ObjectId, ref: 'User' }, - approved: { type: Boolean }, - comments: { type: String }, - createdDate: { type: Date } - }] + stepName: { type: String, required: true }, + reviewers: { + type: [{ type: Schema.Types.ObjectId, ref: 'User' }], + 
validate: [minReviewers, 'There must be at least one reviewer per phase'], + }, + sections: { type: [String], validate: [minSections, 'There must be at least one section assigned to a phase'] }, + deadline: { type: Number, required: true }, // Number of days from step starting that a deadline is reached + reminderOffset: { type: Number, required: true, default: 3 }, // Number of days before deadline that SLAs are triggered by Camunda + // Items below not required for step definition + active: { type: Boolean, default: false }, + completed: { type: Boolean, default: false }, + startDateTime: { type: Date }, + endDateTime: { type: Date }, + recommendations: [ + { + reviewer: { type: Schema.Types.ObjectId, ref: 'User' }, + approved: { type: Boolean }, + comments: { type: String }, + createdDate: { type: Date }, + }, + ], }); -export const WorkflowSchema = new Schema({ - id: { type: Number, required: true }, - workflowName: { type: String, required: true }, - version: Number, - publisher: { type : Schema.Types.ObjectId, ref: 'Publisher', required: true }, - steps: { type: [ StepSchema ], validate:[minSteps, 'There must be at least one phase in a workflow']}, - active: { - type: Boolean, - default: true - }, - createdBy: { type : Schema.Types.ObjectId, ref: 'User', required: true }, - updatedBy: { type : Schema.Types.ObjectId, ref: 'User' } -}, { - timestamps: true, - toJSON: { virtuals: true }, - toObject: { virtuals: true } -}); +export const WorkflowSchema = new Schema( + { + id: { type: Number, required: true }, + workflowName: { type: String, required: true }, + version: Number, + publisher: { type: Schema.Types.ObjectId, ref: 'Publisher', required: true }, + steps: { type: [StepSchema], validate: [minSteps, 'There must be at least one phase in a workflow'] }, + active: { + type: Boolean, + default: true, + }, + createdBy: { type: Schema.Types.ObjectId, ref: 'User', required: true }, + updatedBy: { type: Schema.Types.ObjectId, ref: 'User' }, + }, + { + timestamps: true, + toJSON: { virtuals: true }, + toObject: { virtuals: true }, + } +); WorkflowSchema.virtual('applications', { - ref: 'data_request', - foreignField: 'workflowId', - localField: '_id' + ref: 'data_request', + foreignField: 'workflowId', + localField: '_id', }); -export const WorkflowModel = model('Workflow', WorkflowSchema); \ No newline at end of file +export const WorkflowModel = model('Workflow', WorkflowSchema); diff --git a/src/resources/workflow/workflow.route.js b/src/resources/workflow/workflow.route.js index 8af3d8a0..75910fb1 100644 --- a/src/resources/workflow/workflow.route.js +++ b/src/resources/workflow/workflow.route.js @@ -24,4 +24,4 @@ router.put('/:id', passport.authenticate('jwt'), workflowController.updateWorkfl // @access Private router.delete('/:id', passport.authenticate('jwt'), workflowController.deleteWorkflow); -module.exports = router +module.exports = router; diff --git a/test/inputSanitizer.test.js b/test/inputSanitizer.test.js index e3deaea2..ca5f8788 100644 --- a/test/inputSanitizer.test.js +++ b/test/inputSanitizer.test.js @@ -1,30 +1,30 @@ const inputSanitizer = require('../src/resources/utilities/inputSanitizer'); -describe("removes non-breaking spaces", () => { - test("removes non-breaking spaces from a string", () => { - let str = "testsave testsave test"; - let result = inputSanitizer.removeNonBreakingSpaces(str); - expect(result).toBe("testsave testsave test") - }) +describe('removes non-breaking spaces', () => { + test('removes non-breaking spaces from a string', () => { + let str = 
'testsave testsave test'; + let result = inputSanitizer.removeNonBreakingSpaces(str); + expect(result).toBe('testsave testsave test'); + }); - test("removes non-breaking spaces from an array of strings", () => { - let str = ["testsave testsave test", " ", "123", "abcd efg", ""]; - let result = inputSanitizer.removeNonBreakingSpaces(str); - let expectedResult = ["testsave testsave test", " ", "123", "abcd efg", ""] - expect(result).toStrictEqual(expectedResult); - }) + test('removes non-breaking spaces from an array of strings', () => { + let str = ['testsave testsave test', ' ', '123', 'abcd efg', '']; + let result = inputSanitizer.removeNonBreakingSpaces(str); + let expectedResult = ['testsave testsave test', ' ', '123', 'abcd efg', '']; + expect(result).toStrictEqual(expectedResult); + }); - test("returns empty array when passed an empty array", () => { - let str = []; - let result = inputSanitizer.removeNonBreakingSpaces(str); - let expectedResult = [] - expect(result).toStrictEqual(expectedResult); - }) - - test("returns empty string when passed an empty string", () => { - let str = ""; - let result = inputSanitizer.removeNonBreakingSpaces(str); - let expectedResult = ""; - expect(result).toStrictEqual(expectedResult); - }) -}) \ No newline at end of file + test('returns empty array when passed an empty array', () => { + let str = []; + let result = inputSanitizer.removeNonBreakingSpaces(str); + let expectedResult = []; + expect(result).toStrictEqual(expectedResult); + }); + + test('returns empty string when passed an empty string', () => { + let str = ''; + let result = inputSanitizer.removeNonBreakingSpaces(str); + let expectedResult = ''; + expect(result).toStrictEqual(expectedResult); + }); +}); diff --git a/test/routes.test.js b/test/routes.test.js index a356e465..f8b4c1ad 100644 --- a/test/routes.test.js +++ b/test/routes.test.js @@ -1,119 +1,99 @@ -const request = require("supertest"); -const testURL = request( process.env.URL || 'https://api.latest.healthdatagateway.org/'); - - -describe("Wake up API", () => { - test("Check the api is alive", async () => { - jest.setTimeout(30000); - const response = await testURL.get("/api/dead"); - expect(response.statusCode).toBe(404); - }); +const request = require('supertest'); +const testURL = request(process.env.URL || 'https://api.latest.healthdatagateway.org/'); + +describe('Wake up API', () => { + test('Check the api is alive', async () => { + jest.setTimeout(30000); + const response = await testURL.get('/api/dead'); + expect(response.statusCode).toBe(404); + }); }, 30000); - -describe("Search API", () => { - test("Search without any parameters should return at least one result", async () => { - const response = await testURL.get("/api/v1/search"); - expect(response.statusCode).toBe(200); - let payload = JSON.parse(response.text); - - expect(payload).toHaveProperty('success'); - expect(payload).toHaveProperty('datasetResults'); - expect(payload['datasetResults'].length).toBeGreaterThanOrEqual(1); - expect(payload).toHaveProperty('summary'); - - }); - - ['covid','CMMID'].forEach(function(searchString) { - - test(`Search for string '${searchString}', first tool result should contain name or description '${searchString}'`, async () => { - const response = await testURL.get('/api/v1/search?search='+searchString); - expect(response.statusCode).toBe(200); - let payload = JSON.parse(response.text); - - expect(payload).toHaveProperty('success'); - expect(payload).toHaveProperty('toolResults'); - 
expect(payload['toolResults'].length).toBeGreaterThanOrEqual(1); - expect(payload).toHaveProperty('summary'); - - - expect(payload['toolResults'][0]).toHaveProperty('name'); - expect(payload['toolResults'][0]).toHaveProperty('description'); - expect(payload['toolResults'][0]).toHaveProperty('tags'); - - let name = payload['toolResults'][0]['name'].toLowerCase() || ''; - let description = payload['toolResults'][0]['description'].toLowerCase() || ''; - let tags = payload['toolResults'][0]['tags']['topics'].join().toLowerCase() || ''; - let string = searchString.toLowerCase(); - - expect( name.includes(string) || description.includes(string) || tags.includes(string)).toBeTruthy(); - - }); - - }); - - - //add other things to search for here THAT SHOULD NOT RETURN!!! - ['crap','zzz'].forEach(function(searchString) { - - test(`Search for string '${searchString}', nothing should be returned`, async () => { - const response = await testURL.get('/api/v1/search?search='+searchString); - expect(response.statusCode).toBe(200); - let payload = JSON.parse(response.text); - - expect(payload).toHaveProperty('success'); - expect(payload).toHaveProperty('toolResults'); - expect(payload['toolResults'].length).toBe(0); - expect(payload).toHaveProperty('summary'); - - }); - - }); - - ['annual district death daily','cancer','epilepsy'].forEach(function(searchString) { - - test(`Search for string '${searchString}', first dataset result should contain name or description '${searchString}'`, async () => { - const response = await testURL.get('/api/v1/search?search='+searchString); - expect(response.statusCode).toBe(200); - let payload = JSON.parse(response.text); - expect(payload).toHaveProperty('success'); - expect(payload).toHaveProperty('datasetResults'); - expect(payload['datasetResults'].length).toBeGreaterThanOrEqual(1); - expect(payload).toHaveProperty('summary'); - - - expect(payload['datasetResults'][0]).toHaveProperty('name'); - expect(payload['datasetResults'][0]).toHaveProperty('description'); - //expect(payload['datasetResults'][0]).toHaveProperty('keywords');//cant always be expected - - let name = payload['datasetResults'][0]['name'] || ''; - let description = payload['datasetResults'][0]['description'] || ''; - let keywords = payload['datasetResults'][0]['keywords'] || ''; - - let expected = [ - expect.stringMatching(searchString.toLowerCase()), - ]; - - expect([name.toLowerCase(), description.toLowerCase(), keywords.toLowerCase()]).toEqual( - expect.arrayContaining(expected), - ); - }); - - }); - - - test("Search for string 'cancer' dataset limit results to 40, 40 or less results should be returned", async () => { - let searchString = "cancer"; - let maxResults = 40; - - const response = await testURL.get('/api/v1/search?search='+searchString); - expect(response.statusCode).toBe(200); - let payload = JSON.parse(response.text); - - expect(payload).toHaveProperty('success'); - expect(payload).toHaveProperty('datasetResults'); - expect(payload['datasetResults'].length).toBeLessThanOrEqual(maxResults); - expect(payload).toHaveProperty('summary'); - }); - +describe('Search API', () => { + test('Search without any parameters should return at least one result', async () => { + const response = await testURL.get('/api/v1/search'); + expect(response.statusCode).toBe(200); + let payload = JSON.parse(response.text); + + expect(payload).toHaveProperty('success'); + expect(payload).toHaveProperty('datasetResults'); + expect(payload['datasetResults'].length).toBeGreaterThanOrEqual(1); + 
expect(payload).toHaveProperty('summary'); + }); + + ['covid', 'CMMID'].forEach(function (searchString) { + test(`Search for string '${searchString}', first tool result should contain name or description '${searchString}'`, async () => { + const response = await testURL.get('/api/v1/search?search=' + searchString); + expect(response.statusCode).toBe(200); + let payload = JSON.parse(response.text); + + expect(payload).toHaveProperty('success'); + expect(payload).toHaveProperty('toolResults'); + expect(payload['toolResults'].length).toBeGreaterThanOrEqual(1); + expect(payload).toHaveProperty('summary'); + + expect(payload['toolResults'][0]).toHaveProperty('name'); + expect(payload['toolResults'][0]).toHaveProperty('description'); + expect(payload['toolResults'][0]).toHaveProperty('tags'); + + let name = payload['toolResults'][0]['name'].toLowerCase() || ''; + let description = payload['toolResults'][0]['description'].toLowerCase() || ''; + let tags = payload['toolResults'][0]['tags']['topics'].join().toLowerCase() || ''; + let string = searchString.toLowerCase(); + + expect(name.includes(string) || description.includes(string) || tags.includes(string)).toBeTruthy(); + }); + }); + + //add other things to search for here THAT SHOULD NOT RETURN!!! + ['crap', 'zzz'].forEach(function (searchString) { + test(`Search for string '${searchString}', nothing should be returned`, async () => { + const response = await testURL.get('/api/v1/search?search=' + searchString); + expect(response.statusCode).toBe(200); + let payload = JSON.parse(response.text); + + expect(payload).toHaveProperty('success'); + expect(payload).toHaveProperty('toolResults'); + expect(payload['toolResults'].length).toBe(0); + expect(payload).toHaveProperty('summary'); + }); + }); + + ['annual district death daily', 'cancer', 'epilepsy'].forEach(function (searchString) { + test(`Search for string '${searchString}', first dataset result should contain name or description '${searchString}'`, async () => { + const response = await testURL.get('/api/v1/search?search=' + searchString); + expect(response.statusCode).toBe(200); + let payload = JSON.parse(response.text); + expect(payload).toHaveProperty('success'); + expect(payload).toHaveProperty('datasetResults'); + expect(payload['datasetResults'].length).toBeGreaterThanOrEqual(1); + expect(payload).toHaveProperty('summary'); + + expect(payload['datasetResults'][0]).toHaveProperty('name'); + expect(payload['datasetResults'][0]).toHaveProperty('description'); + //expect(payload['datasetResults'][0]).toHaveProperty('keywords');//cant always be expected + + let name = payload['datasetResults'][0]['name'] || ''; + let description = payload['datasetResults'][0]['description'] || ''; + let keywords = payload['datasetResults'][0]['keywords'] || ''; + + let expected = [expect.stringMatching(searchString.toLowerCase())]; + + expect([name.toLowerCase(), description.toLowerCase(), keywords.toLowerCase()]).toEqual(expect.arrayContaining(expected)); + }); + }); + + test("Search for string 'cancer' dataset limit results to 40, 40 or less results should be returned", async () => { + let searchString = 'cancer'; + let maxResults = 40; + + const response = await testURL.get('/api/v1/search?search=' + searchString); + expect(response.statusCode).toBe(200); + let payload = JSON.parse(response.text); + + expect(payload).toHaveProperty('success'); + expect(payload).toHaveProperty('datasetResults'); + expect(payload['datasetResults'].length).toBeLessThanOrEqual(maxResults); + 
expect(payload).toHaveProperty('summary'); + }); });
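// Illustrative sketch — a minimal, standalone distillation of the deadline logic that
// getWorkflowEmailContext applies to an incomplete step, assuming the same moment-based
// date handling used in the diff above. The helper name getDeadlineStatus and its
// parameters are hypothetical and exist only to illustrate the calculation; they are
// not defined anywhere in the repository.
const moment = require('moment');

const getDeadlineStatus = (startDateTime, stepDeadline = 0, reminderOffset = 0) => {
	// No start date or no deadline configured for the step: nothing to flag
	if (!startDateTime || stepDeadline === 0) {
		return { dateDeadline: '', deadlineElapsed: false, deadlineApproaching: false };
	}
	// The deadline is the step start date plus the configured number of days
	const dateDeadline = moment(startDateTime).add(stepDeadline, 'days');
	const deadlineElapsed = moment().isAfter(dateDeadline, 'second');
	// While the deadline has not elapsed, flag it as approaching once the reminder
	// window (deadline minus reminderOffset days) has been entered
	const deadlineApproaching =
		!deadlineElapsed && reminderOffset !== 0 && moment().isAfter(moment(dateDeadline).subtract(reminderOffset, 'days'), 'second');
	return { dateDeadline, deadlineElapsed, deadlineApproaching };
};

// Example: a step that started 5 days ago with a 7 day deadline and the default
// 3 day reminderOffset has not elapsed but is inside the reminder window.
// getDeadlineStatus(moment().subtract(5, 'days'), 7, 3); // => { deadlineElapsed: false, deadlineApproaching: true, ... }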