diff --git a/api.planx.uk/editor/copyFlow.ts b/api.planx.uk/editor/copyFlow.ts deleted file mode 100644 index 8f8e703e0c..0000000000 --- a/api.planx.uk/editor/copyFlow.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { Request, Response, NextFunction } from "express"; -import { makeUniqueFlow, getFlowData, insertFlow } from "../helpers"; -import { Flow } from "../types"; -import { userContext } from "../modules/auth/middleware"; - -const copyFlow = async ( - req: Request, - res: Response, - next: NextFunction, -): Promise => { - try { - if (!req.params?.flowId || !req.body?.replaceValue) { - return next({ - status: 400, - message: "Missing required values to proceed", - }); - } - - // Fetch the original flow - const flow: Flow = await getFlowData(req.params.flowId); - - // Generate new flow data which is an exact "content" copy of the original but with unique nodeIds - const uniqueFlowData = makeUniqueFlow(flow.data, req.body.replaceValue); - - // Check if copied flow data should be inserted into `flows` table, or just returned for reference - const shouldInsert = (req.body?.insert as boolean) || false; - if (shouldInsert) { - const newSlug = flow.slug + "-copy"; - const creatorId = userContext.getStore()?.user?.sub; - if (!creatorId) throw Error("User details missing from request"); - - // Insert the flow and an associated operation - await insertFlow( - flow.team_id, - newSlug, - uniqueFlowData, - parseInt(creatorId), - req.params.flowId, - ); - } - - res.status(200).send({ - message: `Successfully copied ${flow.slug}`, - inserted: shouldInsert, - replaceValue: req.body.replaceValue, - data: uniqueFlowData, - }); - } catch (error) { - return next(error); - } -}; - -export { copyFlow }; diff --git a/api.planx.uk/editor/copyPortalAsFlow.ts b/api.planx.uk/editor/copyPortalAsFlow.ts deleted file mode 100644 index c220942123..0000000000 --- a/api.planx.uk/editor/copyPortalAsFlow.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { getFlowData, getChildren, makeUniqueFlow } from 
"../helpers"; -import { Request, Response, NextFunction } from "express"; -import { Flow } from "../types"; - -/** - * Copies an internal portal and transforms it to be an independent flow - */ -const copyPortalAsFlow = async ( - req: Request, - res: Response, - next: NextFunction, -) => { - try { - // fetch the parent flow data - const flow = await getFlowData(req.params.flowId); - if (!flow) { - return next({ status: 404, message: "Unknown flowId" }); - } - - // confirm that the node id provided is a valid portal - const portalId = req.params.portalNodeId; - if ( - !Object.keys(flow.data).includes(portalId) || - flow.data[portalId]?.type !== 300 - ) { - return next({ status: 404, message: "Unknown portalNodeId" }); - } - - // set the portal node as the new "_root", then extract all its' children from the parent flow and add them to the new flow data object - let portalData: Flow["data"] = { - _root: { edges: flow.data[portalId]?.edges }, - }; - Object.entries(portalData).forEach(([_nodeId, node]) => { - portalData = getChildren(node, flow.data, portalData); - }); - - // to avoid the new flow nodes acting as clones of the original internal portal, rename - // the non-root node ids using the first three alphanumeric characters of the portal name - const replacementCharacters = flow.data[portalId]?.data?.text - ?.replace(/\W/g, "") - ?.slice(0, 3); - portalData = makeUniqueFlow(portalData, replacementCharacters); - - // FUTURE: - // - change GET to POST and write portalData directly to a new flow? - // - assume same team as parent flow and use name of internal portal as slug, or pass in body? - // - update the parent flow to remove the original internal portal and reference this new flow as an external portal? 
- - res.status(200).send({ - message: `Successfully copied internal portal: ${flow.data[portalId]?.data?.text}`, - data: portalData, - }); - } catch (error) { - return next(error); - } -}; - -export { copyPortalAsFlow }; diff --git a/api.planx.uk/editor/findReplace.ts b/api.planx.uk/editor/findReplace.ts deleted file mode 100644 index a0beaae8eb..0000000000 --- a/api.planx.uk/editor/findReplace.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { Flow } from "./../types"; -import { gql } from "graphql-request"; -import { getFlowData } from "../helpers"; -import { Request, Response, NextFunction } from "express"; -import { getClient } from "../client"; -import { FlowGraph } from "@opensystemslab/planx-core/types"; - -interface MatchResult { - matches: Flow["data"]; - flowData: Flow["data"]; -} - -/** - * Find and return the node ids and specific data properties that match a given search term, - * and return an updated copy of the flow data if a replaceValue is provided, else return the original flowData - */ -const getMatches = ( - flowData: Flow["data"], - searchTerm: string, - replaceValue: string | undefined = undefined, -): MatchResult => { - const matches: MatchResult["matches"] = {}; - - const nodes = Object.keys(flowData).filter((key) => key !== "_root"); - nodes.forEach((node) => { - const data = flowData[node]["data"]; - if (data) { - // search all "data" properties independent of component type (eg `fn`, `val`, `text`) - const keys = Object.keys(data); - keys.forEach((k) => { - // if any value strictly matches the searchTerm, add that node id & key to the matches object - if (data[k] === searchTerm) { - matches[node] = { - data: { - [k]: data[k], - }, - }; - // if a replaceValue is provided, additionally update the flowData - if (replaceValue) { - data[k] = replaceValue; - } - } - }); - } - }); - - return { - matches: matches, - flowData: flowData, - }; -}; - -interface UpdateFlow { - flow: { - id: string; - slug: string; - data: FlowGraph; - updatedAt: string; - 
}; -} - -/** - * @swagger - * /flows/{flowId}/search: - * post: - * summary: Find and replace - * description: Find and replace a data variable in a flow - * tags: - * - flows - * parameters: - * - in: path - * name: flowId - * type: string - * required: true - * - in: query - * name: find - * type: string - * required: true - * - in: query - * name: replace - * type: string - * required: false - * responses: - * '200': - * description: OK - * content: - * application/json: - * schema: - * type: object - * properties: - * message: - * type: string - * required: true - * matches: - * type: object - * required: true - * additionalProperties: true - * updatedFlow: - * type: object - * required: false - * additionalProperties: true - * properties: - * _root: - * type: object - * properties: - * edges: - * type: array - * items: - * type: string - */ -const findAndReplaceInFlow = async ( - req: Request, - res: Response, - next: NextFunction, -): Promise => { - try { - const flow = await getFlowData(req.params.flowId); - if (!flow) return next({ status: 401, message: "Unknown flowId" }); - - const { find, replace } = req.query as Record; - if (!find) - return next({ - status: 401, - message: `Expected at least one query parameter "find"`, - }); - - if (find && !replace) { - const matches = getMatches(flow.data, find)["matches"]; - - res.json({ - message: `Found ${ - Object.keys(matches).length - } matches of "${find}" in this flow`, - matches: matches, - }); - } - - if (find && replace) { - const { matches, flowData } = getMatches(flow.data, find, replace); - - // if no matches, send message & exit - if (Object.keys(matches).length === 0) { - res.json({ - message: `Didn't find "${find}" in this flow, nothing to replace`, - }); - } - - // if matches, proceed with mutation to update flow data - const { client: $client } = getClient(); - const response = await $client.request( - gql` - mutation UpdateFlow($data: jsonb = {}, $id: uuid!) 
{ - flow: update_flows_by_pk( - pk_columns: { id: $id } - _set: { data: $data } - ) { - id - slug - data - updatedAt: updated_at - } - } - `, - { - data: flowData, - id: req.params.flowId, - }, - ); - - const updatedFlow = response.flow && response.flow.data; - - res.json({ - message: `Found ${ - Object.keys(matches).length - } matches of "${find}" and replaced with "${replace}"`, - matches: matches, - updatedFlow: updatedFlow, - }); - } - } catch (error) { - next(error); - } -}; - -export { findAndReplaceInFlow }; diff --git a/api.planx.uk/editor/moveFlow.ts b/api.planx.uk/editor/moveFlow.ts deleted file mode 100644 index 0135f81f37..0000000000 --- a/api.planx.uk/editor/moveFlow.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { Request, Response, NextFunction } from "express"; -import { gql } from "graphql-request"; -import { Flow, Team } from "../types"; -import { $public, getClient } from "../client"; - -const moveFlow = async ( - req: Request, - res: Response, - next: NextFunction, -): Promise => { - try { - if (!req.params?.flowId || !req.params?.teamSlug) { - return next({ - status: 400, - message: "Missing required values to proceed", - }); - } - - // Translate teamSlug to teamId - const teamId = await getTeamIdBySlug(req.params.teamSlug); - - // If we have a valid teamId, update the flow record - if (teamId) { - await updateFlow(req.params.flowId, teamId); - res.status(200).send({ - message: `Successfully moved flow to ${req.params.teamSlug}`, - }); - } else { - return next({ - status: 400, - message: `Unable to find a team matching slug ${req.params.teamSlug}, exiting move`, - }); - } - } catch (error) { - return next(error); - } -}; - -interface GetTeam { - teams: Pick[]; -} - -const getTeamIdBySlug = async (slug: Team["slug"]): Promise => { - const data = await $public.client.request( - gql` - query GetTeam($slug: String!) 
{ - teams(where: { slug: { _eq: $slug } }) { - id - } - } - `, - { - slug: slug, - }, - ); - - return data?.teams[0].id; -}; - -interface UpdateFlow { - flow: Pick; -} - -const updateFlow = async ( - flowId: Flow["id"], - teamId: Team["id"], -): Promise => { - const { client: $client } = getClient(); - const { flow } = await $client.request( - gql` - mutation UpdateFlow($id: uuid!, $team_id: Int!) { - flow: update_flows_by_pk( - pk_columns: { id: $id } - _set: { team_id: $team_id } - ) { - id - } - } - `, - { - id: flowId, - team_id: teamId, - }, - ); - - return flow.id; -}; - -export { moveFlow }; diff --git a/api.planx.uk/modules/flows/copyFlow/controller.ts b/api.planx.uk/modules/flows/copyFlow/controller.ts new file mode 100644 index 0000000000..c3bfcfc82a --- /dev/null +++ b/api.planx.uk/modules/flows/copyFlow/controller.ts @@ -0,0 +1,54 @@ +import { z } from "zod"; +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { Flow } from "../../../types"; +import { ServerError } from "../../../errors"; +import { copyFlow } from "./service"; + +interface CopyFlowResponse { + message: string; + inserted: boolean; + replaceValue: string; + data: Flow["data"]; +} + +export const copyFlowSchema = z.object({ + params: z.object({ + flowId: z.string(), + }), + body: z.object({ + replaceValue: z.string().length(5), + insert: z.boolean().optional().default(false), + }), +}); + +export type CopyFlowController = ValidatedRequestHandler< + typeof copyFlowSchema, + CopyFlowResponse +>; + +export const copyFlowController: CopyFlowController = async ( + req, + res, + next, +) => { + try { + const { flowId } = res.locals.parsedReq.params; + const { replaceValue, insert } = res.locals.parsedReq.body; + const { flow, uniqueFlowData } = await copyFlow( + flowId, + replaceValue, + insert, + ); + + res.status(200).send({ + message: `Successfully copied ${flow.slug}`, + inserted: insert, + replaceValue: replaceValue, + data: uniqueFlowData, + }); + } 
catch (error) { + return next( + new ServerError({ message: "Failed to copy flow", cause: error }), + ); + } +}; diff --git a/api.planx.uk/editor/copyFlow.test.ts b/api.planx.uk/modules/flows/copyFlow/copyFlow.test.ts similarity index 84% rename from api.planx.uk/editor/copyFlow.test.ts rename to api.planx.uk/modules/flows/copyFlow/copyFlow.test.ts index 15c683719f..30bef2e79e 100644 --- a/api.planx.uk/editor/copyFlow.test.ts +++ b/api.planx.uk/modules/flows/copyFlow/copyFlow.test.ts @@ -1,9 +1,9 @@ import supertest from "supertest"; -import { queryMock } from "../tests/graphqlQueryMock"; -import { authHeader } from "../tests/mockJWT"; -import app from "../server"; -import { Flow } from "../types"; +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader } from "../../../tests/mockJWT"; +import app from "../../../server"; +import { Flow } from "../../../types"; beforeEach(() => { queryMock.mockQuery({ @@ -42,7 +42,7 @@ const auth = authHeader({ role: "teamEditor" }); it("returns an error if authorization headers are not set", async () => { const validBody = { insert: false, - replaceValue: "T3ST", + replaceValue: "T3ST1", }; await supertest(app) @@ -59,7 +59,7 @@ it("returns an error if authorization headers are not set", async () => { it("returns an error if the user does not have the correct role", async () => { const validBody = { insert: false, - replaceValue: "T3ST", + replaceValue: "T3ST1", }; await supertest(app) @@ -80,16 +80,15 @@ it("returns an error if required replacement characters are not provided in the .set(auth) .expect(400) .then((res) => { - expect(res.body).toEqual({ - error: "Missing required values to proceed", - }); + expect(res.body).toHaveProperty("issues"); + expect(res.body).toHaveProperty("name", "ZodError"); }); }); it("returns copied unique flow data without inserting a new record", async () => { const body = { insert: false, - replaceValue: "T3ST", + replaceValue: "T3ST1", }; await supertest(app) @@ -105,7 
+104,7 @@ it("returns copied unique flow data without inserting a new record", async () => it("inserts copied unique flow data", async () => { const body = { insert: true, - replaceValue: "T3ST", + replaceValue: "T3ST1", }; await supertest(app) @@ -154,28 +153,28 @@ const mockFlowData: Flow["data"] = { // the copied flow data with unique nodeIds using the replaceValue const mockCopiedFlowData: Flow["data"] = { _root: { - edges: ["rUilJQT3ST", "kNX8ReT3ST"], + edges: ["rUilJT3ST1", "kNX8RT3ST1"], }, - rUilJQT3ST: { + rUilJT3ST1: { type: 100, data: { text: "Copy or paste?", }, - edges: ["Yh7t91T3ST", "h8DSw4T3ST"], + edges: ["Yh7t9T3ST1", "h8DSwT3ST1"], }, - Yh7t91T3ST: { + Yh7t9T3ST1: { type: 200, data: { text: "Copy", }, }, - h8DSw4T3ST: { + h8DSwT3ST1: { type: 200, data: { text: "Paste", }, }, - kNX8ReT3ST: { + kNX8RT3ST1: { type: 110, data: { title: "Why do you want to copy this flow?", @@ -187,13 +186,13 @@ const mockCopiedFlowData: Flow["data"] = { const mockCopyFlowResponse = { message: `Successfully copied undefined`, // 'undefined' just reflects that we haven't mocked a flow.name here! 
inserted: false, - replaceValue: "T3ST", + replaceValue: "T3ST1", data: mockCopiedFlowData, }; const mockCopyFlowResponseInserted = { message: `Successfully copied undefined`, inserted: true, - replaceValue: "T3ST", + replaceValue: "T3ST1", data: mockCopiedFlowData, }; diff --git a/api.planx.uk/modules/flows/copyFlow/service.ts b/api.planx.uk/modules/flows/copyFlow/service.ts new file mode 100644 index 0000000000..d79707388a --- /dev/null +++ b/api.planx.uk/modules/flows/copyFlow/service.ts @@ -0,0 +1,35 @@ +import { makeUniqueFlow, getFlowData, insertFlow } from "../../../helpers"; +import { Flow } from "../../../types"; +import { userContext } from "../../auth/middleware"; + +const copyFlow = async ( + flowId: string, + replaceValue: string, + insert: boolean, +) => { + // Fetch the original flow + const flow: Flow = await getFlowData(flowId); + + // Generate new flow data which is an exact "content" copy of the original but with unique nodeIds + const uniqueFlowData = makeUniqueFlow(flow.data, replaceValue); + + // Check if copied flow data should be inserted into `flows` table, or just returned for reference + if (insert) { + const newSlug = flow.slug + "-copy"; + const creatorId = userContext.getStore()?.user?.sub; + if (!creatorId) throw Error("User details missing from request"); + + // Insert the flow and an associated operation + await insertFlow( + flow.team_id, + newSlug, + uniqueFlowData, + parseInt(creatorId), + flowId, + ); + } + + return { flow, uniqueFlowData }; +}; + +export { copyFlow }; diff --git a/api.planx.uk/modules/flows/copyFlowAsPortal/controller.ts b/api.planx.uk/modules/flows/copyFlowAsPortal/controller.ts new file mode 100644 index 0000000000..be3892ca8f --- /dev/null +++ b/api.planx.uk/modules/flows/copyFlowAsPortal/controller.ts @@ -0,0 +1,44 @@ +import { z } from "zod"; +import { Flow } from "../../../types"; +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { copyPortalAsFlow } from "./service"; 
+import { ServerError } from "../../../errors"; + +interface CopyFlowAsPortalResponse { + message: string; + data: Flow["data"]; +} + +export const copyFlowAsPortalSchema = z.object({ + params: z.object({ + flowId: z.string(), + portalNodeId: z.string(), + }), +}); + +export type CopyFlowAsPortalController = ValidatedRequestHandler< + typeof copyFlowAsPortalSchema, + CopyFlowAsPortalResponse +>; + +const copyPortalAsFlowController: CopyFlowAsPortalController = async ( + _req, + res, + next, +) => { + try { + const { flowId, portalNodeId } = res.locals.parsedReq.params; + const { flow, portalData } = await copyPortalAsFlow(flowId, portalNodeId); + + res.status(200).send({ + message: `Successfully copied internal portal: ${flow.data[portalNodeId]?.data?.text}`, + data: portalData, + }); + } catch (error) { + return next( + new ServerError({ message: `Failed to copy flow as portal: ${error}` }), + ); + } +}; + +export { copyPortalAsFlowController }; diff --git a/api.planx.uk/editor/copyPortalAsFlow.test.ts b/api.planx.uk/modules/flows/copyFlowAsPortal/copyPortalAsFlow.test.ts similarity index 88% rename from api.planx.uk/editor/copyPortalAsFlow.test.ts rename to api.planx.uk/modules/flows/copyFlowAsPortal/copyPortalAsFlow.test.ts index a13fe8baa5..7d4868aba4 100644 --- a/api.planx.uk/editor/copyPortalAsFlow.test.ts +++ b/api.planx.uk/modules/flows/copyFlowAsPortal/copyPortalAsFlow.test.ts @@ -1,9 +1,9 @@ import supertest from "supertest"; -import { queryMock } from "../tests/graphqlQueryMock"; -import { authHeader } from "../tests/mockJWT"; -import app from "../server"; -import { Flow } from "../types"; +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader } from "../../../tests/mockJWT"; +import app from "../../../server"; +import { Flow } from "../../../types"; beforeEach(() => { queryMock.mockQuery({ @@ -18,31 +18,30 @@ beforeEach(() => { }); it("requires a user to be logged in", async () => { - await 
supertest(app).get("/flows/1/copy-portal/eyOm0NyDSl").expect(401); + await supertest(app).put("/flows/1/copy-portal/eyOm0NyDSl").expect(401); }); it("requires a user to have the 'platformAdmin' role", async () => { await supertest(app) - .get("/flows/1/copy-portal/eyOm0NyDSl") + .put("/flows/1/copy-portal/eyOm0NyDSl") .set(authHeader({ role: "teamEditor" })) .expect(403); }); it("throws an error if the portalNodeId parameter is not a portal (type = 300)", async () => { await supertest(app) - .get("/flows/1/copy-portal/eyOm0NyDSl") + .put("/flows/1/copy-portal/eyOm0NyDSl") .set(authHeader({ role: "platformAdmin" })) - .expect(404) + .expect(500) .then((res) => { - expect(res.body).toEqual({ - error: "Unknown portalNodeId", - }); + expect(res.body.error).toMatch(/Failed to copy flow as portal/); + expect(res.body.error).toMatch(/Unknown portalNodeId/); }); }); it("returns transformed, unique flow data for a valid internal portal", async () => { await supertest(app) - .get("/flows/1/copy-portal/MgCe3pSTrt") + .put("/flows/1/copy-portal/MgCe3pSTrt") .set(authHeader({ role: "platformAdmin" })) .expect(200) .then((res) => { diff --git a/api.planx.uk/modules/flows/copyFlowAsPortal/service.ts b/api.planx.uk/modules/flows/copyFlowAsPortal/service.ts new file mode 100644 index 0000000000..118c72bd63 --- /dev/null +++ b/api.planx.uk/modules/flows/copyFlowAsPortal/service.ts @@ -0,0 +1,42 @@ +import { getFlowData, getChildren, makeUniqueFlow } from "../../../helpers"; +import { Flow } from "../../../types"; + +/** + * Copies an internal portal and transforms it to be an independent flow + */ +const copyPortalAsFlow = async (flowId: string, portalNodeId: string) => { + // fetch the parent flow data + const flow = await getFlowData(flowId); + if (!flow) throw Error("Unknown flowId"); + + // confirm that the node id provided is a valid portal + if ( + !Object.keys(flow.data).includes(portalNodeId) || + flow.data[portalNodeId]?.type !== 300 + ) { + throw Error("Unknown 
portalNodeId"); + } + + // set the portal node as the new "_root", then extract all its children from the parent flow and add them to the new flow data object + let portalData: Flow["data"] = { + _root: { edges: flow.data[portalNodeId]?.edges }, + }; + Object.entries(portalData).forEach(([_nodeId, node]) => { + portalData = getChildren(node, flow.data, portalData); + }); + + // to avoid the new flow nodes acting as clones of the original internal portal, rename + // the non-root node ids using the first three alphanumeric characters of the portal name + const replacementCharacters = flow.data[portalNodeId]?.data?.text + ?.replace(/\W/g, "") + ?.slice(0, 3); + portalData = makeUniqueFlow(portalData, replacementCharacters); + + // FUTURE: + // - change PUT to POST and write portalData directly to a new flow? + // - assume same team as parent flow and use name of internal portal as slug, or pass in body? + // - update the parent flow to remove the original internal portal and reference this new flow as an external portal? 
+ return { flow, portalData }; +}; + +export { copyPortalAsFlow }; diff --git a/api.planx.uk/modules/flows/docs.yaml b/api.planx.uk/modules/flows/docs.yaml new file mode 100644 index 0000000000..76d25589ce --- /dev/null +++ b/api.planx.uk/modules/flows/docs.yaml @@ -0,0 +1,260 @@ +openapi: 3.1.0 +info: + title: Plan✕ API + version: 0.1.0 +tags: + name: flows + description: Flow associated requests +components: + parameters: + flowId: + in: path + name: flowId + type: string + required: true + teamId: + in: path + name: teamId + type: string + required: true + portalNodeId: + in: path + name: portalNodeId + type: string + required: true + schemas: + Node: + type: object + properties: + id: string + type: number + data: object + edges: + type: array + items: + type: string + CopyFlow: + type: object + properties: + replaceValue: + type: string + example: ab123 + length: 5 + description: When copying a flow, we make nodeIds unique by replacing part of the original nodeId string + required: true + insert: + type: boolean + description: Flag to indicate whether the copied flow should be inserted into the database, or simply returned in the response body + FlowData: + type: object + additionalProperties: true + properties: + _root: + type: object + properties: + edges: + type: array + items: + type: string + responses: + CopyFlow: + content: + application/json: + schema: + type: object + properties: + message: + type: string + inserted: + type: boolean + replaceValue: + type: string + length: 5 + data: + $ref: "#/components/schemas/FlowData" + CopyFlowAsPortal: + content: + application/json: + schema: + type: object + properties: + message: + type: string + data: + $ref: "#/components/schemas/FlowData" + FindAndReplace: + content: + application/json: + schema: + type: object + properties: + message: + type: string + required: true + matches: + oneOf: + - $ref: "#/components/schemas/FlowData" + - type: "null" + updatedFlow: + $ref: "#/components/schemas/FlowData" + 
required: false + PublishFlow: + content: + application/json: + schema: + type: object + properties: + message: + type: string + required: true + alteredNodes: + oneOf: + - type: array + items: + $ref: "#/components/schemas/Node" + - type: "null" + updatedFlow: + $ref: "#/components/schemas/FlowData" + required: false + ValidateAndDiff: + content: + application/json: + schema: + type: object + properties: + message: + type: string + required: false + description: + type: string + required: false + alteredNodes: + oneOf: + - type: array + items: + $ref: "#/components/schemas/Node" + - type: "null" + updatedFlow: + $ref: "#/components/schemas/FlowData" + required: false +paths: + /flows/{flowId}/copy: + post: + summary: Copy a flow + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CopyFlow" + responses: + "200": + $ref: "#/components/responses/CopyFlow" + "500": + $ref: "#/components/responses/ErrorMessage" + /flows/{flowId}/copy-portal/{portalNodeId}: + put: + summary: Create a new flow from a portal + description: Copies an internal portal and transforms it to be an independent flow + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + - $ref: "#/components/parameters/portalNodeId" + responses: + "200": + $ref: "#/components/responses/CopyFlowAsPortal" + "500": + $ref: "#/components/responses/ErrorMessage" + /flows/{flowId}/search: + post: + summary: Find and replace + description: Find and replace a data variable in a flow + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + - in: query + name: find + type: string + required: true + - in: query + name: replace + type: string + required: false + responses: + "200": + $ref: "#/components/responses/FindAndReplace" + "500": + $ref: 
"#/components/responses/ErrorMessage" + /flows/{flowId}/move/{teamSlug}: + post: + summary: Move a flow + description: Move ownership of a flow from one team to another + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + - $ref: "#/components/parameters/teamId" + responses: + "200": + $ref: "#/components/responses/SuccessMessage" + "500": + $ref: "#/components/responses/ErrorMessage" + /flows/{flowId}/publish: + post: + summary: Publish a flow + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + - in: query + name: summary + type: text + required: false + description: Optional text to summarise the published changes + responses: + "200": + $ref: "#/components/responses/PublishFlow" + "500": + $ref: "#/components/responses/ErrorMessage" + /flows/{flowId}/diff: + post: + summary: Diff and validate a flow + description: Validate and view the diff between the current unpublished version of a flow and the most recently published version + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + responses: + "200": + $ref: "#/components/responses/ValidateAndDiff" + "500": + $ref: "#/components/responses/ErrorMessage" + /flows/{flowId}/download-schema: + post: + summary: Download flow schema + description: Download a CSV file representing the flow's schema + tags: ["flows"] + security: + - bearerAuth: [] + parameters: + - $ref: "#/components/parameters/flowId" + responses: + "200": + content: + text/csv: + schema: + type: string + "500": + $ref: "#/components/responses/ErrorMessage" diff --git a/api.planx.uk/modules/flows/downloadSchema/controller.ts b/api.planx.uk/modules/flows/downloadSchema/controller.ts new file mode 100644 index 0000000000..95113bacc4 --- /dev/null +++ b/api.planx.uk/modules/flows/downloadSchema/controller.ts @@ -0,0 +1,41 @@ +import { z } from "zod"; +import { ValidatedRequestHandler } from 
"../../../shared/middleware/validate"; +import { stringify } from "csv-stringify"; +import { getFlowSchema } from "./service"; +import { ServerError } from "../../../errors"; + +interface DownloadFlowSchemaResponse { + message: string; + alteredNodes: Node[] | null; + description?: string; +} + +export const downloadFlowSchema = z.object({ + params: z.object({ + flowId: z.string(), + }), +}); + +export type DownloadFlowSchemaController = ValidatedRequestHandler< + typeof downloadFlowSchema, + DownloadFlowSchemaResponse +>; + +export const downloadFlowSchemaController: DownloadFlowSchemaController = + async (_res, res, next) => { + try { + const { flowId } = res.locals.parsedReq.params; + const flowSchema = await getFlowSchema(flowId); + + // Build a CSV and stream it + stringify(flowSchema, { header: true }).pipe(res); + res.header("Content-type", "text/csv"); + res.attachment(`${flowId}.csv`); + } catch (error) { + return next( + new ServerError({ + message: `Failed to download flow schema: ${error}`, + }), + ); + } + }; diff --git a/api.planx.uk/modules/flows/downloadSchema/service.ts b/api.planx.uk/modules/flows/downloadSchema/service.ts new file mode 100644 index 0000000000..8c5ae5211c --- /dev/null +++ b/api.planx.uk/modules/flows/downloadSchema/service.ts @@ -0,0 +1,35 @@ +import { $public } from "../../../client"; +import { gql } from "graphql-request"; + +interface FlowSchema { + node: string; + type: string; + text: string; + planx_variable: string; +} + +export const getFlowSchema = async (flowId: string) => { + const { flowSchema } = await $public.client.request<{ + flowSchema: FlowSchema[]; + }>( + gql` + query ($flow_id: String!) { + flowSchema: get_flow_schema(args: { published_flow_id: $flow_id }) { + node + type + text + planx_variable + } + } + `, + { flow_id: flowId }, + ); + + if (!flowSchema.length) { + throw Error( + "Can't find a schema for this flow. 
Make sure it's published or try a different flow id.", + ); + } + + return flowSchema; +}; diff --git a/api.planx.uk/modules/flows/findReplace/controller.ts b/api.planx.uk/modules/flows/findReplace/controller.ts new file mode 100644 index 0000000000..c64387dc64 --- /dev/null +++ b/api.planx.uk/modules/flows/findReplace/controller.ts @@ -0,0 +1,51 @@ +import { Flow } from "../../../types"; +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { z } from "zod"; +import { ServerError } from "../../../errors"; +import { findAndReplaceInFlow } from "./service"; +import { FlowGraph } from "@opensystemslab/planx-core/types"; + +interface FindAndReplaceResponse { + message: string; + matches: Flow["data"] | null; + updatedFlow?: FlowGraph; +} + +export const findAndReplaceSchema = z.object({ + params: z.object({ + flowId: z.string(), + }), + query: z.object({ + find: z.string(), + replace: z.string().optional(), + }), +}); + +export type FindAndReplaceController = ValidatedRequestHandler< + typeof findAndReplaceSchema, + FindAndReplaceResponse +>; + +const findAndReplaceController: FindAndReplaceController = async ( + _req, + res, + next, +) => { + try { + const { flowId } = res.locals.parsedReq.params; + const { find, replace } = res.locals.parsedReq.query; + const { matches, updatedFlow, message } = await findAndReplaceInFlow( + flowId, + find, + replace, + ); + + res.json({ message, matches, updatedFlow }); + } catch (error) { + return next( + new ServerError({ message: `Failed to find and replace: ${error}` }), + ); + } +}; + +export { findAndReplaceController }; diff --git a/api.planx.uk/editor/findReplace.test.ts b/api.planx.uk/modules/flows/findReplace/findReplace.test.ts similarity index 93% rename from api.planx.uk/editor/findReplace.test.ts rename to api.planx.uk/modules/flows/findReplace/findReplace.test.ts index f1ded631ee..85202976d6 100644 --- a/api.planx.uk/editor/findReplace.test.ts +++ 
b/api.planx.uk/modules/flows/findReplace/findReplace.test.ts @@ -1,9 +1,9 @@ import supertest from "supertest"; -import { queryMock } from "../tests/graphqlQueryMock"; -import { authHeader } from "../tests/mockJWT"; -import app from "../server"; -import { Flow } from "../types"; +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader } from "../../../tests/mockJWT"; +import app from "../../../server"; +import { Flow } from "../../../types"; beforeEach(() => { queryMock.mockQuery({ @@ -46,11 +46,10 @@ it("throws an error if missing query parameter `find`", async () => { await supertest(app) .post("/flows/1/search") .set(auth) - .expect(401) + .expect(400) .then((res) => { - expect(res.body).toEqual({ - error: `Expected at least one query parameter "find"`, - }); + expect(res.body).toHaveProperty("issues"); + expect(res.body).toHaveProperty("name", "ZodError"); }); }); @@ -86,6 +85,7 @@ it("does not replace if no matches are found", async () => { .then((res) => { expect(res.body).toEqual({ message: `Didn't find "bananas" in this flow, nothing to replace`, + matches: null, }); }); }); diff --git a/api.planx.uk/modules/flows/findReplace/service.ts b/api.planx.uk/modules/flows/findReplace/service.ts new file mode 100644 index 0000000000..471b2718d7 --- /dev/null +++ b/api.planx.uk/modules/flows/findReplace/service.ts @@ -0,0 +1,116 @@ +import { gql } from "graphql-request"; +import { getFlowData } from "../../../helpers"; +import { getClient } from "../../../client"; +import { FlowGraph } from "@opensystemslab/planx-core/types"; +import { Flow } from "../../../types"; + +interface MatchResult { + matches: Flow["data"]; + flowData: Flow["data"]; +} + +/** + * Find and return the node ids and specific data properties that match a given search term, + * and return an updated copy of the flow data if a replaceValue is provided, else return the original flowData + */ +const getMatches = ( + flowData: Flow["data"], + searchTerm: string, + 
replaceValue: string | undefined = undefined, +): MatchResult => { + const matches: MatchResult["matches"] = {}; + + const nodes = Object.keys(flowData).filter((key) => key !== "_root"); + nodes.forEach((node) => { + const data = flowData[node]["data"]; + if (data) { + // search all "data" properties independent of component type (eg `fn`, `val`, `text`) + const keys = Object.keys(data); + keys.forEach((k) => { + // if any value strictly matches the searchTerm, add that node id & key to the matches object + if (data[k] === searchTerm) { + matches[node] = { + data: { + [k]: data[k], + }, + }; + // if a replaceValue is provided, additionally update the flowData + if (replaceValue) { + data[k] = replaceValue; + } + } + }); + } + }); + + return { + matches: matches, + flowData: flowData, + }; +}; + +interface UpdateFlow { + flow: { + id: string; + slug: string; + data: FlowGraph; + updatedAt: string; + }; +} + +const findAndReplaceInFlow = async ( + flowId: string, + find: string, + replace?: string, +) => { + const flow = await getFlowData(flowId); + if (!flow) throw Error("Unknown flowId"); + + // Find + if (!replace) { + const { matches } = getMatches(flow.data, find); + const message = `Found ${ + Object.keys(matches).length + } matches of "${find}" in this flow`; + return { matches, message }; + } + + // Find & Replace + const { matches, flowData } = getMatches(flow.data, find, replace); + + if (Object.keys(matches).length === 0) { + const message = `Didn't find "${find}" in this flow, nothing to replace`; + return { matches: null, message }; + } + + // if matches, proceed with mutation to update flow data + const { client: $client } = getClient(); + const response = await $client.request( + gql` + mutation UpdateFlow($data: jsonb = {}, $id: uuid!) 
{ + flow: update_flows_by_pk( + pk_columns: { id: $id } + _set: { data: $data } + ) { + id + slug + data + updatedAt: updated_at + } + } + `, + { + data: flowData, + id: flowId, + }, + ); + + const updatedFlow = response.flow && response.flow.data; + const message = `Found ${ + Object.keys(matches).length + } matches of "${find}" and replaced with "${replace}"`; + + return { matches, message, updatedFlow }; +}; + +export { findAndReplaceInFlow }; diff --git a/api.planx.uk/modules/flows/moveFlow/controller.ts b/api.planx.uk/modules/flows/moveFlow/controller.ts new file mode 100644 index 0000000000..319ac0c1ca --- /dev/null +++ b/api.planx.uk/modules/flows/moveFlow/controller.ts @@ -0,0 +1,37 @@ +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { z } from "zod"; +import { ServerError } from "../../../errors"; +import { moveFlow } from "./service"; + +interface MoveFlowResponse { + message: string; +} + +export const moveFlowSchema = z.object({ + params: z.object({ + flowId: z.string(), + teamSlug: z.string(), + }), +}); + +export type MoveFlowController = ValidatedRequestHandler< + typeof moveFlowSchema, + MoveFlowResponse +>; + +export const moveFlowController: MoveFlowController = async ( + _req, + res, + next, +) => { + try { + const { flowId, teamSlug } = res.locals.parsedReq.params; + await moveFlow(flowId, teamSlug); + + res.status(200).send({ + message: `Successfully moved flow to ${teamSlug}`, + }); + } catch (error) { + return next(new ServerError({ message: `Failed to move flow: ${error}` })); + } +}; diff --git a/api.planx.uk/editor/moveFlow.test.ts b/api.planx.uk/modules/flows/moveFlow/moveFlow.test.ts similarity index 87% rename from api.planx.uk/editor/moveFlow.test.ts rename to api.planx.uk/modules/flows/moveFlow/moveFlow.test.ts index f8d377777e..f7a95196d0 100644 --- a/api.planx.uk/editor/moveFlow.test.ts +++ b/api.planx.uk/modules/flows/moveFlow/moveFlow.test.ts @@ -1,12 +1,12 @@ import supertest from 
"supertest"; -import { queryMock } from "../tests/graphqlQueryMock"; -import { authHeader } from "../tests/mockJWT"; -import app from "../server"; +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader } from "../../../tests/mockJWT"; +import app from "../../../server"; beforeEach(() => { queryMock.mockQuery({ - name: "GetTeam", + name: "GetTeamBySlug", variables: { slug: "new-team", }, diff --git a/api.planx.uk/modules/flows/moveFlow/service.ts b/api.planx.uk/modules/flows/moveFlow/service.ts new file mode 100644 index 0000000000..e77a811422 --- /dev/null +++ b/api.planx.uk/modules/flows/moveFlow/service.ts @@ -0,0 +1,42 @@ +import { gql } from "graphql-request"; +import { Flow, Team } from "../../../types"; +import { $public, getClient } from "../../../client"; + +export const moveFlow = async (flowId: string, teamSlug: string) => { + const team = await $public.team.getBySlug(teamSlug); + if (!team) + throw Error( + `Unable to find a team matching slug ${teamSlug}, exiting move`, + ); + + await updateFlow(flowId, team.id); +}; + +interface UpdateFlow { + flow: Pick; +} + +const updateFlow = async ( + flowId: Flow["id"], + teamId: Team["id"], +): Promise => { + const { client: $client } = getClient(); + const { flow } = await $client.request( + gql` + mutation UpdateFlow($id: uuid!, $team_id: Int!) 
{ + flow: update_flows_by_pk( + pk_columns: { id: $id } + _set: { team_id: $team_id } + ) { + id + } + } + `, + { + id: flowId, + team_id: teamId, + }, + ); + + return flow.id; +}; diff --git a/api.planx.uk/modules/flows/publish/controller.ts b/api.planx.uk/modules/flows/publish/controller.ts new file mode 100644 index 0000000000..2eb95a8ef2 --- /dev/null +++ b/api.planx.uk/modules/flows/publish/controller.ts @@ -0,0 +1,45 @@ +import { Node } from "@opensystemslab/planx-core/types"; +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { z } from "zod"; +import { publishFlow } from "./service"; +import { ServerError } from "../../../errors"; + +interface PublishFlowResponse { + message: string; + alteredNodes: Node[] | null; +} + +export const publishFlowSchema = z.object({ + params: z.object({ + flowId: z.string(), + }), + query: z.object({ + summary: z.string().optional(), + }), +}); + +export type PublishFlowController = ValidatedRequestHandler< + typeof publishFlowSchema, + PublishFlowResponse +>; + +export const publishFlowController: PublishFlowController = async ( + _req, + res, + next, +) => { + try { + const { flowId } = res.locals.parsedReq.params; + const { summary } = res.locals.parsedReq.query; + const alteredNodes = await publishFlow(flowId, summary); + + return res.json({ + alteredNodes, + message: alteredNodes ? 
"Changes published" : "No new changes to publish", + }); + } catch (error) { + return next( + new ServerError({ message: `Failed to publish flow: ${error}` }), + ); + } +}; diff --git a/api.planx.uk/modules/flows/publish/publish.test.ts b/api.planx.uk/modules/flows/publish/publish.test.ts new file mode 100644 index 0000000000..3f1f5173c5 --- /dev/null +++ b/api.planx.uk/modules/flows/publish/publish.test.ts @@ -0,0 +1,156 @@ +import supertest from "supertest"; + +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader, getJWT } from "../../../tests/mockJWT"; +import app from "../../../server"; +import { userContext } from "../../auth/middleware"; +import { mockFlowData } from "../../../tests/mocks/validateAndPublishMocks"; + +beforeAll(() => { + const getStoreMock = jest.spyOn(userContext, "getStore"); + getStoreMock.mockReturnValue({ + user: { + sub: "123", + jwt: getJWT({ role: "teamEditor" }), + }, + }); +}); + +beforeEach(() => { + queryMock.mockQuery({ + name: "GetFlowData", + matchOnVariables: false, + data: { + flow: { + data: mockFlowData, + }, + }, + }); + + queryMock.mockQuery({ + name: "GetMostRecentPublishedFlow", + matchOnVariables: false, + data: { + flow: { + publishedFlows: [ + { + data: mockFlowData, + }, + ], + }, + }, + }); + + queryMock.mockQuery({ + name: "PublishFlow", + matchOnVariables: false, + data: { + publishedFlow: { + data: mockFlowData, + }, + }, + }); +}); + +const auth = authHeader({ role: "platformAdmin" }); + +it("requires a user to be logged in", async () => { + await supertest(app).post("/flows/1/publish").expect(401); +}); + +it("requires a user to have the 'teamEditor' role", async () => { + await supertest(app) + .post("/flows/1/publish") + .set(authHeader({ role: "teamViewer" })) + .expect(403); +}); + +describe("publish", () => { + it("publishes for the first time", async () => { + queryMock.mockQuery({ + name: "GetMostRecentPublishedFlow", + matchOnVariables: false, + data: { + flow: { + 
publishedFlows: [], + }, + }, + }); + + await supertest(app).post("/flows/1/publish").set(auth).expect(200); + }); + + it("does not update if there are no new changes", async () => { + await supertest(app) + .post("/flows/1/publish") + .set(auth) + .expect(200) + .then((res) => { + expect(res.body).toEqual({ + alteredNodes: null, + message: "No new changes to publish", + }); + }); + }); + + it("updates published flow and returns altered nodes if there have been changes", async () => { + const alteredFlow = { + ...mockFlowData, + ResultNode: { + data: { + flagSet: "Planning permission", + overrides: { + NO_APP_REQUIRED: { + heading: "Some Other Heading", + }, + }, + }, + type: 3, + }, + }; + + queryMock.mockQuery({ + name: "GetFlowData", + matchOnVariables: false, + data: { + flow: { + data: alteredFlow, + }, + }, + }); + + queryMock.mockQuery({ + name: "PublishFlow", + matchOnVariables: false, + data: { + publishedFlow: { + data: alteredFlow, + }, + }, + }); + + await supertest(app) + .post("/flows/1/publish") + .set(auth) + .expect(200) + .then((res) => { + expect(res.body).toEqual({ + message: "Changes published", + alteredNodes: [ + { + id: "ResultNode", + type: 3, + data: { + flagSet: "Planning permission", + overrides: { + NO_APP_REQUIRED: { + heading: "Some Other Heading", + }, + }, + }, + }, + ], + }); + }); + }); +}); diff --git a/api.planx.uk/modules/flows/publish/service.ts b/api.planx.uk/modules/flows/publish/service.ts new file mode 100644 index 0000000000..ee7d03da74 --- /dev/null +++ b/api.planx.uk/modules/flows/publish/service.ts @@ -0,0 +1,69 @@ +import * as jsondiffpatch from "jsondiffpatch"; +import { dataMerged, getMostRecentPublishedFlow } from "../../../helpers"; +import { gql } from "graphql-request"; +import { FlowGraph, Node } from "@opensystemslab/planx-core/types"; +import { userContext } from "../../auth/middleware"; +import { getClient } from "../../../client"; + +interface PublishFlow { + publishedFlow: { + id: string; + flowId: string; 
+ publisherId: string; + createdAt: string; + data: FlowGraph; + }; +} + +export const publishFlow = async (flowId: string, summary?: string) => { + const userId = userContext.getStore()?.user?.sub; + if (!userId) throw Error("User details missing from request"); + + const flattenedFlow = await dataMerged(flowId); + const mostRecent = await getMostRecentPublishedFlow(flowId); + const delta = jsondiffpatch.diff(mostRecent, flattenedFlow); + + if (!delta) return null; + + const { client: $client } = getClient(); + const response = await $client.request( + gql` + mutation PublishFlow( + $data: jsonb = {} + $flow_id: uuid + $publisher_id: Int + $summary: String + ) { + publishedFlow: insert_published_flows_one( + object: { + data: $data + flow_id: $flow_id + publisher_id: $publisher_id + summary: $summary + } + ) { + id + flowId: flow_id + publisherId: publisher_id + createdAt: created_at + data + } + } + `, + { + data: flattenedFlow, + flow_id: flowId, + publisher_id: parseInt(userId), + summary: summary ?? 
null, + }, + ); + + const publishedFlow = response.publishedFlow && response.publishedFlow.data; + + const alteredNodes: Node[] = Object.keys(delta).map((key) => ({ + id: key, + ...publishedFlow[key], + })); + + return alteredNodes; +}; diff --git a/api.planx.uk/modules/flows/routes.ts b/api.planx.uk/modules/flows/routes.ts new file mode 100644 index 0000000000..d74ad9422c --- /dev/null +++ b/api.planx.uk/modules/flows/routes.ts @@ -0,0 +1,74 @@ +import { Router } from "express"; +import { usePlatformAdminAuth, useTeamEditorAuth } from "../auth/middleware"; +import { publishFlowController } from "./publish/controller"; +import { copyFlowController, copyFlowSchema } from "./copyFlow/controller"; +import { validate } from "../../shared/middleware/validate"; +import { + copyFlowAsPortalSchema, + copyPortalAsFlowController, +} from "./copyFlowAsPortal/controller"; +import { + findAndReplaceController, + findAndReplaceSchema, +} from "./findReplace/controller"; +import { moveFlowController, moveFlowSchema } from "./moveFlow/controller"; +import { + validateAndDiffFlowController, + validateAndDiffSchema, +} from "./validate/controller"; +import { publishFlowSchema } from "./publish/controller"; +import { + downloadFlowSchema, + downloadFlowSchemaController, +} from "./downloadSchema/controller"; +const router = Router(); + +router.post( + "/:flowId/copy", + useTeamEditorAuth, + validate(copyFlowSchema), + copyFlowController, +); + +router.post( + "/:flowId/search", + usePlatformAdminAuth, + validate(findAndReplaceSchema), + findAndReplaceController, +); + +router.put( + "/:flowId/copy-portal/:portalNodeId", + usePlatformAdminAuth, + validate(copyFlowAsPortalSchema), + copyPortalAsFlowController, +); + +router.post( + "/:flowId/move/:teamSlug", + useTeamEditorAuth, + validate(moveFlowSchema), + moveFlowController, +); + +router.post( + "/:flowId/publish", + useTeamEditorAuth, + validate(publishFlowSchema), + publishFlowController, +); + +router.post( + "/:flowId/diff", + 
useTeamEditorAuth, + validate(validateAndDiffSchema), + validateAndDiffFlowController, +); + +router.get( + "/:flowId/download-schema", + validate(downloadFlowSchema), + downloadFlowSchemaController, +); + +export default router; diff --git a/api.planx.uk/modules/flows/validate/controller.ts b/api.planx.uk/modules/flows/validate/controller.ts new file mode 100644 index 0000000000..5020a798a5 --- /dev/null +++ b/api.planx.uk/modules/flows/validate/controller.ts @@ -0,0 +1,37 @@ +import { Node } from "@opensystemslab/planx-core/types"; +import { ValidatedRequestHandler } from "../../../shared/middleware/validate"; +import { z } from "zod"; +import { validateAndDiffFlow } from "./service"; +import { ServerError } from "../../../errors"; + +interface ValidateAndDiffResponse { + message: string; + alteredNodes: Node[] | null; + description?: string; +} + +export const validateAndDiffSchema = z.object({ + params: z.object({ + flowId: z.string(), + }), +}); + +export type ValidateAndDiffFlowController = ValidatedRequestHandler< + typeof validateAndDiffSchema, + ValidateAndDiffResponse +>; + +export const validateAndDiffFlowController: ValidateAndDiffFlowController = + async (_req, res, next) => { + try { + const { flowId } = res.locals.parsedReq.params; + const result = await validateAndDiffFlow(flowId); + return res.json(result); + } catch (error) { + return next( + new ServerError({ + message: `Failed to validate and diff flow: ${error}`, + }), + ); + } + }; diff --git a/api.planx.uk/editor/publish.ts b/api.planx.uk/modules/flows/validate/service.ts similarity index 59% rename from api.planx.uk/editor/publish.ts rename to api.planx.uk/modules/flows/validate/service.ts index ca4d8976f6..132725e59e 100644 --- a/api.planx.uk/editor/publish.ts +++ b/api.planx.uk/modules/flows/validate/service.ts @@ -1,151 +1,60 @@ import * as jsondiffpatch from "jsondiffpatch"; -import { Request, Response, NextFunction } from "express"; -import { dataMerged, getMostRecentPublishedFlow } 
from "../helpers"; -import { gql } from "graphql-request"; +import { dataMerged, getMostRecentPublishedFlow } from "../../../helpers"; import intersection from "lodash/intersection"; import { ComponentType, FlowGraph, Node, } from "@opensystemslab/planx-core/types"; -import { userContext } from "../modules/auth/middleware"; import type { Entry } from "type-fest"; -import { getClient } from "../client"; -const validateAndDiffFlow = async ( - req: Request, - res: Response, - next: NextFunction, -): Promise => { - try { - const flattenedFlow = await dataMerged(req.params.flowId); - - const { - isValid: sectionsAreValid, +const validateAndDiffFlow = async (flowId: string) => { + const flattenedFlow = await dataMerged(flowId); + + const { + isValid: sectionsAreValid, + message: sectionsValidationMessage, + description: sectionsValidationDescription, + } = validateSections(flattenedFlow); + if (!sectionsAreValid) { + return { + alteredNodes: null, message: sectionsValidationMessage, description: sectionsValidationDescription, - } = validateSections(flattenedFlow); - if (!sectionsAreValid) { - return res.json({ - alteredNodes: null, - message: sectionsValidationMessage, - description: sectionsValidationDescription, - }); - } + }; + } - const { - isValid: payIsValid, + const { + isValid: payIsValid, + message: payValidationMessage, + description: payValidationDescription, + } = validateInviteToPay(flattenedFlow); + if (!payIsValid) { + return { + alteredNodes: null, message: payValidationMessage, description: payValidationDescription, - } = validateInviteToPay(flattenedFlow); - if (!payIsValid) { - return res.json({ - alteredNodes: null, - message: payValidationMessage, - description: payValidationDescription, - }); - } - - const mostRecent = await getMostRecentPublishedFlow(req.params.flowId); - const delta = jsondiffpatch.diff(mostRecent, flattenedFlow); - - if (delta) { - const alteredNodes = Object.keys(delta).map((key) => ({ - id: key, - ...flattenedFlow[key], - })); 
- - return res.json({ - alteredNodes, - }); - } else { - return res.json({ - alteredNodes: null, - message: "No new changes to publish", - }); - } - } catch (error) { - return next(error); + }; } -}; -interface PublishFlow { - publishedFlow: { - id: string; - flowId: string; - publisherId: string; - createdAt: string; - data: FlowGraph; - }; -} - -const publishFlow = async ( - req: Request, - res: Response, - next: NextFunction, -): Promise => { - try { - const flattenedFlow = await dataMerged(req.params.flowId); - const mostRecent = await getMostRecentPublishedFlow(req.params.flowId); - const delta = jsondiffpatch.diff(mostRecent, flattenedFlow); - - const userId = userContext.getStore()?.user?.sub; - if (!userId) throw Error("User details missing from request"); - - if (delta) { - const { client: $client } = getClient(); - const response = await $client.request( - gql` - mutation PublishFlow( - $data: jsonb = {} - $flow_id: uuid - $publisher_id: Int - $summary: String - ) { - publishedFlow: insert_published_flows_one( - object: { - data: $data - flow_id: $flow_id - publisher_id: $publisher_id - summary: $summary - } - ) { - id - flowId: flow_id - publisherId: publisher_id - createdAt: created_at - data - } - } - `, - { - data: flattenedFlow, - flow_id: req.params.flowId, - publisher_id: parseInt(userId), - summary: req.query?.summary || null, - }, - ); + const mostRecent = await getMostRecentPublishedFlow(flowId); + const delta = jsondiffpatch.diff(mostRecent, flattenedFlow); - const publishedFlow = - response.publishedFlow && response.publishedFlow.data; + if (!delta) + return { + alteredNodes: null, + message: "No new changes to publish", + }; - const alteredNodes = Object.keys(delta).map((key) => ({ - id: key, - ...publishedFlow[key], - })); + const alteredNodes = Object.keys(delta).map((key) => ({ + id: key, + ...flattenedFlow[key], + })); - return res.json({ - alteredNodes, - }); - } else { - return res.json({ - alteredNodes: null, - message: "No new changes 
to publish", - }); - } - } catch (error) { - return next(error); - } + return { + alteredNodes, + message: "Changes valid", + }; }; type ValidationResponse = { @@ -320,4 +229,4 @@ const numberOfComponentType = ( return nodeIds?.length; }; -export { validateAndDiffFlow, publishFlow }; +export { validateAndDiffFlow }; diff --git a/api.planx.uk/editor/publish.test.ts b/api.planx.uk/modules/flows/validate/validate.test.ts similarity index 59% rename from api.planx.uk/editor/publish.test.ts rename to api.planx.uk/modules/flows/validate/validate.test.ts index 58a9bb4033..41519f2925 100644 --- a/api.planx.uk/editor/publish.test.ts +++ b/api.planx.uk/modules/flows/validate/validate.test.ts @@ -1,11 +1,12 @@ import supertest from "supertest"; -import { queryMock } from "../tests/graphqlQueryMock"; -import { authHeader, getJWT } from "../tests/mockJWT"; -import app from "../server"; -import { flowWithInviteToPay } from "../tests/mocks/inviteToPayData"; +import { queryMock } from "../../../tests/graphqlQueryMock"; +import { authHeader, getJWT } from "../../../tests/mockJWT"; +import app from "../../../server"; +import { flowWithInviteToPay } from "../../../tests/mocks/inviteToPayData"; +import { userContext } from "../../auth/middleware"; import { FlowGraph } from "@opensystemslab/planx-core/types"; -import { userContext } from "../modules/auth/middleware"; +import { mockFlowData } from "../../../tests/mocks/validateAndPublishMocks"; beforeAll(() => { const getStoreMock = jest.spyOn(userContext, "getStore"); @@ -56,105 +57,16 @@ beforeEach(() => { const auth = authHeader({ role: "platformAdmin" }); it("requires a user to be logged in", async () => { - await supertest(app).post("/flows/1/publish").expect(401); + await supertest(app).post("/flows/1/diff").expect(401); }); it("requires a user to have the 'teamEditor' role", async () => { await supertest(app) - .post("/flows/1/publish") + .post("/flows/1/diff") .set(authHeader({ role: "teamViewer" })) .expect(403); }); 
-describe("publish", () => { - it("publishes for the first time", async () => { - queryMock.mockQuery({ - name: "GetMostRecentPublishedFlow", - matchOnVariables: false, - data: { - flow: { - publishedFlows: [], - }, - }, - }); - - await supertest(app).post("/flows/1/publish").set(auth).expect(200); - }); - - it("does not update if there are no new changes", async () => { - await supertest(app) - .post("/flows/1/publish") - .set(auth) - .expect(200) - .then((res) => { - expect(res.body).toEqual({ - alteredNodes: null, - message: "No new changes to publish", - }); - }); - }); - - it("updates published flow and returns altered nodes if there have been changes", async () => { - const alteredFlow = { - ...mockFlowData, - ResultNode: { - data: { - flagSet: "Planning permission", - overrides: { - NO_APP_REQUIRED: { - heading: "Some Other Heading", - }, - }, - }, - type: 3, - }, - }; - - queryMock.mockQuery({ - name: "GetFlowData", - matchOnVariables: false, - data: { - flow: { - data: alteredFlow, - }, - }, - }); - - queryMock.mockQuery({ - name: "PublishFlow", - matchOnVariables: false, - data: { - publishedFlow: { - data: alteredFlow, - }, - }, - }); - - await supertest(app) - .post("/flows/1/publish") - .set(auth) - .expect(200) - .then((res) => { - expect(res.body).toEqual({ - alteredNodes: [ - { - id: "ResultNode", - type: 3, - data: { - flagSet: "Planning permission", - overrides: { - NO_APP_REQUIRED: { - heading: "Some Other Heading", - }, - }, - }, - }, - ], - }); - }); - }); -}); - describe("sections validation on diff", () => { it("does not update if there are sections in an external portal", async () => { const alteredFlow = { @@ -383,113 +295,3 @@ describe("invite to pay validation on diff", () => { }); }); }); - -const mockFlowData: FlowGraph = { - _root: { - edges: [ - "SectionOne", - "QuestionOne", - "InternalPortalNode", - "FindPropertyNode", - "PayNode", - "SendNode", - "ResultNode", - "ConfirmationNode", - ], - }, - SectionOne: { - type: 360, - data: { - 
title: "Section 1", - }, - }, - FindPropertyNode: { - type: 9, - }, - ResultNode: { - data: { - flagSet: "Planning permission", - overrides: { - NO_APP_REQUIRED: { - heading: "Congratulations!", - }, - }, - }, - type: 3, - }, - AnswerOne: { - data: { - text: "?", - }, - type: 200, - }, - QuestionInPortal: { - data: { - text: "internal question", - }, - type: 100, - edges: ["AnswerInPortalOne", "AnswerInPortalTwo"], - }, - AnswerTwo: { - data: { - text: "!!", - }, - type: 200, - }, - InternalPortalNode: { - data: { - text: "portal", - }, - type: 300, - edges: ["QuestionInPortal"], - }, - QuestionOne: { - data: { - text: "Question", - }, - type: 100, - edges: ["AnswerOne", "AnswerTwo"], - }, - PayNode: { - data: { - fn: "application.fee.payable", - url: "http://localhost:7002/pay", - color: "#EFEFEF", - title: "Pay for your application", - description: - '

The planning fee covers the cost of processing your application. Find out more about how planning fees are calculated here.

', - }, - type: 400, - }, - AnswerInPortalOne: { - data: { - text: "?", - }, - type: 200, - }, - AnswerInPortalTwo: { - data: { - text: "*", - }, - type: 200, - }, - ConfirmationNode: { - data: { - heading: "Application sent", - moreInfo: - "

You will be contacted

\n
    \n
  • if there is anything missing from the information you have provided so far
  • \n
  • if any additional information is required
  • \n
  • to arrange a site visit, if required
  • \n
  • to inform you whether a certificate has been granted or not
  • \n
\n", - contactInfo: - '

You can contact us at planning@lambeth.gov.uk

\n', - description: - "A payment receipt has been emailed to you. You will also receive an email to confirm when your application has been received.", - feedbackCTA: "What did you think of this service? (takes 30 seconds)", - }, - type: 725, - }, - SendNode: { - data: { - url: "http://localhost:7002/bops/southwark", - }, - type: 650, - }, -}; diff --git a/api.planx.uk/server.ts b/api.planx.uk/server.ts index 6bb5d70382..38cdf5ed54 100644 --- a/api.planx.uk/server.ts +++ b/api.planx.uk/server.ts @@ -14,9 +14,6 @@ import helmet from "helmet"; import { ServerError } from "./errors"; import { locationSearch } from "./gis/index"; -import { validateAndDiffFlow, publishFlow } from "./editor/publish"; -import { findAndReplaceInFlow } from "./editor/findReplace"; -import { copyPortalAsFlow } from "./editor/copyPortalAsFlow"; import { makePaymentViaProxy, fetchPaymentViaProxy, @@ -28,11 +25,7 @@ import { buildPaymentPayload, fetchPaymentRequestViaProxy, } from "./inviteToPay"; -import { - useHasuraAuth, - usePlatformAdminAuth, - useTeamEditorAuth, -} from "./modules/auth/middleware"; +import { useHasuraAuth } from "./modules/auth/middleware"; import airbrake from "./airbrake"; import { apiLimiter } from "./rateLimit"; @@ -40,9 +33,6 @@ import { sendToBOPS } from "./send/bops"; import { createSendEvents } from "./send/createSendEvents"; import { downloadApplicationFiles, sendToEmail } from "./send/email"; import { sendToUniform } from "./send/uniform"; -import { copyFlow } from "./editor/copyFlow"; -import { moveFlow } from "./editor/moveFlow"; -import { gql } from "graphql-request"; import { classifiedRoadsSearch } from "./gis/classifiedRoads"; import { googleStrategy } from "./modules/auth/strategy/google"; import authRoutes from "./modules/auth/routes"; @@ -52,13 +42,13 @@ import userRoutes from "./modules/user/routes"; import webhookRoutes from "./modules/webhooks/routes"; import analyticsRoutes from "./modules/analytics/routes"; import adminRoutes from 
"./modules/admin/routes"; +import flowRoutes from "./modules/flows/routes"; import ordnanceSurveyRoutes from "./modules/ordnanceSurvey/routes"; -import fileRoutes from "./modules/file/routes"; -import sendEmailRoutes from "./modules/sendEmail/routes"; import saveAndReturnRoutes from "./modules/saveAndReturn/routes"; +import sendEmailRoutes from "./modules/sendEmail/routes"; +import fileRoutes from "./modules/file/routes"; import { useSwaggerDocs } from "./docs"; import { Role } from "@opensystemslab/planx-core/types"; -import { $public } from "./client"; const router = express.Router(); @@ -181,6 +171,7 @@ app.use(ordnanceSurveyRoutes); app.use("/file", fileRoutes); app.use(saveAndReturnRoutes); app.use(sendEmailRoutes); +app.use("/flows", flowRoutes); app.use("/gis", router); @@ -195,109 +186,21 @@ app.get("/gis/:localAuthority", locationSearch); app.get("/roads", classifiedRoadsSearch); -app.post("/flows/:flowId/copy", useTeamEditorAuth, copyFlow); - -app.post("/flows/:flowId/diff", useTeamEditorAuth, validateAndDiffFlow); - -app.post("/flows/:flowId/move/:teamSlug", useTeamEditorAuth, moveFlow); - -app.post("/flows/:flowId/publish", useTeamEditorAuth, publishFlow); - -/** - * @swagger - * /flows/{flowId}/search: - * post: - * summary: Find and replace - * description: Find and replace a data variable in a flow - * tags: - * - flows - * parameters: - * - in: path - * name: flowId - * type: string - * required: true - * - in: query - * name: find - * type: string - * required: true - * - in: query - * name: replace - * type: string - * required: false - * responses: - * '200': - * description: OK - * content: - * application/json: - * schema: - * type: object - * properties: - * message: - * type: string - * required: true - * matches: - * type: object - * required: true - * additionalProperties: true - * updatedFlow: - * type: object - * required: false - * additionalProperties: true - * properties: - * _root: - * type: object - * properties: - * edges: - * type: 
array - * items: - * type: string - */ -app.post("/flows/:flowId/search", usePlatformAdminAuth, findAndReplaceInFlow); - -app.get( - "/flows/:flowId/copy-portal/:portalNodeId", - usePlatformAdminAuth, - copyPortalAsFlow, -); - -interface FlowSchema { - node: string; - type: string; - text: string; - planx_variable: string; -} +// allows an applicant to download their application data on the Confirmation page +app.post("/download-application", async (req, res, next) => { + if (!req.body) { + res.send({ + message: "Missing application `data` to download", + }); + } -app.get("/flows/:flowId/download-schema", async (req, res, next) => { try { - const { flowSchema } = await $public.client.request<{ - flowSchema: FlowSchema[]; - }>( - gql` - query ($flow_id: String!) { - flowSchema: get_flow_schema(args: { published_flow_id: $flow_id }) { - node - type - text - planx_variable - } - } - `, - { flow_id: req.params.flowId }, - ); - - if (!flowSchema.length) { - next({ - status: 404, - message: - "Can't find a schema for this flow. 
Make sure it's published or try a different flow id.", - }); - } else { - // build a CSV and stream it - stringify(flowSchema, { header: true }).pipe(res); - - res.header("Content-type", "text/csv"); - res.attachment(`${req.params.flowId}.csv`); - } + // build a CSV and stream the response + stringify(req.body, { + columns: ["question", "responses", "metadata"], + header: true, + }).pipe(res); + res.header("Content-type", "text/csv"); } catch (err) { next(err); } diff --git a/api.planx.uk/tests/mocks/validateAndPublishMocks.ts b/api.planx.uk/tests/mocks/validateAndPublishMocks.ts new file mode 100644 index 0000000000..c137f6095d --- /dev/null +++ b/api.planx.uk/tests/mocks/validateAndPublishMocks.ts @@ -0,0 +1,111 @@ +import { FlowGraph } from "@opensystemslab/planx-core/types"; + +export const mockFlowData: FlowGraph = { + _root: { + edges: [ + "SectionOne", + "QuestionOne", + "InternalPortalNode", + "FindPropertyNode", + "PayNode", + "SendNode", + "ResultNode", + "ConfirmationNode", + ], + }, + SectionOne: { + type: 360, + data: { + title: "Section 1", + }, + }, + FindPropertyNode: { + type: 9, + }, + ResultNode: { + data: { + flagSet: "Planning permission", + overrides: { + NO_APP_REQUIRED: { + heading: "Congratulations!", + }, + }, + }, + type: 3, + }, + AnswerOne: { + data: { + text: "?", + }, + type: 200, + }, + QuestionInPortal: { + data: { + text: "internal question", + }, + type: 100, + edges: ["AnswerInPortalOne", "AnswerInPortalTwo"], + }, + AnswerTwo: { + data: { + text: "!!", + }, + type: 200, + }, + InternalPortalNode: { + data: { + text: "portal", + }, + type: 300, + edges: ["QuestionInPortal"], + }, + QuestionOne: { + data: { + text: "Question", + }, + type: 100, + edges: ["AnswerOne", "AnswerTwo"], + }, + PayNode: { + data: { + fn: "application.fee.payable", + url: "http://localhost:7002/pay", + color: "#EFEFEF", + title: "Pay for your application", + description: + '

The planning fee covers the cost of processing your application. Find out more about how planning fees are calculated here.

', + }, + type: 400, + }, + AnswerInPortalOne: { + data: { + text: "?", + }, + type: 200, + }, + AnswerInPortalTwo: { + data: { + text: "*", + }, + type: 200, + }, + ConfirmationNode: { + data: { + heading: "Application sent", + moreInfo: + "

You will be contacted

\n
    \n
  • if there is anything missing from the information you have provided so far
  • \n
  • if any additional information is required
  • \n
  • to arrange a site visit, if required
  • \n
  • to inform you whether a certificate has been granted or not
  • \n
\n", + contactInfo: + '

You can contact us at planning@lambeth.gov.uk

\n', + description: + "A payment receipt has been emailed to you. You will also receive an email to confirm when your application has been received.", + feedbackCTA: "What did you think of this service? (takes 30 seconds)", + }, + type: 725, + }, + SendNode: { + data: { + url: "http://localhost:7002/bops/southwark", + }, + type: 650, + }, +}; diff --git a/hasura.planx.uk/metadata/tables.yaml b/hasura.planx.uk/metadata/tables.yaml index fade60a7e9..a3b41c7d04 100644 --- a/hasura.planx.uk/metadata/tables.yaml +++ b/hasura.planx.uk/metadata/tables.yaml @@ -240,8 +240,9 @@ columns: - flow_id - node - - type - planx_variable + - text + - type filter: {} - table: schema: public @@ -1217,9 +1218,6 @@ - locked_at: _is_null: true check: null -- table: - schema: public - name: submission_services_summary - table: schema: public name: team_members