diff --git a/.env.example b/.env.example index fd2f0b1203..222a893a48 100644 --- a/.env.example +++ b/.env.example @@ -100,6 +100,8 @@ UNIFORM_CLIENT_WYCOMBE=👻 ## Forthcoming Idox Nexus integration IDOX_NEXUS_CLIENT=👻 +IDOX_NEXUS_TOKEN_URL=👻 +IDOX_NEXUS_SUBMISSION_URL=👻 ## End-to-end test team (borrows Lambeth's details) GOV_UK_PAY_SECRET_E2E=👻 diff --git a/api.planx.uk/.env.test.example b/api.planx.uk/.env.test.example index ba874716ac..2a36ccfc56 100644 --- a/api.planx.uk/.env.test.example +++ b/api.planx.uk/.env.test.example @@ -39,4 +39,7 @@ UNIFORM_SUBMISSION_URL=👻 SLACK_WEBHOOK_URL=👻 -ORDNANCE_SURVEY_API_KEY=👻 \ No newline at end of file +ORDNANCE_SURVEY_API_KEY=👻 + +IDOX_NEXUS_TOKEN_URL=👻 +IDOX_NEXUS_SUBMISSION_URL=👻 diff --git a/api.planx.uk/modules/admin/session/zip.ts b/api.planx.uk/modules/admin/session/zip.ts index 56a91b99db..53ac5791f3 100644 --- a/api.planx.uk/modules/admin/session/zip.ts +++ b/api.planx.uk/modules/admin/session/zip.ts @@ -5,8 +5,8 @@ import { buildSubmissionExportZip } from "../../send/utils/exportZip.js"; * @swagger * /admin/session/{sessionId}/zip: * get: - * summary: Generates and downloads a zip file for Send to Email, or Uniform when XML is included - * description: Generates and downloads a zip file for Send to Email, or Uniform when XML is included + * summary: Generates and downloads a zip file for integrations + * description: Generates and downloads a zip file for integrations * tags: * - admin * parameters: @@ -21,6 +21,11 @@ import { buildSubmissionExportZip } from "../../send/utils/exportZip.js"; * type: boolean * required: false * description: If the Digital Planning JSON file should be included in the zip (only generated for supported application types) + * - in: query + * name: onlyDigitalPlanningJSON + * type: boolean + * required: false + * description: If the Digital Planning JSON file should be the ONLY file included in the zip (only generated for supported application types) * security: * - bearerAuth: [] */ @@ 
-32,9 +37,10 @@ export async function generateZip( try { const zip = await buildSubmissionExportZip({ sessionId: req.params.sessionId, includeOneAppXML: req.query.includeOneAppXML === "true", includeDigitalPlanningJSON: req.query.includeDigitalPlanningJSON === "true", + onlyDigitalPlanningJSON: req.query.onlyDigitalPlanningJSON === "true", }); res.download(zip.filename, () => { zip.remove(); diff --git a/api.planx.uk/modules/send/createSendEvents/controller.ts b/api.planx.uk/modules/send/createSendEvents/controller.ts index 4338ec3c9a..b2a756187d 100644 --- a/api.planx.uk/modules/send/createSendEvents/controller.ts +++ b/api.planx.uk/modules/send/createSendEvents/controller.ts @@ -50,7 +50,7 @@ const createSendEvents: CreateSendEventsController = async ( if (idox) { const idoxEvent = await createScheduledEvent({ webhook: `{{HASURA_PLANX_API_URL}}/idox/${idox.localAuthority}`, - schedule_at: new Date(now.getTime() + 60 * 1000), + schedule_at: new Date(now.getTime()), // now() is good for testing, but should be staggered if dual processing in future payload: idox.body, comment: `idox_nexus_submission_${sessionId}`, }); diff --git a/api.planx.uk/modules/send/idox/nexus.ts b/api.planx.uk/modules/send/idox/nexus.ts index 109e304f0d..f03808015d 100644 --- a/api.planx.uk/modules/send/idox/nexus.ts +++ b/api.planx.uk/modules/send/idox/nexus.ts @@ -9,25 +9,25 @@ import { $api } from "../../../client/index.js"; import { markSessionAsSubmitted } from "../../saveAndReturn/service/utils.js"; import { buildSubmissionExportZip } from "../utils/exportZip.js"; -interface UniformClient { +interface IdoxNexusClient { clientId: string; clientSecret: string; } -interface UniformSubmissionResponse { - submissionStatus?: string; - canDownload?: boolean; - submissionId?: string; -} - -interface RawUniformAuthResponse { +interface RawIdoxNexusAuthResponse { access_token: string; } -interface UniformAuthResponse { +interface
IdoxNexusAuthResponse { token: string; - organisation: string; - organisationId: string; + organisations: Record<string, string>; + authorities: string[]; +} + +interface UniformSubmissionResponse { + submissionStatus?: string; + canDownload?: boolean; + submissionId?: string; } interface UniformApplication { @@ -39,7 +39,7 @@ created_at: string; } -interface SendToUniformPayload { +interface SendToIdoxNexusPayload { sessionId: string; } @@ -49,16 +49,16 @@ export async function sendToIdoxNexus( next: NextFunction, ) { /** - * Submits application data to Uniform + * Submits application data to Idox's Submission API (aka Nexus) * - * first, create a zip folder containing an XML (Idox's schema), CSV (our format), and any user-uploaded files + * first, create a zip folder containing the ODP Schema JSON * then, make requests to Uniform's "Submission API" to authenticate, create a submission, and attach the zip to the submission * finally, insert a record into uniform_applications for future auditing */ req.setTimeout(120 * 1000); // Temporary bump to address submission timeouts - // `/uniform/:localAuthority` is only called via Hasura's scheduled event webhook now, so body is wrapped in a "payload" key - const payload: SendToUniformPayload = req.body.payload; + // `/idox/:localAuthority` is only called via Hasura's scheduled event webhook now, so body is wrapped in a "payload" key + const payload: SendToIdoxNexusPayload = req.body.payload; if (!payload?.sessionId) { return next({ status: 400, @@ -68,39 +68,46 @@ export async function sendToIdoxNexus( // localAuthority is only parsed for audit record, not client-specific const localAuthority = req.params.localAuthority; - const uniformClient = getUniformClient(); + const idoxNexusClient = getIdoxNexusClient(); // confirm that this session has not already been successfully submitted before proceeding const submittedApp = await checkUniformAuditTable(payload?.sessionId); - const isAlreadySubmitted = + const
_isAlreadySubmitted = submittedApp?.submissionStatus === "PENDING" && submittedApp?.canDownload; - if (isAlreadySubmitted) { - return res.status(200).send({ - sessionId: payload?.sessionId, - idoxSubmissionId: submittedApp?.submissionId, - message: `Skipping send, already successfully submitted`, - }); - } + // if (isAlreadySubmitted) { + // return res.status(200).send({ + // sessionId: payload?.sessionId, + // idoxSubmissionId: submittedApp?.submissionId, + // message: `Skipping send, already successfully submitted`, + // }); + // } try { // Request 1/4 - Authenticate - const { token, organisation, organisationId } = - await authenticate(uniformClient); + const { token, organisations } = await authenticate(idoxNexusClient); - // 2/4 - Create a submission - const idoxSubmissionId = await createSubmission( - token, - organisation, - organisationId, - payload.sessionId, - ); + // TEMP - Mock organisations do NOT correspond to council envs, so randomly alternate submissions among ones we have access to for initial testing + // Switch to `team_integrations`-based approach later + const orgIds = Object.keys(organisations); + const randomOrgId = orgIds[Math.floor(Math.random() * orgIds.length)]; + const randomOrg = organisations[randomOrgId]; - // 3/4 - Create & attach the zip + // Create a zip containing only the ODP Schema JSON + // Do this BEFORE creating a submission in order to throw any validation errors early const zip = await buildSubmissionExportZip({ sessionId: payload.sessionId, onlyDigitalPlanningJSON: true, }); + // 2/4 - Create a submission + const idoxSubmissionId = await createSubmission( + token, + randomOrg, + randomOrgId, + payload.sessionId, + ); + + // 3/4 - Attach the zip const attachmentAdded = await attachArchive( token, idoxSubmissionId, @@ -112,7 +119,6 @@ export async function sendToIdoxNexus( // 4/4 - Get submission details and create audit record const submissionDetails = await retrieveSubmission(token, idoxSubmissionId); - const 
applicationAuditRecord = await createUniformApplicationAuditRecord({ idoxSubmissionId, submissionDetails, @@ -124,7 +130,7 @@ markSessionAsSubmitted(payload?.sessionId); return res.status(200).send({ - message: `Successfully created an Idox Nexus submission`, + message: `Successfully created an Idox Nexus submission (${randomOrgId} - ${randomOrg})`, zipAttached: attachmentAdded, application: applicationAuditRecord, }); @@ -172,14 +178,14 @@ async function checkUniformAuditTable( async function authenticate({ clientId, clientSecret, -}: UniformClient): Promise<UniformAuthResponse> { +}: IdoxNexusClient): Promise<IdoxNexusAuthResponse> { const authString = Buffer.from(`${clientId}:${clientSecret}`).toString( "base64", ); const authConfig: AxiosRequestConfig = { method: "POST", - url: process.env.UNIFORM_TOKEN_URL!, + url: process.env.IDOX_NEXUS_TOKEN_URL!, headers: { Authorization: `Basic ${authString}`, "Content-type": "application/x-www-form-urlencoded", @@ -191,30 +197,32 @@ }), }; - const response = await axios.request<RawUniformAuthResponse>(authConfig); + const response = await axios.request<RawIdoxNexusAuthResponse>(authConfig); if (!response.data.access_token) { - throw Error("Failed to authenticate to Uniform - no access token returned"); + throw Error( + "Failed to authenticate to Idox Nexus - no access token returned", + ); } - // Decode access_token to get "organisation-name" & "organisation-id" + // Decode access_token to get "organisations" & "authorities" const decodedAccessToken = jwt.decode(response.data.access_token) as any; - const organisation = decodedAccessToken?.["organisation-name"]; - const organisationId = decodedAccessToken?.["organisation-id"]; + const organisations = decodedAccessToken?.["organisations"]; + const authorities = decodedAccessToken?.["authorities"]; - if (!organisation || !organisationId) { + if (!organisations || !authorities) { throw Error( - "Failed to authenticate to Uniform - failed to decode organisation details from access_token", + "Failed
to authenticate to Idox Nexus - failed to decode organisations or authorities from access_token", ); } - const uniformAuthResponse: UniformAuthResponse = { + const idoxNexusAuthResponse: IdoxNexusAuthResponse = { token: response.data.access_token, - organisation: organisation, - organisationId: organisationId, + organisations: organisations, + authorities: authorities, }; - return uniformAuthResponse; + return idoxNexusAuthResponse; } /** @@ -227,13 +235,19 @@ async function createSubmission( organisationId: string, sessionId = "TEST", ): Promise<string> { - const createSubmissionEndpoint = `${process.env - .UNIFORM_SUBMISSION_URL!}/secure/submission`; + const createSubmissionEndpoint = `${process.env.IDOX_NEXUS_SUBMISSION_URL!}/secure/submission`; - const isStaging = ["mock-server", "staging"].some((hostname) => + const isStaging = ["mock-server", "staging", "dev"].some((hostname) => createSubmissionEndpoint.includes(hostname), ); + // Get the application type prefix (eg "ldc", "pp", "pa") to send as the "entity" + const session = await $api.session.find(sessionId); + const rawApplicationType = session?.data.passport.data?.[ + "application.type" + ] as string[]; + const applicationTypePrefix = rawApplicationType?.[0]?.split(".")?.[0]; + const createSubmissionConfig: AxiosRequestConfig = { url: createSubmissionEndpoint, method: "POST", @@ -242,15 +256,15 @@ "Content-type": "application/json", }, data: JSON.stringify({ - entity: "dc", - module: "dc", + entity: applicationTypePrefix, + module: "dcplanx", organisation: organisation, organisationId: organisationId, submissionReference: sessionId, description: isStaging ?
"Staging submission from PlanX" : "Production submission from PlanX", - submissionProcessorType: "API", + submissionProcessorType: "PLANX_QUEUE", }), }; @@ -283,8 +297,7 @@ async function attachArchive( return false; } - const attachArchiveEndpoint = `${process.env - .UNIFORM_SUBMISSION_URL!}/secure/submission/${submissionId}/archive`; + const attachArchiveEndpoint = `${process.env.IDOX_NEXUS_SUBMISSION_URL!}/secure/submission/${submissionId}/archive`; const formData = new FormData(); formData.append("file", fs.createReadStream(zipPath)); @@ -306,7 +319,7 @@ const isSuccess = response.status === 204; // Temp additional logging to debug failures - console.log("*** Uniform attachArchive response ***"); + console.log("*** Idox Nexus attachArchive response ***"); console.log({ status: response.status }); console.log(JSON.stringify(response.data, null, 2)); console.log("******"); @@ -323,7 +336,7 @@ async function retrieveSubmission( submissionId: string, ): Promise<UniformSubmissionResponse> { const getSubmissionEndpoint = `${process.env - .UNIFORM_SUBMISSION_URL!}/secure/submission/${submissionId}`; + .IDOX_NEXUS_SUBMISSION_URL!}/secure/submission/${submissionId}`; const getSubmissionConfig: AxiosRequestConfig = { url: getSubmissionEndpoint, @@ -340,7 +353,7 @@ /** * Get id and secret of Idox Nexus client */ -const getUniformClient = (): UniformClient => { +const getIdoxNexusClient = (): IdoxNexusClient => { const client = process.env["IDOX_NEXUS_CLIENT"]; if (!client) throw Error(`Unable to find Idox Nexus client`); @@ -356,12 +369,10 @@ const createUniformApplicationAuditRecord = async ({ submissionDetails, }: { idoxSubmissionId: string; - payload: SendToUniformPayload; + payload: SendToIdoxNexusPayload; localAuthority: string; submissionDetails: UniformSubmissionResponse; }): Promise<UniformApplication> => { - const xml = await $api.export.oneAppPayload(payload?.sessionId); - const application: Record< "insert_uniform_applications_one",
UniformApplication @@ -400,7 +411,7 @@ const createUniformApplicationAuditRecord = async ({ destination: localAuthority, response: submissionDetails, payload, - xml, + xml: "ODP Schema", }, ); diff --git a/api.planx.uk/modules/send/utils/exportZip.test.ts b/api.planx.uk/modules/send/utils/exportZip.test.ts index d8e6b58c99..d38bee79f0 100644 --- a/api.planx.uk/modules/send/utils/exportZip.test.ts +++ b/api.planx.uk/modules/send/utils/exportZip.test.ts @@ -302,4 +302,39 @@ describe("buildSubmissionExportZip", () => { ).rejects.toThrow(/Failed to generate ODP Schema JSON/); }); }); + + describe("onlyDigitalPlanningJSON", () => { + test("ODP schema json is added to the zip", async () => { + await buildSubmissionExportZip({ + sessionId: "1234", + onlyDigitalPlanningJSON: true, + }); + expect(mockAddFile).toHaveBeenCalledWith( + "application.json", + expect.anything(), + ); + // ensure we haven't tried to build other files, even if we haven't added them + expect(mockGenerateOneAppXML).not.toHaveBeenCalled(); + }); + + test("the zip contains exactly one file", async () => { + await buildSubmissionExportZip({ + sessionId: "1234", + onlyDigitalPlanningJSON: true, + }); + expect(mockAddFile).toHaveBeenCalledTimes(1); + }); + + it("throws an error when ODP schema generation fails", async () => { + mockGenerateDigitalPlanningDataPayload.mockRejectedValueOnce( + new Error("validation test error"), + ); + await expect( + buildSubmissionExportZip({ + sessionId: "1234", + onlyDigitalPlanningJSON: true, + }), + ).rejects.toThrow(/Failed to generate ODP Schema JSON/); + }); + }); }); diff --git a/api.planx.uk/modules/send/utils/exportZip.ts b/api.planx.uk/modules/send/utils/exportZip.ts index bc6921a5ba..e67a32ce7c 100644 --- a/api.planx.uk/modules/send/utils/exportZip.ts +++ b/api.planx.uk/modules/send/utils/exportZip.ts @@ -40,25 +40,9 @@ export async function buildSubmissionExportZip({ const passport = sessionData.data?.passport as IPassport; const flowSlug = 
sessionData?.flow.slug; - // create zip + // create empty zip const zip = new ExportZip(sessionId, flowSlug); - // add OneApp XML to the zip - if (includeOneAppXML && !onlyDigitalPlanningJSON) { - try { - const xml = await $api.export.oneAppPayload(sessionId); - const xmlStream = str(xml.trim()); - await zip.addStream({ - name: "proposal.xml", // must be named "proposal.xml" to be processed by Uniform - stream: xmlStream, - }); - } catch (error) { - throw new Error( - `Failed to generate OneApp XML for ${sessionId} zip. Error - ${error}`, - ); - } - } - // add ODP Schema JSON to the zip, skipping validation if an unsupported application type if (includeDigitalPlanningJSON || onlyDigitalPlanningJSON) { try { @@ -67,10 +51,17 @@ export async function buildSubmissionExportZip({ ? await $api.export.digitalPlanningDataPayload(sessionId) : await $api.export.digitalPlanningDataPayload(sessionId, true); const schemaBuff = Buffer.from(JSON.stringify(schema, null, 2)); + zip.addFile({ name: "application.json", buffer: schemaBuff, }); + + // return early if onlyDigitalPlanningJSON + if (onlyDigitalPlanningJSON) { + zip.write(); + return zip; + } } catch (error) { throw new Error( `Failed to generate ODP Schema JSON for ${sessionId} zip. 
Error - ${error}`, @@ -78,122 +69,136 @@ export async function buildSubmissionExportZip({ } } - if (!onlyDigitalPlanningJSON) { - // add remote user-uploaded files on S3 to the zip - const files = new Passport(passport).files; - if (files.length) { - for (const file of files) { - // Ensure unique filename by combining original filename and S3 folder name, which is a nanoid - // Uniform requires all uploaded files to be present in the zip, even if they are duplicates - // Must match unique filename in editor.planx.uk/src/@planx/components/Send/uniform/xml.ts - const uniqueFilename = decodeURIComponent( - file.url.split("/").slice(-2).join("-"), - ); - await zip.addRemoteFile({ url: file.url, name: uniqueFilename }); - } - } - - // generate csv data - const responses = await $api.export.csvData(sessionId); - const redactedResponses = await $api.export.csvDataRedacted(sessionId); - - // write csv to the zip + // add OneApp XML to the zip + if (includeOneAppXML) { try { - const csvStream = stringify(responses, { - columns: ["question", "responses", "metadata"], - header: true, - }); + const xml = await $api.export.oneAppPayload(sessionId); + const xmlStream = str(xml.trim()); await zip.addStream({ - name: "application.csv", - stream: csvStream, + name: "proposal.xml", // must be named "proposal.xml" to be processed by Uniform + stream: xmlStream, }); } catch (error) { throw new Error( - `Failed to generate CSV for ${sessionId} zip. Error - ${error}`, + `Failed to generate OneApp XML for ${sessionId} zip. 
Error - ${error}`, + ); + } + } + + // add remote user-uploaded files on S3 to the zip + const files = new Passport(passport).files; + if (files.length) { + for (const file of files) { + // Ensure unique filename by combining original filename and S3 folder name, which is a nanoid + // Uniform requires all uploaded files to be present in the zip, even if they are duplicates + // Must match unique filename in editor.planx.uk/src/@planx/components/Send/uniform/xml.ts + const uniqueFilename = decodeURIComponent( + file.url.split("/").slice(-2).join("-"), ); + await zip.addRemoteFile({ url: file.url, name: uniqueFilename }); } + } - // add template files to zip - const templateNames = - await $api.getDocumentTemplateNamesForSession(sessionId); - for (const templateName of templateNames || []) { - try { - const isTemplateSupported = hasRequiredDataForTemplate({ + // generate csv data + const responses = await $api.export.csvData(sessionId); + const redactedResponses = await $api.export.csvDataRedacted(sessionId); + + // write csv to the zip + try { + const csvStream = stringify(responses, { + columns: ["question", "responses", "metadata"], + header: true, + }); + await zip.addStream({ + name: "application.csv", + stream: csvStream, + }); + } catch (error) { + throw new Error( + `Failed to generate CSV for ${sessionId} zip. 
Error - ${error}`, + ); + } + + // add template files to zip if specified in table `flow_document_templates` + const templateNames = + await $api.getDocumentTemplateNamesForSession(sessionId); + for (const templateName of templateNames || []) { + try { + const isTemplateSupported = hasRequiredDataForTemplate({ + passport, + templateName, + }); + if (isTemplateSupported) { + const templateStream = generateDocxTemplateStream({ passport, templateName, }); - if (isTemplateSupported) { - const templateStream = generateDocxTemplateStream({ - passport, - templateName, - }); - await zip.addStream({ - name: `${templateName}.doc`, - stream: templateStream, - }); - } - } catch (error) { - console.log( - `Template "${templateName}" could not be generated so has been skipped. Error - ${error}`, - ); - continue; + await zip.addStream({ + name: `${templateName}.doc`, + stream: templateStream, + }); } + } catch (error) { + console.log( + `Template "${templateName}" could not be generated so has been skipped. 
Error - ${error}`, + ); + continue; } + } - const boundingBox = passport.data["property.boundary.site.buffered"]; - const userAction = passport.data?.["drawBoundary.action"]; - // generate and add an HTML overview document for the submission to zip - const overviewHTML = generateApplicationHTML({ - planXExportData: responses as PlanXExportData[], - boundingBox, - userAction, - }); - await zip.addFile({ - name: "Overview.htm", - buffer: Buffer.from(overviewHTML), + const boundingBox = passport.data["property.boundary.site.buffered"]; + const userAction = passport.data?.["drawBoundary.action"]; + + // generate and add an HTML overview document for the submission to zip + const overviewHTML = generateApplicationHTML({ + planXExportData: responses as PlanXExportData[], + boundingBox, + userAction, + }); + zip.addFile({ + name: "Overview.htm", + buffer: Buffer.from(overviewHTML), + }); + + // generate and add a redacted HTML overview document for the submission to zip + const redactedOverviewHTML = generateApplicationHTML({ + planXExportData: redactedResponses as PlanXExportData[], + boundingBox, + userAction, + }); + zip.addFile({ + name: "RedactedOverview.htm", + buffer: Buffer.from(redactedOverviewHTML), + }); + + // add an optional GeoJSON file to zip + const geojson: GeoJSON.Feature | undefined = + passport?.data?.["property.boundary.site"]; + if (geojson) { + if (userAction) { + geojson["properties"] ??= {}; + geojson["properties"]["planx_user_action"] = userAction; + } + const geoBuff = Buffer.from(JSON.stringify(geojson, null, 2)); + zip.addFile({ + name: "LocationPlanGeoJSON.geojson", + buffer: geoBuff, }); - // generate and add an HTML overview document for the submission to zip - const redactedOverviewHTML = generateApplicationHTML({ - planXExportData: redactedResponses as PlanXExportData[], + // generate and add an HTML boundary document for the submission to zip + const boundaryHTML = generateMapHTML({ + geojson, boundingBox, userAction, }); - await 
zip.addFile({ - name: "RedactedOverview.htm", - buffer: Buffer.from(redactedOverviewHTML), + zip.addFile({ + name: "LocationPlan.htm", + buffer: Buffer.from(boundaryHTML), }); - - // add an optional GeoJSON file to zip - const geojson: GeoJSON.Feature | undefined = - passport?.data?.["property.boundary.site"]; - if (geojson) { - if (userAction) { - geojson["properties"] ??= {}; - geojson["properties"]["planx_user_action"] = userAction; - } - const geoBuff = Buffer.from(JSON.stringify(geojson, null, 2)); - zip.addFile({ - name: "LocationPlanGeoJSON.geojson", - buffer: geoBuff, - }); - - // generate and add an HTML boundary document for the submission to zip - const boundaryHTML = generateMapHTML({ - geojson, - boundingBox, - userAction, - }); - await zip.addFile({ - name: "LocationPlan.htm", - buffer: Buffer.from(boundaryHTML), - }); - } } // write the zip zip.write(); - return zip; } diff --git a/docker-compose.yml b/docker-compose.yml index 65a38b6e7d..4b06ee18ad 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -144,6 +144,9 @@ services: MINIO_PORT: ${MINIO_PORT} CORS_ALLOWLIST: ${EDITOR_URL_EXT}, ${API_URL_EXT} ENCRYPTION_KEY: ${ENCRYPTION_KEY} + IDOX_NEXUS_CLIENT: ${IDOX_NEXUS_CLIENT} + IDOX_NEXUS_TOKEN_URL: ${IDOX_NEXUS_TOKEN_URL} + IDOX_NEXUS_SUBMISSION_URL: ${IDOX_NEXUS_SUBMISSION_URL} # Local authority config # Lambeth UNIFORM_CLIENT_LAMBETH: ${UNIFORM_CLIENT_LAMBETH} diff --git a/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/down.sql b/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/down.sql new file mode 100644 index 0000000000..76b28bf822 --- /dev/null +++ b/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/down.sql @@ -0,0 +1,74 @@ +CREATE OR REPLACE VIEW "public"."submission_services_log" AS + WITH payments AS ( + SELECT ps.session_id, + ps.payment_id AS event_id, + 'Pay'::text AS event_type, + 
initcap(ps.status) AS status, + jsonb_build_object('status', ps.status, 'description', pse.comment, 'govuk_pay_reference', ps.payment_id) AS response, + ps.created_at, + false AS retry + FROM (payment_status ps + LEFT JOIN payment_status_enum pse ON ((pse.value = ps.status))) + WHERE ((ps.status <> 'created'::text) AND (ps.created_at >= '2024-01-01 00:00:00+00'::timestamp with time zone)) + ), retries AS ( + SELECT hdb_scheduled_event_invocation_logs.id + FROM hdb_catalog.hdb_scheduled_event_invocation_logs + WHERE ((hdb_scheduled_event_invocation_logs.event_id, hdb_scheduled_event_invocation_logs.created_at) IN ( SELECT seil.event_id, + max(seil.created_at) AS max + FROM (hdb_catalog.hdb_scheduled_event_invocation_logs seil + LEFT JOIN hdb_catalog.hdb_scheduled_events se ON ((se.id = seil.event_id))) + WHERE (se.tries > 1) + GROUP BY seil.event_id)) + ), submissions AS ( + SELECT ((((seil.request -> 'payload'::text) -> 'payload'::text) ->> 'sessionId'::text))::uuid AS session_id, + se.id AS event_id, + CASE + WHEN ((se.webhook_conf)::text ~~ '%bops%'::text) THEN 'Submit to BOPS'::text + WHEN ((se.webhook_conf)::text ~~ '%uniform%'::text) THEN 'Submit to Uniform'::text + WHEN ((se.webhook_conf)::text ~~ '%email-submission%'::text) THEN 'Send to email'::text + WHEN ((se.webhook_conf)::text ~~ '%upload-submission%'::text) THEN 'Upload to AWS S3'::text + ELSE (se.webhook_conf)::text + END AS event_type, + CASE + WHEN (seil.status = 200) THEN 'Success'::text + ELSE format('Failed (%s)'::text, seil.status) + END AS status, + (seil.response)::jsonb AS response, + seil.created_at, + (EXISTS ( SELECT 1 + FROM retries r + WHERE (r.id = seil.id))) AS retry + FROM (hdb_catalog.hdb_scheduled_events se + LEFT JOIN hdb_catalog.hdb_scheduled_event_invocation_logs seil ON ((seil.event_id = se.id))) + WHERE (((se.webhook_conf)::text !~~ '%email/%'::text) AND (seil.created_at >= '2024-01-01 00:00:00+00'::timestamp with time zone)) + ), all_events AS ( + SELECT payments.session_id, + 
payments.event_id, + payments.event_type, + payments.status, + payments.response, + payments.created_at, + payments.retry + FROM payments + UNION ALL + SELECT submissions.session_id, + submissions.event_id, + submissions.event_type, + submissions.status, + submissions.response, + submissions.created_at, + submissions.retry + FROM submissions + ) + SELECT ls.flow_id, + ae.session_id, + ae.event_id, + ae.event_type, + ae.status, + ae.response, + ae.created_at, + ae.retry + FROM (all_events ae + LEFT JOIN lowcal_sessions ls ON ((ls.id = ae.session_id))) + WHERE (ls.flow_id IS NOT NULL) + ORDER BY ae.created_at DESC; diff --git a/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/up.sql b/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/up.sql new file mode 100644 index 0000000000..d779c76103 --- /dev/null +++ b/hasura.planx.uk/migrations/1723816384301_update_submissions_log_view_format_idox_destination/up.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE VIEW "public"."submission_services_log" AS + WITH payments AS ( + SELECT ps.session_id, + ps.payment_id AS event_id, + 'Pay'::text AS event_type, + initcap(ps.status) AS status, + jsonb_build_object('status', ps.status, 'description', pse.comment, 'govuk_pay_reference', ps.payment_id) AS response, + ps.created_at, + false AS retry + FROM (payment_status ps + LEFT JOIN payment_status_enum pse ON ((pse.value = ps.status))) + WHERE ((ps.status <> 'created'::text) AND (ps.created_at >= '2024-01-01 00:00:00+00'::timestamp with time zone)) + ), retries AS ( + SELECT hdb_scheduled_event_invocation_logs.id + FROM hdb_catalog.hdb_scheduled_event_invocation_logs + WHERE ((hdb_scheduled_event_invocation_logs.event_id, hdb_scheduled_event_invocation_logs.created_at) IN ( SELECT seil.event_id, + max(seil.created_at) AS max + FROM (hdb_catalog.hdb_scheduled_event_invocation_logs seil + LEFT JOIN hdb_catalog.hdb_scheduled_events se ON ((se.id = 
seil.event_id))) + WHERE (se.tries > 1) + GROUP BY seil.event_id)) + ), submissions AS ( + SELECT ((((seil.request -> 'payload'::text) -> 'payload'::text) ->> 'sessionId'::text))::uuid AS session_id, + se.id AS event_id, + CASE + WHEN ((se.webhook_conf)::text ~~ '%bops%'::text) THEN 'Submit to BOPS'::text + WHEN ((se.webhook_conf)::text ~~ '%uniform%'::text) THEN 'Submit to Uniform'::text + WHEN ((se.webhook_conf)::text ~~ '%email-submission%'::text) THEN 'Send to email'::text + WHEN ((se.webhook_conf)::text ~~ '%upload-submission%'::text) THEN 'Upload to AWS S3'::text + WHEN ((se.webhook_conf)::text ~~ '%idox%'::text) THEN 'Submit to Idox Nexus'::text + ELSE (se.webhook_conf)::text + END AS event_type, + CASE + WHEN (seil.status = 200) THEN 'Success'::text + ELSE format('Failed (%s)'::text, seil.status) + END AS status, + (seil.response)::jsonb AS response, + seil.created_at, + (EXISTS ( SELECT 1 + FROM retries r + WHERE (r.id = seil.id))) AS retry + FROM (hdb_catalog.hdb_scheduled_events se + LEFT JOIN hdb_catalog.hdb_scheduled_event_invocation_logs seil ON ((seil.event_id = se.id))) + WHERE (((se.webhook_conf)::text !~~ '%email/%'::text) AND (seil.created_at >= '2024-01-01 00:00:00+00'::timestamp with time zone)) + ), all_events AS ( + SELECT payments.session_id, + payments.event_id, + payments.event_type, + payments.status, + payments.response, + payments.created_at, + payments.retry + FROM payments + UNION ALL + SELECT submissions.session_id, + submissions.event_id, + submissions.event_type, + submissions.status, + submissions.response, + submissions.created_at, + submissions.retry + FROM submissions + ) + SELECT ls.flow_id, + ae.session_id, + ae.event_id, + ae.event_type, + ae.status, + ae.response, + ae.created_at, + ae.retry + FROM (all_events ae + LEFT JOIN lowcal_sessions ls ON ((ls.id = ae.session_id))) + WHERE (ls.flow_id IS NOT NULL) + ORDER BY ae.created_at DESC; diff --git a/infrastructure/application/Pulumi.staging.yaml 
b/infrastructure/application/Pulumi.staging.yaml index 7927a328cc..1e82aeb6e0 100644 --- a/infrastructure/application/Pulumi.staging.yaml +++ b/infrastructure/application/Pulumi.staging.yaml @@ -26,6 +26,10 @@ config: application:hasura-memory: "2048" application:hasura-planx-api-key: secure: AAABANHLs3ItPxkteh0chwMP2bKuHO3ovuRLi4FsIrCqerzXVIaTLFDqNR+4KBTeMPz4cnF5tCTwsrJv9GruZdXU+lg= + application:idox-nexus-client: + secure: AAABABprDQomVM9wJQkTMTVtUKvj9lVVVJLdpEBR5p3ibZYvSMedTOb2jztPa0vm6UCH2hilyOV2fsd+akYd3sP8Up5G26mkEKSLSSN4Nc9fu/Hi3Apn1rXHnw== + application:idox-nexus-submission-url: https://dev.identity.idoxgroup.com/agw/submission-api + application:idox-nexus-token-url: https://dev.identity.idoxgroup.com/uaa/oauth/token application:jwt-secret: secure: AAABACbmLC4176IBxX5iL64/nycSXEsCYSQ0hTb7t2OCVlWUc627Vr/EpBhcqPrw9q+0z8UOvRJG5/c/DflZxfPxyJRUVNu+ application:metabase-encryption-secret-key: diff --git a/infrastructure/application/index.ts b/infrastructure/application/index.ts index a5817ffe67..b3162f1724 100644 --- a/infrastructure/application/index.ts +++ b/infrastructure/application/index.ts @@ -409,6 +409,18 @@ export = async () => { name: "ENCRYPTION_KEY", value: config.requireSecret("encryption-key"), }, + { + name: "IDOX_NEXUS_CLIENT", + value: config.requireSecret("idox-nexus-client"), + }, + { + name: "IDOX_NEXUS_TOKEN_URL", + value: config.requireSecret("idox-nexus-token-url"), + }, + { + name: "IDOX_NEXUS_SUBMISSION_URL", + value: config.requireSecret("idox-nexus-submission-url"), + }, generateCORSAllowList(CUSTOM_DOMAINS, DOMAIN), ...generateTeamSecrets(config, env), ],