Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Production deploy #3209

Merged
merged 15 commits into from
May 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/pizza-teardown.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
action: destroy
api_key: ${{ secrets.VULTR_API_KEY }}
domain: ${{ env.DOMAIN }}
os_type: alpine
os_type: ubuntu
plan: vc2-1c-1gb
pull_request_id: ${{ env.PULLREQUEST_ID }}
region: lhr
Expand Down
26 changes: 7 additions & 19 deletions .github/workflows/pull-request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ jobs:
working-directory: ${{ env.EDITOR_DIRECTORY }}
- run: pnpm build
working-directory: ${{ env.EDITOR_DIRECTORY }}
- run: pnpm test
- run: pnpm test:silent
working-directory: ${{ env.EDITOR_DIRECTORY }}

build_react_app:
Expand Down Expand Up @@ -307,7 +307,7 @@ jobs:
action: create
api_key: ${{ secrets.VULTR_API_KEY }}
domain: ${{ env.DOMAIN }}
os_type: alpine
os_type: ubuntu
plan: vc2-1c-1gb
pull_request_id: ${{ env.PULLREQUEST_ID }}
region: lhr
Expand All @@ -324,19 +324,13 @@ jobs:
password: ${{ steps.create.outputs.default_password }}
command_timeout: 20m
script: |
apk update
apk add docker
addgroup root docker
rc-update add docker default
service docker start
apk add docker-cli-compose

apk add git
apt-get update -y

git clone "${{ secrets.AUTHENTICATED_REPO_URL }}"
cd planx-new
git fetch origin "pull/${{ env.PULLREQUEST_ID }}/head" && git checkout FETCH_HEAD

apk add aws-cli
apt-get install awscli -y
export AWS_ACCESS_KEY_ID=${{ secrets.PIZZA_AWS_ACCESS_KEY_ID }}
export AWS_SECRET_ACCESS_KEY=${{ secrets.PIZZA_AWS_SECRET_ACCESS_KEY }}
export AWS_REGION=eu-west-2
Expand All @@ -358,21 +352,15 @@ jobs:
username: root
password: ${{ secrets.SSH_PASSWORD }}
command_timeout: 10m
# TODO: some of below script might be superfluous for server update (rather than create)
script: |
apk update
apk add docker
addgroup root docker
rc-update add docker default
service docker start
apk add docker-cli-compose
apt-get update -y

git clone "${{ secrets.AUTHENTICATED_REPO_URL }}"
cd planx-new
git add . && git stash
git fetch origin "pull/${{ env.PULLREQUEST_ID }}/head" && git checkout FETCH_HEAD

apk add aws-cli
apt-get install awscli -y
export AWS_ACCESS_KEY_ID=${{ secrets.PIZZA_AWS_ACCESS_KEY_ID }}
export AWS_SECRET_ACCESS_KEY=${{ secrets.PIZZA_AWS_SECRET_ACCESS_KEY }}
export AWS_REGION=eu-west-2
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/regression-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ jobs:
working-directory: ${{ env.EDITOR_DIRECTORY }}
- run: pnpm build
working-directory: ${{ env.EDITOR_DIRECTORY }}
- run: pnpm test
- run: pnpm test:silent
working-directory: ${{ env.EDITOR_DIRECTORY }}

end_to_end_tests:
Expand Down
6 changes: 3 additions & 3 deletions api.planx.uk/modules/gis/service/digitalLand.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
} from "@opensystemslab/planx-core/types";
import { gql } from "graphql-request";
import fetch from "isomorphic-fetch";
import { addDesignatedVariable, omitGeometry } from "./helpers";
import { addDesignatedVariable } from "./helpers";
import { baseSchema } from "./local_authorities/metadata/base";
import { $api } from "../../../client";

Expand Down Expand Up @@ -77,8 +77,8 @@
options,
)}${datasets}`;
const res = await fetch(url)
.then((response: { json: () => any }) => response.json())

Check warning on line 80 in api.planx.uk/modules/gis/service/digitalLand.ts

View workflow job for this annotation

GitHub Actions / Run API Tests

Unexpected any. Specify a different type
.catch((error: any) => console.log(error));

Check warning on line 81 in api.planx.uk/modules/gis/service/digitalLand.ts

View workflow job for this annotation

GitHub Actions / Run API Tests

Unexpected any. Specify a different type

// if analytics are "on", store an audit record of the raw response
if (extras?.analytics !== "false") {
Expand Down Expand Up @@ -112,21 +112,21 @@
// check for & add any 'positive' constraints to the formattedResult
let formattedResult: Record<string, Constraint> = {};
if (res && res.count > 0 && res.entities) {
res.entities.forEach((entity: { dataset: any }) => {

Check warning on line 115 in api.planx.uk/modules/gis/service/digitalLand.ts

View workflow job for this annotation

GitHub Actions / Run API Tests

Unexpected any. Specify a different type
// get the planx variable that corresponds to this entity's 'dataset', should never be null because our initial request is filtered on 'dataset'
const key = Object.keys(baseSchema).find((key) =>
baseSchema[key]["digital-land-datasets"]?.includes(entity.dataset),
);
// because there can be many digital land datasets per planx variable, check if this key is already in our result
if (key && Object.keys(formattedResult).includes(key)) {
formattedResult[key]["data"]?.push(omitGeometry(entity));
formattedResult[key]["data"]?.push(entity);
} else {
if (key) {
formattedResult[key] = {
fn: key,
value: true,
text: baseSchema[key].pos,
data: [omitGeometry(entity)],
data: [entity],
category: baseSchema[key].category,
};
}
Expand Down Expand Up @@ -160,7 +160,7 @@
formattedResult["designated.nationalPark"] &&
formattedResult["designated.nationalPark"].value
) {
formattedResult["designated.nationalPark"]?.data?.forEach((entity: any) => {

Check warning on line 163 in api.planx.uk/modules/gis/service/digitalLand.ts

View workflow job for this annotation

GitHub Actions / Run API Tests

Unexpected any. Specify a different type
if (
baseSchema[broads]["digital-land-entities"]?.includes(entity.entity)
) {
Expand Down
2 changes: 1 addition & 1 deletion api.planx.uk/modules/pay/controller.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import assert from "assert";
import { Request } from "express";
import { responseInterceptor } from "http-proxy-middleware";
import { logPaymentStatus } from "../send/utils/helpers";
import { logPaymentStatus } from "./helpers";
import { usePayProxy } from "./proxy";
import { $api } from "../../client";
import { ServerError } from "../../errors";
Expand Down
112 changes: 112 additions & 0 deletions api.planx.uk/modules/pay/helpers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import { gql } from "graphql-request";
import airbrake from "../../airbrake";
import { $api } from "../../client";

/**
 * Records the outcome of a GOV.UK Pay response as a payment_status audit row.
 *
 * When either `flowId` or `sessionId` is missing the status cannot be tied to
 * a session, so an error is reported instead of attempting the insert. Insert
 * failures are reported (not rethrown) so payment processing is never blocked
 * by audit logging.
 */
export async function logPaymentStatus({
  sessionId,
  flowId,
  teamSlug,
  govUkResponse,
}: {
  sessionId: string | undefined;
  flowId: string | undefined;
  teamSlug: string;
  govUkResponse: {
    amount: number;
    payment_id: string;
    state: {
      status: string;
    };
  };
}): Promise<void> {
  // Guard: without both identifiers the status row cannot be associated
  if (!flowId || !sessionId) {
    reportError({
      error: "Could not log the payment status due to missing context value(s)",
      context: { sessionId, flowId, teamSlug },
    });
    return;
  }

  // log payment status response
  try {
    await insertPaymentStatus({
      sessionId,
      flowId,
      teamSlug,
      paymentId: govUkResponse.payment_id,
      status: govUkResponse.state?.status || "unknown",
      amount: govUkResponse.amount,
    });
  } catch (error) {
    reportError({
      error: `Failed to insert a payment status: ${error}`,
      context: { govUkResponse },
    });
  }
}

// TODO: this would ideally live in planx-client
/**
 * Inserts a payment_status audit record via a Hasura mutation.
 *
 * Fire-and-forget from the caller's perspective: the mutation result is not
 * inspected here — any GraphQL/network failure rejects the promise and is
 * handled by the caller (see logPaymentStatus).
 *
 * @throws rethrows any error raised by the GraphQL request
 */
async function insertPaymentStatus({
  flowId,
  sessionId,
  paymentId,
  teamSlug,
  status,
  amount,
}: {
  flowId: string;
  sessionId: string;
  paymentId: string;
  teamSlug: string;
  status: string;
  amount: number;
}): Promise<void> {
  // The response is intentionally discarded; success/failure is signalled
  // by the promise resolving or rejecting.
  await $api.client.request(
    gql`
      mutation InsertPaymentStatus(
        $flowId: uuid!
        $sessionId: uuid!
        $paymentId: String!
        $teamSlug: String!
        $status: payment_status_enum_enum
        $amount: Int!
      ) {
        insert_payment_status(
          objects: {
            flow_id: $flowId
            session_id: $sessionId
            payment_id: $paymentId
            team_slug: $teamSlug
            status: $status
            amount: $amount
          }
        ) {
          affected_rows
        }
      }
    `,
    {
      flowId,
      sessionId,
      teamSlug,
      paymentId,
      status,
      amount,
    },
  );
}

// tmp explicit error handling
/**
 * Reports an error to Airbrake when the notifier is configured; otherwise
 * falls back to the local console logger.
 *
 * @param report.error - the error value or message to report; `unknown`
 *   (rather than `any`) so callers may pass anything without disabling
 *   type-checking inside this function
 * @param report.context - arbitrary contextual values attached to the report
 */
export function reportError(report: { error: unknown; context: object }) {
  if (airbrake) {
    airbrake.notify(report);
    return;
  }
  log(report);
}

// tmp logger
/**
 * Writes an event to stdout, unless logging is disabled via the
 * SUPPRESS_LOGS environment variable (used to keep test output quiet).
 */
function log(event: object | string) {
  if (process.env.SUPPRESS_LOGS) return;
  console.log(event);
}
14 changes: 12 additions & 2 deletions api.planx.uk/modules/send/s3/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { $api } from "../../../client";
import { uploadPrivateFile } from "../../file/service/uploadFile";
import { markSessionAsSubmitted } from "../../saveAndReturn/service/utils";
import axios from "axios";
import { isApplicationTypeSupported } from "../utils/helpers";
import { Passport } from "../../../types";

export async function sendToS3(
req: Request,
Expand Down Expand Up @@ -40,8 +42,14 @@ export async function sendToS3(
});
}

// Generate the ODP Schema JSON
const exportData = await $api.export.digitalPlanningDataPayload(sessionId);
const session = await $api.session.find(sessionId);
const passport = session?.data?.passport as Passport;

// Generate the ODP Schema JSON, skipping validation if not a supported application type
const doValidation = isApplicationTypeSupported(passport);
const exportData = doValidation
? await $api.export.digitalPlanningDataPayload(sessionId)
: await $api.export.digitalPlanningDataPayload(sessionId, true);

// Create and upload the data as an S3 file
const { fileUrl } = await uploadPrivateFile(
Expand All @@ -63,6 +71,7 @@ export async function sendToS3(
message: "New submission from PlanX",
environment: env,
file: fileUrl,
payload: doValidation ? "Validated ODP Schema" : "Discretionary",
},
})
.then((res) => {
Expand All @@ -80,6 +89,7 @@ export async function sendToS3(

return res.status(200).send({
message: `Successfully uploaded submission to S3: ${fileUrl}`,
payload: doValidation ? "Validated ODP Schema" : "Discretionary",
webhookResponse: webhookResponseStatus,
});
} catch (error) {
Expand Down
10 changes: 5 additions & 5 deletions api.planx.uk/modules/send/utils/exportZip.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ describe("buildSubmissionExportZip", () => {
);
});

test("ODP schema json is excluded if unsupported application type", async () => {
test("ODP schema json is included, but not validated, if unsupported application type", async () => {
// set-up mock session passport overwriting "application.type"
const lowcalSessionUnsupportedAppType: Partial<LowCalSession> = {
...mockLowcalSession,
Expand All @@ -243,7 +243,7 @@ describe("buildSubmissionExportZip", () => {
passport: {
data: {
...mockLowcalSession.data!.passport.data,
"application.type": ["listedBuildingConsent"],
"application.type": ["reportAPlanningBreach"],
},
},
},
Expand All @@ -255,13 +255,13 @@ describe("buildSubmissionExportZip", () => {
includeDigitalPlanningJSON: true,
});

expect(mockAddFile).not.toHaveBeenCalledWith(
expect(mockAddFile).toHaveBeenCalledWith(
"application.json",
expect.anything(),
);
});

test("ODP schema json is excluded if no application type", async () => {
test("ODP schema json is included, but not validated, if no application type", async () => {
// set-up mock session passport overwriting "application.type"
const lowcalSessionUnsupportedAppType: Partial<LowCalSession> = {
...mockLowcalSession,
Expand All @@ -284,7 +284,7 @@ describe("buildSubmissionExportZip", () => {
includeDigitalPlanningJSON: true,
});

expect(mockAddFile).not.toHaveBeenCalledWith(
expect(mockAddFile).toHaveBeenCalledWith(
"application.json",
expect.anything(),
);
Expand Down
20 changes: 9 additions & 11 deletions api.planx.uk/modules/send/utils/exportZip.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import { Passport } from "@opensystemslab/planx-core";
import type { Passport as IPassport } from "../../../types";
import type { Stream } from "node:stream";
import type { PlanXExportData } from "@opensystemslab/planx-core/types";
import { isApplicationTypeSupported } from "./helpers";

export async function buildSubmissionExportZip({
sessionId,
Expand All @@ -30,7 +31,7 @@ export async function buildSubmissionExportZip({
const sessionData = await $api.session.find(sessionId);
if (!sessionData) {
throw new Error(
`session ${sessionId} not found so could not create Uniform submission zip`,
`session ${sessionId} not found so could not create submission zip`,
);
}
const passport = sessionData.data?.passport as IPassport;
Expand All @@ -50,21 +51,18 @@ export async function buildSubmissionExportZip({
});
} catch (error) {
throw new Error(
`Failed to generate OneApp XML for ${sessionId}. Error - ${error}`,
`Failed to generate OneApp XML for ${sessionId} zip. Error - ${error}`,
);
}
}

// add ODP Schema JSON to the zip for supported application types
const supportedApplicationPrefixes = ["ldc", "pa", "pp"];
const applicationType = passport.data?.["application.type"]?.[0];
if (
includeDigitalPlanningJSON &&
applicationType &&
supportedApplicationPrefixes.includes(applicationType.split(".")?.[0])
) {
// add ODP Schema JSON to the zip, skipping validation if an unsupported application type
if (includeDigitalPlanningJSON) {
try {
const schema = await $api.export.digitalPlanningDataPayload(sessionId);
const doValidation = isApplicationTypeSupported(passport);
const schema = doValidation
? await $api.export.digitalPlanningDataPayload(sessionId)
: await $api.export.digitalPlanningDataPayload(sessionId, true);
const schemaBuff = Buffer.from(JSON.stringify(schema, null, 2));
zip.addFile({
name: "application.json",
Expand Down
Loading
Loading