chore: First pass at updating types
DafyddLlyr committed Nov 28, 2024
1 parent 3fa7a5a commit b51fd63
Showing 8 changed files with 24 additions and 24 deletions.
1 change: 0 additions & 1 deletion .env.example
@@ -22,7 +22,6 @@ MICROSOFT_CLIENT_SECRET=👻
 
 # AWS credentials for uploading user files from local and pull request environments to a staging S3 bucket
 AWS_S3_REGION=eu-west-2
-AWS_S3_ACL=public-read
 AWS_S3_BUCKET=👻
 AWS_ACCESS_KEY=👻
 AWS_SECRET_KEY=👻
10 changes: 5 additions & 5 deletions api.planx.uk/modules/file/service/deleteFile.ts
@@ -1,26 +1,26 @@
 import type { DeleteObjectsCommandInput } from "@aws-sdk/client-s3";
-import type { DeleteObjectsRequest } from "aws-sdk/clients/s3.js";
 import { getS3KeyFromURL, s3Factory } from "./utils.js";
 
 export const deleteFilesByURL = async (
   fileURLs: string[],
 ): Promise<string[]> => {
   const keys = fileURLs.map(getS3KeyFromURL);
-  return await deleteFilesByKey(keys);
+  const result = await deleteFilesByKey(keys);
+  return result;
 };
 
 export const deleteFilesByKey = async (keys: string[]): Promise<string[]> => {
   const s3 = s3Factory();
-  const params = getDeleteFilesParams(keys) as DeleteObjectsCommandInput;
+  const params = getDeleteFilesParams(keys);
   try {
-    s3.deleteObjects(params);
+    await s3.deleteObjects(params);
     return keys;
   } catch (error) {
     throw Error(`Failed to delete S3 files: ${error}`);
   }
 };
 
-const getDeleteFilesParams = (keys: string[]): DeleteObjectsRequest => ({
+const getDeleteFilesParams = (keys: string[]): DeleteObjectsCommandInput => ({
   Bucket: process.env.AWS_S3_BUCKET!,
   Delete: {
     Objects: keys.map((key) => ({ Key: key })),
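
The change above swaps the v2 request type (DeleteObjectsRequest from aws-sdk) for its v3 equivalent (DeleteObjectsCommandInput from @aws-sdk/client-s3) and awaits the call so a rejected promise surfaces inside the try/catch. A minimal standalone sketch of that v3 pattern, with a hypothetical bucket name and region standing in for the real environment config:

import { S3, type DeleteObjectsCommandInput } from "@aws-sdk/client-s3";

// Illustrative client config; the real code builds the client via s3Factory().
const s3 = new S3({ region: "eu-west-2" });

export const deleteByKeys = async (keys: string[]): Promise<string[]> => {
  const params: DeleteObjectsCommandInput = {
    Bucket: "example-bucket", // hypothetical; the real code reads process.env.AWS_S3_BUCKET
    Delete: { Objects: keys.map((key) => ({ Key: key })) },
  };
  // v3 client methods return promises directly (no .promise() as in v2);
  // without the await, a failed delete would never reach the surrounding try/catch.
  await s3.deleteObjects(params);
  return keys;
};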
12 changes: 9 additions & 3 deletions api.planx.uk/modules/file/service/getFile.ts
@@ -4,14 +4,20 @@ import { s3Factory } from "./utils.js";
 export const getFileFromS3 = async (fileId: string) => {
   const s3 = s3Factory();
 
-  const params = {
+  const params: PutObjectCommandInput = {
     Key: fileId,
-  } as PutObjectCommandInput;
+    Bucket: process.env.AWS_S3_BUCKET,
+  };
 
   const file = await s3.getObject(params);
 
+  // TODO: test this
+  if (!file.Body) throw Error(`Missing body from S3 file ${fileId}`);
+
+  const body = Buffer.from(await file.Body.transformToByteArray());
+
   return {
-    body: file.Body,
+    body,
     isPrivate: file.Metadata?.is_private === "true",
     headers: {
       "Content-Type": file.ContentType,
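
In the v3 SDK, getObject no longer resolves Body to a Buffer; it is a streaming payload that has to be drained explicitly, which is what the new transformToByteArray() call does. A small self-contained sketch of that read path, with the bucket and region hard-coded for illustration:

import { S3 } from "@aws-sdk/client-s3";

const s3 = new S3({ region: "eu-west-2" }); // illustrative config only

export const readObjectAsBuffer = async (key: string): Promise<Buffer> => {
  const file = await s3.getObject({ Bucket: "example-bucket", Key: key }); // hypothetical bucket
  if (!file.Body) throw new Error(`Missing body from S3 file ${key}`);
  // transformToByteArray() consumes the response stream into a Uint8Array,
  // which Buffer.from() turns into the Node Buffer that callers such as exportZip expect.
  return Buffer.from(await file.Body.transformToByteArray());
};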
11 changes: 6 additions & 5 deletions api.planx.uk/modules/file/service/uploadFile.ts
@@ -19,7 +19,7 @@ export const uploadPublicFile = async (
   const { params, key, fileType } = generateFileParams(file, filename, filekey);
 
   await s3.putObject(params);
-  const fileUrl = buildFileUrl(key, "public");
+  const fileUrl = await buildFileUrl(key, "public");
 
   return {
     fileType,
@@ -41,7 +41,7 @@ export const uploadPrivateFile = async (
   };
 
   await s3.putObject(params);
-  const fileUrl = buildFileUrl(key, "private");
+  const fileUrl = await buildFileUrl(key, "private");
 
   return {
     fileType,
@@ -75,13 +75,14 @@ export function generateFileParams(
   const fileType = mime.getType(filename);
   const key = `${filekey || nanoid()}/${filename}`;
 
-  const params = {
-    ACL: process.env.AWS_S3_ACL,
+  const params: PutObjectCommandInput = {
+    ACL: "public-read",
+    Bucket: process.env.AWS_S3_BUCKET,
     Key: key,
     Body: file?.buffer || JSON.stringify(file),
     ContentDisposition: `inline;filename="${filename}"`,
     ContentType: file?.mimetype || "application/json",
-  } as PutObjectCommandInput;
+  };
 
   return {
     fileType,
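
generateFileParams now annotates the object as PutObjectCommandInput instead of casting it, so the compiler checks the fields, and the ACL moves from the AWS_S3_ACL env var to the literal "public-read" (which is why that variable disappears from .env.example, docker-compose.yml, and the Pulumi config elsewhere in this commit). A hedged sketch of the resulting shape, with placeholder values where the real code reads the environment:

import { S3, type PutObjectCommandInput } from "@aws-sdk/client-s3";

const s3 = new S3({ region: "eu-west-2" }); // illustrative config only

export const uploadBuffer = async (key: string, body: Buffer, mimetype: string) => {
  // Annotating (rather than casting) means a mistyped or missing field fails to compile.
  const params: PutObjectCommandInput = {
    ACL: "public-read",       // canned ACL, previously read from AWS_S3_ACL
    Bucket: "example-bucket", // hypothetical; the real code uses process.env.AWS_S3_BUCKET
    Key: key,
    Body: body,
    ContentDisposition: `inline;filename="${key}"`,
    ContentType: mimetype,
  };
  await s3.putObject(params);
  return key;
};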
10 changes: 3 additions & 7 deletions api.planx.uk/modules/file/service/utils.ts
@@ -3,15 +3,11 @@ import { isLiveEnv } from "../../../helpers.js";
 
 export function s3Factory() {
   return new S3({
-    // The key params is no longer supported in v3, and can be removed.
-    // @deprecated The object needs to be passed to individual operations where it's intended.
-    params: { Bucket: process.env.AWS_S3_BUCKET },
-
-    region: process.env.AWS_S3_REGION,
+    region: process.env.AWS_S3_REGION!,
 
     credentials: {
-      accessKeyId: process.env.AWS_ACCESS_KEY,
-      secretAccessKey: process.env.AWS_SECRET_KEY,
+      accessKeyId: process.env.AWS_ACCESS_KEY!,
+      secretAccessKey: process.env.AWS_SECRET_KEY!,
     },
 
     ...useMinio(),
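
The deleted params: { Bucket } option was a v2 convenience that bound a default bucket to the client; v3 clients do not accept it, so every command now carries its own Bucket (as the other files in this commit do), and the non-null assertions mark the remaining env vars as required. A rough sketch of that setup, with a hypothetical health-check call to show the per-request bucket:

import { S3 } from "@aws-sdk/client-s3";

// v3 client config: region and credentials only; there is no client-level default Bucket.
const s3 = new S3({
  region: process.env.AWS_S3_REGION!, // non-null assertions: these vars must be set
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY!,
    secretAccessKey: process.env.AWS_SECRET_KEY!,
  },
});

// Hypothetical usage: with no default bucket, each operation names it explicitly.
export const bucketExists = async (): Promise<boolean> => {
  try {
    await s3.headBucket({ Bucket: process.env.AWS_S3_BUCKET! });
    return true;
  } catch {
    return false;
  }
};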
2 changes: 1 addition & 1 deletion api.planx.uk/modules/send/utils/exportZip.ts
@@ -245,7 +245,7 @@ export class ExportZip {
     const { body } = await getFileFromS3(decodedS3Key);
     if (!body) throw new Error("file not found");
 
-    this.zip.addFile(name, body as Buffer);
+    this.zip.addFile(name, body);
   }
 
   toBuffer(): Buffer {
1 change: 0 additions & 1 deletion docker-compose.yml
@@ -117,7 +117,6 @@ services:
       API_URL_EXT: ${API_URL_EXT}
       APP_ENVIRONMENT: ${APP_ENVIRONMENT}
       AWS_ACCESS_KEY: ${AWS_ACCESS_KEY}
-      AWS_S3_ACL: ${AWS_S3_ACL}
       AWS_S3_BUCKET: ${AWS_S3_BUCKET}
       AWS_S3_REGION: ${AWS_S3_REGION}
       AWS_SECRET_KEY: ${AWS_SECRET_KEY}
1 change: 0 additions & 1 deletion infrastructure/application/index.ts
@@ -339,7 +339,6 @@ export = async () => {
           name: "AWS_S3_BUCKET",
           value: pulumi.interpolate`${apiBucket.bucket}`,
         },
-        { name: "AWS_S3_ACL", value: "public-read" },
         {
           name: "FILE_API_KEY",
           value: config.requireSecret("file-api-key"),
