diff --git a/.env.example b/.env.example
index e226500261..fbdd5ecd11 100644
--- a/.env.example
+++ b/.env.example
@@ -22,7 +22,6 @@ MICROSOFT_CLIENT_SECRET=👻
 
 # AWS credentials for uploading user files from local and pull request environments to a staging S3 bucket
 AWS_S3_REGION=eu-west-2
-AWS_S3_ACL=public-read
 AWS_S3_BUCKET=👻
 AWS_ACCESS_KEY=👻
 AWS_SECRET_KEY=👻
diff --git a/api.planx.uk/modules/file/service/deleteFile.ts b/api.planx.uk/modules/file/service/deleteFile.ts
index 8ea143fbca..87557b31ea 100644
--- a/api.planx.uk/modules/file/service/deleteFile.ts
+++ b/api.planx.uk/modules/file/service/deleteFile.ts
@@ -1,26 +1,26 @@
 import type { DeleteObjectsCommandInput } from "@aws-sdk/client-s3";
-import type { DeleteObjectsRequest } from "aws-sdk/clients/s3.js";
 import { getS3KeyFromURL, s3Factory } from "./utils.js";
 
 export const deleteFilesByURL = async (
   fileURLs: string[],
 ): Promise<string[]> => {
   const keys = fileURLs.map(getS3KeyFromURL);
-  return await deleteFilesByKey(keys);
+  const result = await deleteFilesByKey(keys);
+  return result;
 };
 
 export const deleteFilesByKey = async (keys: string[]): Promise<string[]> => {
   const s3 = s3Factory();
-  const params = getDeleteFilesParams(keys) as DeleteObjectsCommandInput;
+  const params = getDeleteFilesParams(keys);
   try {
-    s3.deleteObjects(params);
+    await s3.deleteObjects(params);
     return keys;
   } catch (error) {
     throw Error(`Failed to delete S3 files: ${error}`);
   }
 };
 
-const getDeleteFilesParams = (keys: string[]): DeleteObjectsRequest => ({
+const getDeleteFilesParams = (keys: string[]): DeleteObjectsCommandInput => ({
   Bucket: process.env.AWS_S3_BUCKET!,
   Delete: {
     Objects: keys.map((key) => ({ Key: key })),
diff --git a/api.planx.uk/modules/file/service/getFile.ts b/api.planx.uk/modules/file/service/getFile.ts
index dae32f703f..db40874ba1 100644
--- a/api.planx.uk/modules/file/service/getFile.ts
+++ b/api.planx.uk/modules/file/service/getFile.ts
@@ -4,14 +4,20 @@ import { s3Factory } from "./utils.js";
 export const getFileFromS3 = async (fileId: string) => {
   const s3 = s3Factory();
 
-  const params = {
+  const params: PutObjectCommandInput = {
     Key: fileId,
-  } as PutObjectCommandInput;
+    Bucket: process.env.AWS_S3_BUCKET,
+  };
 
   const file = await s3.getObject(params);
 
+  // TODO: test this
+  if (!file.Body) throw Error(`Missing body from S3 file ${fileId}`);
+
+  const body = Buffer.from(await file.Body.transformToByteArray());
+
   return {
-    body: file.Body,
+    body,
     isPrivate: file.Metadata?.is_private === "true",
     headers: {
       "Content-Type": file.ContentType,
diff --git a/api.planx.uk/modules/file/service/uploadFile.ts b/api.planx.uk/modules/file/service/uploadFile.ts
index caa8a004be..5d4974b0a0 100644
--- a/api.planx.uk/modules/file/service/uploadFile.ts
+++ b/api.planx.uk/modules/file/service/uploadFile.ts
@@ -19,7 +19,7 @@ export const uploadPublicFile = async (
   const { params, key, fileType } = generateFileParams(file, filename, filekey);
 
   await s3.putObject(params);
-  const fileUrl = buildFileUrl(key, "public");
+  const fileUrl = await buildFileUrl(key, "public");
 
   return {
     fileType,
@@ -41,7 +41,7 @@ export const uploadPrivateFile = async (
   };
 
   await s3.putObject(params);
-  const fileUrl = buildFileUrl(key, "private");
+  const fileUrl = await buildFileUrl(key, "private");
 
   return {
     fileType,
@@ -75,13 +75,14 @@ export function generateFileParams(
   const fileType = mime.getType(filename);
   const key = `${filekey || nanoid()}/${filename}`;
 
-  const params = {
-    ACL: process.env.AWS_S3_ACL,
+  const params: PutObjectCommandInput = {
+    ACL: "public-read",
+    Bucket: process.env.AWS_S3_BUCKET,
     Key: key,
     Body: file?.buffer || JSON.stringify(file),
     ContentDisposition: `inline;filename="${filename}"`,
     ContentType: file?.mimetype || "application/json",
-  } as PutObjectCommandInput;
+  };
 
   return {
     fileType,
diff --git a/api.planx.uk/modules/file/service/utils.ts b/api.planx.uk/modules/file/service/utils.ts
index c1846eee95..0f95e170e5 100644
--- a/api.planx.uk/modules/file/service/utils.ts
+++ b/api.planx.uk/modules/file/service/utils.ts
@@ -3,15 +3,11 @@
 import { isLiveEnv } from "../../../helpers.js";
 
 export function s3Factory() {
   return new S3({
-    // The key params is no longer supported in v3, and can be removed.
-    // @deprecated The object needs to be passed to individual operations where it's intended.
-    params: { Bucket: process.env.AWS_S3_BUCKET },
-
-    region: process.env.AWS_S3_REGION,
+    region: process.env.AWS_S3_REGION!,
     credentials: {
-      accessKeyId: process.env.AWS_ACCESS_KEY,
-      secretAccessKey: process.env.AWS_SECRET_KEY,
+      accessKeyId: process.env.AWS_ACCESS_KEY!,
+      secretAccessKey: process.env.AWS_SECRET_KEY!,
     },
     ...useMinio(),
   });
diff --git a/api.planx.uk/modules/send/utils/exportZip.ts b/api.planx.uk/modules/send/utils/exportZip.ts
index e67a32ce7c..da068d0aef 100644
--- a/api.planx.uk/modules/send/utils/exportZip.ts
+++ b/api.planx.uk/modules/send/utils/exportZip.ts
@@ -245,7 +245,7 @@ export class ExportZip {
     const { body } = await getFileFromS3(decodedS3Key);
     if (!body) throw new Error("file not found");
 
-    this.zip.addFile(name, body as Buffer);
+    this.zip.addFile(name, body);
   }
 
   toBuffer(): Buffer {
diff --git a/docker-compose.yml b/docker-compose.yml
index 8569cc75dd..7243ae7c86 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -117,7 +117,6 @@ services:
       API_URL_EXT: ${API_URL_EXT}
      APP_ENVIRONMENT: ${APP_ENVIRONMENT}
       AWS_ACCESS_KEY: ${AWS_ACCESS_KEY}
-      AWS_S3_ACL: ${AWS_S3_ACL}
       AWS_S3_BUCKET: ${AWS_S3_BUCKET}
       AWS_S3_REGION: ${AWS_S3_REGION}
       AWS_SECRET_KEY: ${AWS_SECRET_KEY}
diff --git a/infrastructure/application/index.ts b/infrastructure/application/index.ts
index 5e599d415d..764e3913ca 100644
--- a/infrastructure/application/index.ts
+++ b/infrastructure/application/index.ts
@@ -339,7 +339,6 @@ export = async () => {
             name: "AWS_S3_BUCKET",
             value: pulumi.interpolate`${apiBucket.bucket}`,
           },
-          { name: "AWS_S3_ACL", value: "public-read" },
           {
             name: "FILE_API_KEY",
             value: config.requireSecret("file-api-key"),
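
Note for reviewers: the changes above all follow from three behavioural differences between aws-sdk v2 and @aws-sdk/client-s3 v3: the client no longer accepts a default `params: { Bucket }` (so each call names its bucket), every operation returns a promise (so un-awaited calls silently drop errors), and a GetObject response's `Body` is a byte stream rather than a Buffer. A minimal sketch of those three points, assuming @aws-sdk/client-s3 v3; the bucket name and key below are placeholders, not values from this repo:

import { S3 } from "@aws-sdk/client-s3";

// v3 drops the client-level `params: { Bucket }` default, so each
// operation must name its Bucket explicitly.
const s3 = new S3({ region: "eu-west-2" });

export const demo = async () => {
  // Every v3 operation returns a promise; without `await`, failures
  // are silently dropped (as in the old fire-and-forget deleteObjects).
  await s3.deleteObjects({
    Bucket: "placeholder-bucket", // placeholder
    Delete: { Objects: [{ Key: "placeholder/key" }] },
  });

  // GetObject's Body is now a stream wrapper, not a Buffer; callers
  // that expect bytes (e.g. zip.addFile) need it materialised first.
  const file = await s3.getObject({
    Bucket: "placeholder-bucket", // placeholder
    Key: "placeholder/key",
  });
  if (!file.Body) throw Error("Missing body");
  return Buffer.from(await file.Body.transformToByteArray());
};

The same pattern explains the individual hunks: deleteFile.ts gains an `await` on `s3.deleteObjects`, getFile.ts materialises `Body` via `transformToByteArray()` so exportZip.ts no longer needs the `as Buffer` cast, and the `Bucket` moves out of s3Factory into each operation's params.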