PS-586 fix upload resume
4rthem committed Nov 6, 2023
1 parent 3adb52b commit 590d538
Showing 2 changed files with 71 additions and 80 deletions.
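What the diff below changes, in short: the previous code wrapped the whole multipart upload in a try/catch whose handler called uploadStateStorage.removeUpload on any error, which wiped the very state that resuming depends on, so a retried upload always started over from part 1. It also persisted a path field that the upload loop never read back. With the handler and the path field gone, the stored upload id and part ETags survive a failed attempt, and a plain retry continues from the next part. A hypothetical caller, for illustration only (none of this is part of the commit):

    // Sketch: with this fix, retrying after a failure resumes instead of restarting.
    async function uploadWithRetry(targetId, userId, file, onProgress, attempts = 3) {
        for (let attempt = 1; attempt <= attempts; attempt++) {
            try {
                return await uploadMultipartFile(targetId, userId, file, onProgress);
            } catch (e) {
                if (attempt === attempts) throw e;
                // The resume record persisted by uploadStateStorage is still there,
                // so the next call rebuilds uploadParts from the stored ETags
                // and continues the part loop at c.length + 1.
            }
        }
    }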
147 changes: 69 additions & 78 deletions uploader/client/src/multiPartUpload.js
@@ -6,100 +6,91 @@ const fileChunkSize = 5242880 // 5242880 is the minimum allowed by AWS S3;
 export async function uploadMultipartFile(targetId, userId, file, onProgress) {
     const fileUID = getUniqueFileId(file.file, fileChunkSize);

-    try {
-        const resumableUpload = uploadStateStorage.getUpload(userId, fileUID);
-        const uploadParts = [];
+    const resumableUpload = uploadStateStorage.getUpload(userId, fileUID);
+    const uploadParts = [];

-        let resumeChunkIndex = 1,
-            uploadId,
-            path;
+    let resumeChunkIndex = 1,
+        uploadId;

-        if (resumableUpload) {
-            uploadId = resumableUpload.u;
-            resumeChunkIndex = resumableUpload.c.length + 1;
-            for (let i = 0; i < resumableUpload.c.length; i++) {
-                uploadParts.push({
-                    ETag: resumableUpload.c[i],
-                    PartNumber: i + 1,
-                });
-            }
-        } else {
-            file.abortController = new AbortController();
+    if (resumableUpload) {
+        uploadId = resumableUpload.u;
+        resumeChunkIndex = resumableUpload.c.length + 1;
+        for (let i = 0; i < resumableUpload.c.length; i++) {
+            uploadParts.push({
+                ETag: resumableUpload.c[i],
+                PartNumber: i + 1,
+            });
+        }
+    } else {
+        file.abortController = new AbortController();

-            const res = await apiClient.post(`/uploads`, {
-                filename: file.file.name,
-                type: file.file.type,
-                size: file.file.size,
-            }, {
-                signal: file.abortController.signal,
-            });
-            console.debug('res', res);
-            uploadId = res.data.id;
-            path = res.data.path;
-            uploadStateStorage.initUpload(userId, fileUID, uploadId, path);
-        }
+        const res = await apiClient.post(`/uploads`, {
+            filename: file.file.name,
+            type: file.file.type,
+            size: file.file.size,
+        }, {
+            signal: file.abortController.signal,
+        });
+        uploadId = res.data.id;
+        uploadStateStorage.initUpload(userId, fileUID, uploadId);
+    }

-        const fileSize = file.file.size;
-        const numChunks = Math.floor(fileSize / fileChunkSize) + 1;
+    const fileSize = file.file.size;
+    const numChunks = Math.floor(fileSize / fileChunkSize) + 1;

-        for (let index = resumeChunkIndex; index < numChunks + 1; index++) {
-            const start = (index - 1) * fileChunkSize;
-            const end = (index) * fileChunkSize;
+    for (let index = resumeChunkIndex; index < numChunks + 1; index++) {
+        const start = (index - 1) * fileChunkSize;
+        const end = (index) * fileChunkSize;

-            file.abortController = new AbortController();
+        file.abortController = new AbortController();

-            const getUploadUrlResp = await apiClient.post(`/uploads/${uploadId}/part`, {
-                part: index,
-            }, {
-                signal: file.abortController.signal,
-            });
-            console.debug('getUploadUrlResp', getUploadUrlResp);
+        const getUploadUrlResp = await apiClient.post(`/uploads/${uploadId}/part`, {
+            part: index,
+        }, {
+            signal: file.abortController.signal,
+        });

-            const {url} = getUploadUrlResp.data;
+        const {url} = getUploadUrlResp.data;

-            const blob = (index < numChunks) ? file.file.slice(start, end) : file.file.slice(start);
+        const blob = (index < numChunks) ? file.file.slice(start, end) : file.file.slice(start);

-            file.abortController = new AbortController();
+        file.abortController = new AbortController();

-            const uploadResp = await apiClient.put(url, blob, {
-                signal: file.abortController.signal,
-                anonymous: true,
-                onUploadProgress: (e) => {
-                    const multiPartEvent = {
-                        ...e,
-                        loaded: e.loaded + start,
-                    };
+        const uploadResp = await apiClient.put(url, blob, {
+            signal: file.abortController.signal,
+            anonymous: true,
+            onUploadProgress: (e) => {
+                const multiPartEvent = {
+                    ...e,
+                    loaded: e.loaded + start,
+                };

-                    onProgress(multiPartEvent);
-                }
-            });
+                onProgress(multiPartEvent);
+            }
+        });

-            const eTag = uploadResp.headers.etag;
-            uploadParts.push({
-                ETag: eTag,
-                PartNumber: index,
-            });
+        const eTag = uploadResp.headers.etag;
+        uploadParts.push({
+            ETag: eTag,
+            PartNumber: index,
+        });

-            uploadStateStorage.updateUpload(userId, fileUID, eTag);
-        }
+        uploadStateStorage.updateUpload(userId, fileUID, eTag);
+    }

-        file.abortController = new AbortController();
+    file.abortController = new AbortController();

-        const finalRes = await apiClient.post(`/assets`, {
-            targetId,
-            multipart: {
-                uploadId,
-                parts: uploadParts,
-            }
-        }, {
-            signal: file.abortController.signal,
-        });
+    const finalRes = await apiClient.post(`/assets`, {
+        targetId,
+        multipart: {
+            uploadId,
+            parts: uploadParts,
+        }
+    }, {
+        signal: file.abortController.signal,
+    });

-        uploadStateStorage.removeUpload(userId, fileUID);
+    uploadStateStorage.removeUpload(userId, fileUID);

-        return finalRes;
-    } catch (e) {
-        uploadStateStorage.removeUpload(userId, fileUID);
-        throw e;
-    }
+    return finalRes;
 }
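The chunking arithmetic in the loop above is untouched by this commit. As a standalone illustration (the 12 MiB file size is made up; fileChunkSize is the 5 MiB constant declared at the top of the file):

    // How the loop derives part boundaries for a hypothetical 12 MiB file.
    const fileChunkSize = 5242880;                               // 5 MiB, the minimum S3 part size
    const fileSize = 12 * 1024 * 1024;                           // 12,582,912 bytes
    const numChunks = Math.floor(fileSize / fileChunkSize) + 1;  // => 3

    for (let index = 1; index <= numChunks; index++) {
        const start = (index - 1) * fileChunkSize;
        const end = index < numChunks ? index * fileChunkSize : fileSize; // last part runs to end of file
        console.log(`part ${index}: bytes ${start}-${end - 1}`);
    }
    // part 1: bytes 0-5242879
    // part 2: bytes 5242880-10485759
    // part 3: bytes 10485760-12582911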
4 changes: 2 additions & 2 deletions uploader/client/src/uploadStateStorage.js
@@ -15,20 +15,20 @@ class UploadStateStorage
         return d[userId][fileUID];
     }

-    initUpload(userId, fileUID, uploadId, path) {
+    initUpload(userId, fileUID, uploadId) {
         const d = this.getData();

         d[userId] = d[userId] || {};
         d[userId][fileUID] = {
             u: uploadId,
-            p: path,
             c: [],
         };

         this.setData(d);
     }

     updateUpload(userId, fileUID, chunkETag) {
+        console.debug('updateUpload', userId, fileUID, chunkETag);
         const d = this.getData();
         d[userId][fileUID].c.push(chunkETag);
         this.setData(d);
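For reference, a sketch of the resume record this class persists per user and file after the change; the keys and values are made up, only the u and c field names come from the code:

    // u: the multipart upload id returned by POST /uploads
    // c: the ETag of each completed part, in order; a resumed upload starts at part c.length + 1
    const exampleState = {
        'user-123': {
            'file-uid': {
                u: 'upload-id',
                c: ['etag-part-1', 'etag-part-2'],
            },
        },
    };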
