Merge branch 'bugfix/upload-flood' into 'dev'

Upload chunks in batches

See merge request crafty-controller/crafty-4!788
Iain Powrie 2024-08-07 00:26:26 +00:00
commit 589b3b5cc8
2 changed files with 72 additions and 53 deletions


@@ -1,7 +1,8 @@
 # Changelog
-## --- [4.4.2] - 2024/TBD
+## --- [4.4.2] - 2024/08/07
 ### Bug fixes
 - Migrations | Fix exception message on file not found for backups migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/789))
+- UploadAPI | Upload chunks in batches to avoid overloading browser cache ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/788))
 <br><br>
 ## --- [4.4.1] - 2024/08/06
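
The batching pattern behind the UploadAPI entry above boils down to the loop sketched here. This is a minimal standalone illustration, not the project's exact code: sendChunk is a hypothetical callback standing in for the real per-chunk fetch, and the 10MB chunk size and 30-chunk batch size mirror the constants in the diff below. The point is that only one batch of requests is in flight at a time, so the browser never queues thousands of pending fetches for a large file.

// Minimal sketch of batched chunk uploads (sendChunk is a placeholder callback).
async function uploadInBatches(file, sendChunk, chunkSize = 10 * 1024 * 1024, batchSize = 30) {
    const totalChunks = Math.ceil(file.size / chunkSize);
    for (let i = 0; i < totalChunks; i += batchSize) {
        const batch = [];
        // Slice off up to `batchSize` chunks and start their uploads concurrently.
        for (let j = 0; j < batchSize && i + j < totalChunks; j++) {
            const start = (i + j) * chunkSize;
            const chunk = file.slice(start, Math.min(start + chunkSize, file.size));
            batch.push(sendChunk(chunk, i + j));
        }
        // Wait for the whole batch to settle before starting the next one.
        await Promise.all(batch);
    }
}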


@@ -1,67 +1,9 @@
-async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
-    if (file == null) {
-        try {
-            file = $("#file")[0].files[0];
-        } catch {
-            bootbox.alert("Please select a file first.")
-            return;
+function delay(ms) {
+    return new Promise(resolve => setTimeout(resolve, ms));
+}
-        }
-    }
-    const fileId = uuidv4();
-    const token = getCookie("_xsrf");
-    if (type !== "server_upload") {
-        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
-    }
-    let url = ``
-    if (type === "server_upload") {
-        url = `/api/v2/servers/${serverId}/files/upload/`;
-    } else if (type === "background") {
-        url = `/api/v2/crafty/admin/upload/`
-    } else if (type === "import") {
-        url = `/api/v2/servers/import/upload/`
-    }
-    console.log(url)
-    const chunkSize = 1024 * 1024 * 10; // 10MB
-    const totalChunks = Math.ceil(file.size / chunkSize);
-    const uploadPromises = [];
-    let errors = []; // Array to store errors
-    try {
-        let res = await fetch(url, {
-            method: 'POST',
-            headers: {
-                'X-XSRFToken': token,
-                'chunked': true,
-                'fileSize': file.size,
-                'type': type,
-                'totalChunks': totalChunks,
-                'fileName': file.name,
-                'location': path,
-                'fileId': fileId,
-            },
-            body: null,
-        });
-        if (!res.ok) {
-            let errorResponse = await res.json();
-            throw new Error(JSON.stringify(errorResponse));
-        }
-        let responseData = await res.json();
-        if (responseData.status !== "ok") {
-            throw new Error(JSON.stringify(responseData));
-        }
-        for (let i = 0; i < totalChunks; i++) {
-            const start = i * chunkSize;
-            const end = Math.min(start + chunkSize, file.size);
-            const chunk = file.slice(start, end);
-            const chunk_hash = await calculateFileHash(chunk);
-            const uploadPromise = fetch(url, {
+async function uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i, file_num, updateProgressBar) {
+    return fetch(url, {
         method: 'POST',
         body: chunk,
         headers: {
@@ -92,22 +34,98 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null)
         // Update progress bar
         const progress = (i + 1) / totalChunks * 100;
         updateProgressBar(Math.round(progress), type, file_num);
-            })
+    });
+}
+
+async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
+    if (file == null) {
+        try {
+            file = $("#file")[0].files[0];
+        } catch {
+            bootbox.alert("Please select a file first.");
+            return;
+        }
+    }
+    const fileId = uuidv4();
+    const token = getCookie("_xsrf");
+    if (type !== "server_upload") {
+        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
+    }
+    let url = '';
+    if (type === "server_upload") {
+        url = `/api/v2/servers/${serverId}/files/upload/`;
+    } else if (type === "background") {
+        url = `/api/v2/crafty/admin/upload/`;
+    } else if (type === "import") {
+        url = `/api/v2/servers/import/upload/`;
+    }
+    console.log(url);
+    const chunkSize = 1024 * 1024 * 10; // 10MB
+    const totalChunks = Math.ceil(file.size / chunkSize);
+    const errors = [];
+    const batchSize = 30; // Number of chunks to upload in each batch
+    try {
+        let res = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'X-XSRFToken': token,
+                'chunked': true,
+                'fileSize': file.size,
+                'type': type,
+                'totalChunks': totalChunks,
+                'fileName': file.name,
+                'location': path,
+                'fileId': fileId,
+            },
+            body: null,
+        });
+        if (!res.ok) {
+            let errorResponse = await res.json();
+            throw new Error(JSON.stringify(errorResponse));
+        }
+        let responseData = await res.json();
+        if (responseData.status !== "ok") {
+            throw new Error(JSON.stringify(responseData));
+        }
+        for (let i = 0; i < totalChunks; i += batchSize) {
+            const batchPromises = [];
+            for (let j = 0; j < batchSize && (i + j) < totalChunks; j++) {
+                const start = (i + j) * chunkSize;
+                const end = Math.min(start + chunkSize, file.size);
+                const chunk = file.slice(start, end);
+                const chunk_hash = await calculateFileHash(chunk);
+                const uploadPromise = uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i + j, file_num, updateProgressBar)
                     .catch(error => {
                         errors.push(error); // Store the error
                     });
-            uploadPromises.push(uploadPromise);
+                batchPromises.push(uploadPromise);
             }
-        await Promise.all(uploadPromises);
+            // Wait for the current batch to complete before proceeding to the next batch
+            await Promise.all(batchPromises);
+            // Optional delay between batches to account for rate limiting
+            await delay(2000); // Adjust the delay time (in milliseconds) as needed
+        }
     } catch (error) {
         errors.push(error); // Store the error
     }
     if (errors.length > 0) {
         const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('<br>');
-        console.log(errorMessage)
+        console.log(errorMessage);
         bootbox.alert({
             title: 'Error',
             message: errorMessage,