Tweak upload js function to upload in batches

amcmanu3 2024-08-06 17:48:17 -04:00
parent fc6f85a16b
commit 6ef93908ae
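
In short: instead of creating one fetch promise per chunk and awaiting them all at once, the upload loop now sends chunks in fixed-size batches, waiting for each batch (plus a short delay) to finish before starting the next. The sketch below distills that pattern on its own, assuming a hypothetical sendChunk() in place of the real per-chunk fetch call; the chunk and batch sizes mirror the values in the diff.

// Minimal sketch of the batched-upload pattern this commit introduces.
// sendChunk() is a hypothetical stand-in for the real per-chunk request.
const CHUNK_SIZE = 1024 * 1024 * 10; // 10MB, as in the diff
const BATCH_SIZE = 30;               // chunks in flight per batch, as in the diff

function delay(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

async function sendChunk(chunk, index) {
    // Placeholder: the real code POSTs the chunk with hash/metadata headers.
    console.log(`chunk ${index}: ${chunk.size} bytes`);
}

async function uploadInBatches(file) {
    const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
    for (let i = 0; i < totalChunks; i += BATCH_SIZE) {
        const batch = [];
        for (let j = 0; j < BATCH_SIZE && i + j < totalChunks; j++) {
            const start = (i + j) * CHUNK_SIZE;
            const chunk = file.slice(start, Math.min(start + CHUNK_SIZE, file.size));
            batch.push(sendChunk(chunk, i + j));
        }
        await Promise.all(batch); // finish the current batch before the next one
        await delay(2000);        // pause between batches to ease rate limiting
    }
}

Capping the number of in-flight requests bounds browser memory use and avoids overwhelming the server, at the cost of a fixed delay per batch.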


@@ -1,67 +1,9 @@
-async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
-    if (file == null) {
-        try {
-            file = $("#file")[0].files[0];
-        } catch {
-            bootbox.alert("Please select a file first.")
-            return;
-        }
-    }
-    const fileId = uuidv4();
-    const token = getCookie("_xsrf");
-    if (type !== "server_upload") {
-        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
-    }
-    let url = ``
-    if (type === "server_upload") {
-        url = `/api/v2/servers/${serverId}/files/upload/`;
-    } else if (type === "background") {
-        url = `/api/v2/crafty/admin/upload/`
-    } else if (type === "import") {
-        url = `/api/v2/servers/import/upload/`
-    }
-    console.log(url)
-    const chunkSize = 1024 * 1024 * 10; // 10MB
-    const totalChunks = Math.ceil(file.size / chunkSize);
-    const uploadPromises = [];
-    let errors = []; // Array to store errors
-    try {
-        let res = await fetch(url, {
-            method: 'POST',
-            headers: {
-                'X-XSRFToken': token,
-                'chunked': true,
-                'fileSize': file.size,
-                'type': type,
-                'totalChunks': totalChunks,
-                'fileName': file.name,
-                'location': path,
-                'fileId': fileId,
-            },
-            body: null,
-        });
-        if (!res.ok) {
-            let errorResponse = await res.json();
-            throw new Error(JSON.stringify(errorResponse));
-        }
-        let responseData = await res.json();
-        if (responseData.status !== "ok") {
-            throw new Error(JSON.stringify(responseData));
-        }
-        for (let i = 0; i < totalChunks; i++) {
-            const start = i * chunkSize;
-            const end = Math.min(start + chunkSize, file.size);
-            const chunk = file.slice(start, end);
-            const chunk_hash = await calculateFileHash(chunk);
-            const uploadPromise = fetch(url, {
+function delay(ms) {
+    return new Promise(resolve => setTimeout(resolve, ms));
+}
+
+async function uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i, file_num, updateProgressBar) {
+    return fetch(url, {
         method: 'POST',
         body: chunk,
         headers: {
@@ -92,22 +34,98 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
             // Update progress bar
             const progress = (i + 1) / totalChunks * 100;
             updateProgressBar(Math.round(progress), type, file_num);
-        })
+        });
+}
+
+async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
+    if (file == null) {
+        try {
+            file = $("#file")[0].files[0];
+        } catch {
+            bootbox.alert("Please select a file first.");
+            return;
+        }
+    }
+    const fileId = uuidv4();
+    const token = getCookie("_xsrf");
+    if (type !== "server_upload") {
+        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
+    }
+    let url = '';
+    if (type === "server_upload") {
+        url = `/api/v2/servers/${serverId}/files/upload/`;
+    } else if (type === "background") {
+        url = `/api/v2/crafty/admin/upload/`;
+    } else if (type === "import") {
+        url = `/api/v2/servers/import/upload/`;
+    }
+    console.log(url);
+    const chunkSize = 1024 * 1024 * 10; // 10MB
+    const totalChunks = Math.ceil(file.size / chunkSize);
+    const errors = [];
+    const batchSize = 30; // Number of chunks to upload in each batch
+    try {
+        let res = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'X-XSRFToken': token,
+                'chunked': true,
+                'fileSize': file.size,
+                'type': type,
+                'totalChunks': totalChunks,
+                'fileName': file.name,
+                'location': path,
+                'fileId': fileId,
+            },
+            body: null,
+        });
+        if (!res.ok) {
+            let errorResponse = await res.json();
+            throw new Error(JSON.stringify(errorResponse));
+        }
+        let responseData = await res.json();
+        if (responseData.status !== "ok") {
+            throw new Error(JSON.stringify(responseData));
+        }
+        for (let i = 0; i < totalChunks; i += batchSize) {
+            const batchPromises = [];
+            for (let j = 0; j < batchSize && (i + j) < totalChunks; j++) {
+                const start = (i + j) * chunkSize;
+                const end = Math.min(start + chunkSize, file.size);
+                const chunk = file.slice(start, end);
+                const chunk_hash = await calculateFileHash(chunk);
+                const uploadPromise = uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i + j, file_num, updateProgressBar)
                 .catch(error => {
                     errors.push(error); // Store the error
                 });
-            uploadPromises.push(uploadPromise);
+                batchPromises.push(uploadPromise);
             }
-        await Promise.all(uploadPromises);
+            // Wait for the current batch to complete before proceeding to the next batch
+            await Promise.all(batchPromises);
+            // Optional delay between batches to account for rate limiting
+            await delay(2000); // Adjust the delay time (in milliseconds) as needed
+        }
     } catch (error) {
         errors.push(error); // Store the error
     }
     if (errors.length > 0) {
         const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('<br>');
-        console.log(errorMessage)
+        console.log(errorMessage);
         bootbox.alert({
             title: 'Error',
             message: errorMessage,