Refactor upload route

Import uploads broke
amcmanu3 2024-05-26 23:08:43 -04:00
parent 7c8781e09e
commit 45aacb97c8
5 changed files with 362 additions and 226 deletions

View File

@ -44,8 +44,8 @@ from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
ApiServersServerFilesUploadHandler,
)
from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@ -239,9 +239,19 @@ def api_handlers(handler_args):
ApiServersServerFilesZipHandler,
handler_args,
),
(
r"/api/v2/crafty/admin/upload/?",
ApiFilesUploadHandler,
handler_args,
),
(
r"/api/v2/servers/import/upload/?",
ApiFilesUploadHandler,
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
ApiServersServerFilesUploadHandler,
ApiFilesUploadHandler,
handler_args,
),
(
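The three routes above now share a single handler, ApiFilesUploadHandler; which flow runs is selected by the URL and the "type" header. A minimal sketch (in Python, for brevity) of the initial handshake against the import route — BASE_URL and XSRF_TOKEN are hypothetical stand-ins, and the header names are taken from the handler added below:

# Sketch: initiate a chunked import upload against the new unified route.
# BASE_URL / XSRF_TOKEN are assumptions, not part of this commit.
import math
import uuid
import requests

BASE_URL = "https://localhost:8443"  # hypothetical Crafty instance
XSRF_TOKEN = "..."                   # value of the _xsrf cookie

file_size = 3 * 1024 * 1024
chunk_size = 1024 * 1024
init = requests.post(
    f"{BASE_URL}/api/v2/servers/import/upload/",
    headers={
        "X-XSRFToken": XSRF_TOKEN,
        "chunked": "true",
        "type": "import",
        "fileSize": str(file_size),
        "total_chunks": str(math.ceil(file_size / chunk_size)),
        "filename": "world.zip",
        "fileId": str(uuid.uuid4()),
    },
    cookies={"_xsrf": XSRF_TOKEN},
    verify=False,  # default installs use a self-signed certificate
)
print(init.json())  # expects {"status": "ok", "data": {"file-id": ...}}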

View File

@ -0,0 +1,183 @@
import os
import json
import shutil
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.web.base_api_handler import BaseApiHandler
class ApiFilesUploadHandler(BaseApiHandler):
async def post(self, server_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
upload_type = self.request.headers.get("type")
if server_id:
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(
400, {"status": "error", "error": "NOT_AUTHORIZED"}
)
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(
400, {"status": "error", "error": "NOT_AUTHORIZED"}
)
u_type = "server_upload"
# superuser uploads that are not server imports are admin config files
elif auth_data[4]["superuser"] and upload_type != "import":
u_type = "admin_config"
self.upload_dir = os.path.join(
self.controller.project_root,
"app/frontend/static/assets/images/auth/custom",
)
elif upload_type == "import":
if not self.controller.crafty_perms.can_create_server(
auth_data[4]["user_id"]
):
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"data": {"message": ""},
},
)
self.upload_dir = os.path.join(
self.controller.project_root, "import", "upload"
)
u_type = "server_import"
else:
# no recognized upload type and no server target
return self.finish_json(
400, {"status": "error", "error": "INVALID UPLOAD TYPE"}
)
# Get the headers from the request
# hash headers are accepted but not yet verified by this handler
_file_hash = self.request.headers.get("fileHash", 0)
_chunk_hash = self.request.headers.get("chunk-hash", 0)
self.file_id = self.request.headers.get("fileId")
self.chunked = self.request.headers.get("chunked", True)
self.filename = self.request.headers.get("filename", None)
try:
file_size = int(self.request.headers.get("fileSize", None))
total_chunks = int(self.request.headers.get("total_chunks", None))
except (TypeError, ValueError):
return self.finish_json(
400, {"status": "error", "error": "TYPE ERROR", "data": {}}
)
self.chunk_index = self.request.headers.get("chunkId")
if u_type == "server_upload":
self.upload_dir = self.request.headers.get("location", None)
self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
# ensure the target directory exists before checking its free space
os.makedirs(self.upload_dir, exist_ok=True)
_total, _used, free = shutil.disk_usage(self.upload_dir)
# Check to see if we have enough space
if free <= file_size:
return self.finish_json(
507,
{
"status": "error",
"error": "NO STORAGE SPACE",
"data": {"message": "Out Of Space!"},
},
)
# If there is no chunk index, we know this is the initial request
if self.chunked and not self.chunk_index:
return self.finish_json(
200, {"status": "ok", "data": {"file-id": self.file_id}}
)
if not self.chunked:
# single-request upload: write the whole body out in one shot
with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
file.write(self.request.body)
return self.finish_json(
200,
{
"status": "completed",
"data": {"message": "File uploaded successfully"},
},
)
# Create the upload and temp directories if they don't exist
os.makedirs(self.upload_dir, exist_ok=True)
os.makedirs(self.temp_dir, exist_ok=True)
# Read headers and query parameters
content_length = int(self.request.headers.get("Content-Length", 0))
if content_length <= 0:
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID CONTENT LENGTH",
"data": {"message": "Invalid content length"},
},
)
if not self.filename or self.chunk_index is None or total_chunks is None:
return self.finish_json(
400,
{
"status": "error",
"error": "INDEX ERROR",
"data": {
"message": "Filename, chunk_index,"
" and total_chunks are required"
},
},
)
# File paths
file_path = os.path.join(self.upload_dir, self.filename)
chunk_path = os.path.join(
self.temp_dir, f"{self.filename}.part{self.chunk_index}"
)
# Save the chunk
with open(chunk_path, "wb") as f:
f.write(self.request.body)
# Check if all chunks are received
received_chunks = [
f
for f in os.listdir(self.temp_dir)
if f.startswith(f"{self.filename}.part")
]
if len(received_chunks) == total_chunks:
with open(file_path, "wb") as outfile:
for i in range(total_chunks):
chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
with open(chunk_file, "rb") as infile:
outfile.write(infile.read())
os.remove(chunk_file)
self.finish_json(
200,
{
"status": "completed",
"data": {"message": "File uploaded successfully"},
},
)
else:
self.write(
json.dumps(
{
"status": "partial",
"message": f"Chunk {self.chunk_index} received",
}
)
)
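The chunk bookkeeping above is easiest to reason about in isolation. Below is a standalone sketch of the same ".partN" reassembly scheme the handler performs; the function name and parameters are illustrative, not part of the commit:

# Sketch of the handler's reassembly step: once every ".partN" piece of a
# file is present in temp_dir, stitch them into upload_dir in index order.
import os


def reassemble(temp_dir, upload_dir, filename, total_chunks):
    """Return True if all chunks were present and the file was assembled."""
    received = [
        f for f in os.listdir(temp_dir) if f.startswith(f"{filename}.part")
    ]
    if len(received) != total_chunks:
        return False  # still waiting on chunks
    with open(os.path.join(upload_dir, filename), "wb") as outfile:
        for i in range(total_chunks):
            chunk_path = os.path.join(temp_dir, f"{filename}.part{i}")
            with open(chunk_path, "rb") as infile:
                outfile.write(infile.read())
            os.remove(chunk_path)  # clean up each part as it is consumed
    return True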

View File

@ -578,91 +578,3 @@ class ApiServersServerFilesZipHandler(BaseApiHandler):
},
)
return self.finish_json(200, {"status": "ok"})
class ApiServersServerFilesUploadHandler(BaseApiHandler):
async def post(self, server_id: str):
for header, value in self.request.headers.items():
print(f"{header}: {value}")
fileHash = self.request.headers.get("fileHash", 0)
chunkHash = self.request.headers.get("chunk-hash", 0)
file_size = self.request.headers.get("fileSize", None)
self.file_id = self.request.headers.get("fileId")
self.chunked = self.request.headers.get("chunked", True)
self.filename = self.request.headers.get("filename", None)
try:
total_chunks = int(self.request.headers.get("total_chunks", None))
except TypeError:
return self.finish_json(
400, {"status": "error", "data": "INVALID CHUNK COUNT"}
)
self.chunk_index = self.request.headers.get("chunkId")
self.location = self.request.headers.get("location", None)
self.upload_dir = self.location
self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
if self.chunked and not self.chunk_index:
return self.finish_json(
200, {"status": "ok", "data": {"file-id": self.file_id}}
)
# Create the upload and temp directories if they don't exist
os.makedirs(self.upload_dir, exist_ok=True)
os.makedirs(self.temp_dir, exist_ok=True)
# Read headers and query parameters
content_length = int(self.request.headers.get("Content-Length"))
if content_length <= 0:
return self.finish_json(
400, {"status": "error", "data": {"message": "Invalid content length"}}
)
if not self.filename or self.chunk_index is None or total_chunks is None:
return self.finish_json(
400,
{
"status": "error",
"data": {
"message": "Filename, chunk_index,"
" and total_chunks are required"
},
},
)
# File paths
file_path = os.path.join(self.upload_dir, self.filename)
chunk_path = os.path.join(
self.temp_dir, f"{self.filename}.part{self.chunk_index}"
)
# Save the chunk
with open(chunk_path, "wb") as f:
f.write(self.request.body)
# Check if all chunks are received
received_chunks = [
f
for f in os.listdir(self.temp_dir)
if f.startswith(f"{self.filename}.part")
]
if len(received_chunks) == total_chunks:
with open(file_path, "wb") as outfile:
for i in range(total_chunks):
chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
with open(chunk_file, "rb") as infile:
outfile.write(infile.read())
os.remove(chunk_file)
self.write(
json.dumps(
{"status": "completed", "message": "File uploaded successfully"}
)
)
else:
self.write(
json.dumps(
{
"status": "partial",
"message": f"Chunk {self.chunk_index} received",
}
)
)

View File

@ -723,10 +723,9 @@
}
}
async function uploadFile(file, path, onProgress) {
async function uploadFile(file, path, file_num, onProgress) {
const fileId = uuidv4();
const token = getCookie("_xsrf")
const fileInput = document.getElementById('fileInput');
if (!file) {
alert("Please select a file first.");
return;
@ -741,6 +740,8 @@
headers: {
'X-XSRFToken': token,
'chunked': true,
'type': "server_upload",
'fileSize': file.size,
'total_chunks': totalChunks,
'location': path,
'filename': file.name,
@ -767,20 +768,61 @@
'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
'Content-Length': chunk.size,
'chunked': true,
'type': "server_upload",
'fileSize': file.size,
'total_chunks': totalChunks,
'filename': file.name,
'location': path,
'filename': file.name,
'fileId': fileId,
'chunkId': i,
},
}).then(response => response.json())
.then(data => {
if (data.status === "completed") {
alert("File uploaded successfully!");
let caught = false;
try {
if (document.getElementById(path).classList.contains("clicked")) {
var expanded = true;
}
} catch {
var expanded = false;
}
try {
var par_el = document.getElementById(path + "ul");
var items = par_el.children;
} catch (err) {
console.log(err)
caught = true;
var par_el = document.getElementById("files-tree");
var items = par_el.children;
}
let name = file.name;
let full_path = path + '/' + name
let flag = false;
for (var k = 0; k < items.length; ++k) {
if ($(items[k]).attr("data-name") == name) {
flag = true;
}
}
if (!flag) {
if (caught && expanded == false) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="d-block tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
} else if (expanded == true) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
}
setTreeViewContext();
}
$(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped");
$(`#upload-progress-bar-${i + 1}`).addClass("bg-success");
$(`#upload-progress-bar-${i + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>')
} else if (data.status !== "partial") {
throw new Error(data.message);
}
// Update progress bar
const progress = (i + 1) / totalChunks * 100;
updateProgressBar(Math.round(progress), file_num);
});
uploadPromises.push(uploadPromise);
@ -792,7 +834,10 @@
alert("Error uploading file: " + error.message);
}
}
function updateProgressBar(progress, i) {
$(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
$(`#upload-progress-bar-${i + 1}`).html(progress + '%');
}
function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = Math.random() * 16 | 0,
@ -801,103 +846,6 @@
});
}
async function sendFile(file, path, serverId, left, i, onProgress) {
let xmlHttpRequest = new XMLHttpRequest();
let token = getCookie("_xsrf")
let fileName = file.name
let target = '/upload?server_id=' + serverId
let mimeType = file.type
let size = file.size
xmlHttpRequest.upload.addEventListener('progress', function (e) {
if (e.loaded <= size) {
var percent = Math.round(e.loaded / size * 100);
$(`#upload-progress-bar-${i + 1}`).css('width', percent + '%');
$(`#upload-progress-bar-${i + 1}`).html(percent + '%');
}
});
xmlHttpRequest.open('POST', target, true);
xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
xmlHttpRequest.setRequestHeader('X-Content-Length', size);
xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
xmlHttpRequest.setRequestHeader('X-Path', path);
xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', 'server_files')
xmlHttpRequest.setRequestHeader('X-Files-Left', left);
xmlHttpRequest.setRequestHeader('X-FileName', fileName);
xmlHttpRequest.setRequestHeader('X-ServerId', serverId);
xmlHttpRequest.upload.addEventListener('progress', (event) =>
onProgress(Math.floor(event.loaded / event.total * 100)), false);
xmlHttpRequest.addEventListener('load', (event) => {
if (event.target.responseText == 'success') {
console.log('Upload for file', file.name, 'was successful!');
let caught = false;
try {
if (document.getElementById(path).classList.contains("clicked")) {
var expanded = true;
}
} catch {
var expanded = false;
}
try {
var par_el = document.getElementById(path + "ul");
var items = par_el.children;
} catch (err) {
console.log(err)
caught = true;
var par_el = document.getElementById("files-tree");
var items = par_el.children;
}
let name = file.name;
console.log(par_el)
let full_path = path + '/' + name
let flag = false;
for (var k = 0; k < items.length; ++k) {
if ($(items[k]).attr("data-name") == name) {
flag = true;
}
}
if (!flag) {
if (caught && expanded == false) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="d-block tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
} else if (expanded == true) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
}
setTreeViewContext();
}
$(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped");
$(`#upload-progress-bar-${i + 1}`).addClass("bg-success");
$(`#upload-progress-bar-${i + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>')
}
else {
let response_text = JSON.parse(event.target.responseText);
var x = document.querySelector('.bootbox');
if (x) {
x.remove()
}
var x = document.querySelector('.modal-content');
if (x) {
x.remove()
}
console.log(JSON.parse(event.target.responseText).info)
bootbox.alert({
message: JSON.parse(event.target.responseText).info,
callback: function () {
window.location.reload();
}
});
doUpload = false;
}
}, false);
xmlHttpRequest.addEventListener('error', (e) => {
console.error('Error while uploading file', file.name + '.', 'Event:', e)
}, false);
xmlHttpRequest.send(file);
}
let uploadWaitDialog;
let doUpload = true;
@ -938,51 +886,44 @@
});
let nFiles = files.files.length;
for (i = 0; i < nFiles; i++) {
if (!doUpload) {
doUpload = true;
hideUploadBox();
break;
}
const uploadPromises = [];
for (let i = 0; i < nFiles; i++) {
const file = files.files[i];
const progressHtml = `
<div style="width: 100%; min-width: 100%;">
${files.files[i].name}:
<br><div
id="upload-progress-bar-${i + 1}"
class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style="width: 100%; height: 10px;"
aria-valuenow="0"
aria-valuemin="0"
aria-valuemax="100"
></div>
</div><br>
`;
<div style="width: 100%; min-width: 100%;">
${file.name}:
<br><div
id="upload-progress-bar-${i + 1}"
class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style="width: 100%; height: 10px;"
aria-valuenow="0"
aria-valuemin="0"
aria-valuemax="100"
></div>
</div><br>
`;
$('#upload-progress-bar-parent').append(progressHtml);
await uploadFile(files.files[i], path, (progress) => {
const uploadPromise = uploadFile(file, path, i, (progress) => {
$(`#upload-progress-bar-${i + 1}`).attr('aria-valuenow', progress)
$(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
});
uploadPromises.push(uploadPromise);
}
try {
await Promise.all(uploadPromises);
hideUploadBox();
} catch (error) {
alert("Error uploading file: " + error.message);
}
hideUploadBox();
//$('#upload_file').submit(); //.trigger('submit');
}
}
}
});
var fileList = document.getElementById("files");
fileList.addEventListener("change", function (e) {
var list = "";
let files = Array.from(this.files)
files.forEach(file => {
list += "<li class='col-xs-12 file-list'>" + file.name + "</li>"
})
document.getElementById("fileList").innerHTML = list;
}, false);
});
}
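The byte-range arithmetic in uploadFile (start = i * chunkSize, end = min(start + chunkSize, file.size)) determines both the Content-Range header and the slice sent per request. A small sanity check, sketched in Python for brevity, confirming the chunks tile the file with no gaps or overlap; the 1 MB constant mirrors chunkSize in the JS:

# Sketch: verify the chunk math used by uploadFile() covers the whole file.
import math

def chunk_ranges(file_size, chunk_size=1024 * 1024):
    total_chunks = math.ceil(file_size / chunk_size)
    for i in range(total_chunks):
        start = i * chunk_size
        end = min(start + chunk_size, file_size)
        yield start, end  # sent as Content-Range: bytes start-(end-1)/file_size

ranges = list(chunk_ranges(2_500_000))
assert ranges[0] == (0, 1_048_576)                                # first chunk
assert ranges[-1][1] == 2_500_000                                 # covers the tail
assert all(a[1] == b[0] for a, b in zip(ranges, ranges[1:]))      # contiguous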

View File

@ -514,7 +514,7 @@
'labelZipFile', data['lang']) }}</label>
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info upload-button" id="upload-button" onclick="sendFile()"
<button type="button" class="btn btn-info upload-button" id="upload-button" onclick="uploadFile()"
disabled>{{ translate('serverWizard',
'uploadButton', data['lang']) }}</button>
</div>
@ -852,6 +852,96 @@
});
var upload = false;
var file;
async function uploadFile() {
file = $("#file")[0].files[0]
const fileId = uuidv4();
const token = getCookie("_xsrf")
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>'
if (!file) {
alert("Please select a file first.");
return;
}
const chunkSize = 1024 * 1024; // 1MB
const totalChunks = Math.ceil(file.size / chunkSize);
const uploadPromises = [];
let res = await fetch(`/api/v2/servers/import/upload/`, {
method: 'POST',
headers: {
'X-XSRFToken': token,
'chunked': true,
'fileSize': file.size,
'type': "import",
'total_chunks': totalChunks,
'filename': file.name,
'fileId': fileId,
},
body: null,
});
let responseData = await res.json();
let file_id = ""
if (responseData.status === "ok") {
file_id = responseData.data["file-id"]
}
for (let i = 0; i < totalChunks; i++) {
const start = i * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
const uploadPromise = fetch(`/api/v2/servers/import/upload/`, {
method: 'POST',
body: chunk,
headers: {
'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
'Content-Length': chunk.size,
'fileSize': file.size,
'chunked': true,
'type': "import",
'total_chunks': totalChunks,
'filename': file.name,
'fileId': fileId,
'chunkId': i,
},
}).then(response => response.json())
.then(data => {
if (data.status === "completed") {
$("#upload_input").html(`<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%;"><input value="${file.name}" type="text" id="file-uploaded" disabled></input> 🔒</div>`);
document.getElementById("lower_half").style.visibility = "visible";
document.getElementById("lower_half").hidden = false;
} else if (data.status !== "partial") {
throw new Error(data.message);
}
// Update progress bar
const progress = (i + 1) / totalChunks * 100;
updateProgressBar(Math.round(progress));
});
uploadPromises.push(uploadPromise);
}
try {
await Promise.all(uploadPromises);
} catch (error) {
alert("Error uploading file: " + error.message);
}
}
function updateProgressBar(progress) {
$(`#upload-progress-bar`).css('width', progress + '%');
$(`#upload-progress-bar`).html(progress + '%');
}
function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = Math.random() * 16 | 0,
v = c === 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}
function sendFile() {
file = $("#file")[0].files[0]
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>'