Merge branch 'dev' into refactor/rework-css

This commit is contained in:
Silversthorn 2024-08-13 20:01:57 +02:00
commit 084b04dbd7
7 changed files with 105 additions and 66 deletions

View File

@ -1,6 +1,27 @@
# Changelog
## --- [4.4.1] - 2024/08/06
## --- [4.4.4] - 2024/TBD
### New features
TBD
### Bug fixes
TBD
### Tweaks
TBD
### Lang
TBD
<br><br>
## --- [4.4.3] - 2024/08/08
### Bug fixes
- Fix schedules creation fail due to missing action ID ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/791))
<br><br>
## --- [4.4.2] - 2024/08/07
### Bug fixes
- Migrations | Fix exception message on file not found for backups migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/789))
- UploadAPI | Upload chunks in batches to avoid overloading browser cache ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/788))
<br><br>
## --- [4.4.1] - 2024/08/06
### Patch Fixes
- Migrations | Fix orphan backup configurations crashing migration operation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
- Migrations | Fix missing default configuration if no server backup config exists during the migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))

View File

@ -1,5 +1,5 @@
[![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
# Crafty Controller 4.4.1
# Crafty Controller 4.4.4
> Python based Control Panel for your Minecraft Server
## What is Crafty Controller?

View File

@ -341,7 +341,7 @@ class TasksManager:
job_data["cron_string"],
job_data["parent"],
job_data["delay"],
job_data["action_id"],
job_data.get("action_id", None),
)
# Checks to make sure some doofus didn't actually make the newly
@ -372,7 +372,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -399,7 +399,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -416,7 +416,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -436,7 +436,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -529,7 +529,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -553,7 +553,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -570,7 +570,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -590,7 +590,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data["action_id"],
"action_id": job_data.get("action_id", None),
}
],
)

View File

@ -1,5 +1,5 @@
{
"major": 4,
"minor": 4,
"sub": 1
"sub": 4
}

View File

@ -1,33 +1,74 @@
/**
 * Pause asynchronously for the given duration.
 * @param {number} ms - Duration of the pause, in milliseconds.
 * @returns {Promise<void>} Resolves once the timeout fires.
 */
function delay(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
/**
 * Upload one chunk of a file to the server.
 *
 * Sends the chunk as a POST body with metadata headers (byte range, hash,
 * chunk index, total count, destination path, …) so the backend can
 * reassemble the file. On a successful "completed" or "partial" response
 * the progress bar callback is invoked with the percentage done.
 *
 * @param {File|{size:number,name:string}} file - The whole file being uploaded.
 * @param {string} url - Upload endpoint.
 * @param {Blob} chunk - The slice of `file` to send.
 * @param {number} start - Inclusive byte offset of the chunk within the file.
 * @param {number} end - Exclusive byte offset of the chunk within the file.
 * @param {string} chunk_hash - Hash of the chunk for server-side verification.
 * @param {number} totalChunks - Total number of chunks for this file.
 * @param {string} type - Upload category (e.g. "server_upload", "background", "import").
 * @param {string} path - Destination location on the server.
 * @param {string} fileId - Unique id grouping all chunks of this file.
 * @param {number} i - Zero-based index of this chunk.
 * @param {number} file_num - Index of the file in a multi-file upload (for progress UI).
 * @param {function(number,string,number):void} updateProgressBar - Progress callback.
 * @returns {Promise<void>} Resolves when the chunk is accepted.
 * @throws {Error} If the HTTP response is not ok, or the server reports a
 *                 status other than "completed"/"partial".
 */
async function uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i, file_num, updateProgressBar) {
    const response = await fetch(url, {
        method: 'POST',
        body: chunk,
        headers: {
            'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
            'Content-Length': chunk.size,
            'fileSize': file.size,
            'chunkHash': chunk_hash,
            'chunked': true,
            'type': type,
            'totalChunks': totalChunks,
            'fileName': file.name,
            'location': path,
            'fileId': fileId,
            'chunkId': i,
        },
    });
    if (!response.ok) {
        // Surface the server's JSON error payload to the caller.
        const errorData = await response.json();
        throw new Error(JSON.stringify(errorData) || 'Unknown error occurred');
    }
    const data = await response.json();
    if (data.status !== "completed" && data.status !== "partial") {
        throw new Error(data.message || 'Unknown error occurred');
    }
    // Report progress as a whole-number percentage of chunks sent so far.
    const progress = (i + 1) / totalChunks * 100;
    updateProgressBar(Math.round(progress), type, file_num);
}
async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
if (file == null) {
try {
file = $("#file")[0].files[0];
} catch {
bootbox.alert("Please select a file first.")
bootbox.alert("Please select a file first.");
return;
}
}
const fileId = uuidv4();
const token = getCookie("_xsrf");
if (type !== "server_upload") {
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
}
let url = ``
let url = '';
if (type === "server_upload") {
url = `/api/v2/servers/${serverId}/files/upload/`;
} else if (type === "background") {
url = `/api/v2/crafty/admin/upload/`
url = `/api/v2/crafty/admin/upload/`;
} else if (type === "import") {
url = `/api/v2/servers/import/upload/`
url = `/api/v2/servers/import/upload/`;
}
console.log(url)
console.log(url);
const chunkSize = 1024 * 1024 * 10; // 10MB
const totalChunks = Math.ceil(file.size / chunkSize);
const uploadPromises = [];
let errors = []; // Array to store errors
const errors = [];
const batchSize = 30; // Number of chunks to upload in each batch
try {
let res = await fetch(url, {
method: 'POST',
@ -55,59 +96,36 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgr
throw new Error(JSON.stringify(responseData));
}
for (let i = 0; i < totalChunks; i++) {
const start = i * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
const chunk_hash = await calculateFileHash(chunk);
for (let i = 0; i < totalChunks; i += batchSize) {
const batchPromises = [];
const uploadPromise = fetch(url, {
method: 'POST',
body: chunk,
headers: {
'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
'Content-Length': chunk.size,
'fileSize': file.size,
'chunkHash': chunk_hash,
'chunked': true,
'type': type,
'totalChunks': totalChunks,
'fileName': file.name,
'location': path,
'fileId': fileId,
'chunkId': i,
},
})
.then(async response => {
if (!response.ok) {
const errorData = await response.json();
throw new Error(JSON.stringify(errorData) || 'Unknown error occurred');
}
return response.json(); // Return the JSON data
})
.then(data => {
if (data.status !== "completed" && data.status !== "partial") {
throw new Error(data.message || 'Unknown error occurred');
}
// Update progress bar
const progress = (i + 1) / totalChunks * 100;
updateProgressBar(Math.round(progress), type, file_num);
})
.catch(error => {
errors.push(error); // Store the error
});
for (let j = 0; j < batchSize && (i + j) < totalChunks; j++) {
const start = (i + j) * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
const chunk_hash = await calculateFileHash(chunk);
uploadPromises.push(uploadPromise);
const uploadPromise = uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i + j, file_num, updateProgressBar)
.catch(error => {
errors.push(error); // Store the error
});
batchPromises.push(uploadPromise);
}
// Wait for the current batch to complete before proceeding to the next batch
await Promise.all(batchPromises);
// Optional delay between batches to account for rate limiting
await delay(2000); // Adjust the delay time (in milliseconds) as needed
}
await Promise.all(uploadPromises);
} catch (error) {
errors.push(error); // Store the error
}
if (errors.length > 0) {
const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('<br>');
console.log(errorMessage)
console.log(errorMessage);
bootbox.alert({
title: 'Error',
message: errorMessage,

View File

@ -207,7 +207,7 @@ def migrate(migrator: Migrator, database, **kwargs):
)
except FileNotFoundError as why:
logger.error(
f"Could not move backup {file} for {server.server_name} to new location with error {why}"
f"Could not move backups for {server.server_name} to new location with error {why}"
)
Console.debug("Migrations: Dropping old backup table")

View File

@ -3,7 +3,7 @@ sonar.organization=crafty-controller
# This is the name and version displayed in the SonarCloud UI.
sonar.projectName=Crafty 4
sonar.projectVersion=4.4.1
sonar.projectVersion=4.4.4
sonar.python.version=3.9, 3.10, 3.11
sonar.exclusions=app/migrations/**, app/frontend/static/assets/vendors/**