Merge branch 'dev' into refactor/server_ids

This commit is contained in:
Silversthorn 2024-02-21 23:00:21 +01:00
commit 303de39226
9 changed files with 356 additions and 141 deletions

View File

@@ -2,8 +2,13 @@
## --- [4.2.4] - 2023/TBD
### New features
TBD
### Refactor
- Refactor remote file downloads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
### Bug fixes
TBD
- Fix Bedrock cert issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
- Make sure default.json is read from correct location ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/714))
- Do not allow users at server limit to clone servers ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/718))
- Fix bug where you cannot get to config with unloaded server ([Commit](https://gitlab.com/crafty-controller/crafty-4/-/commit/9de08973b6bb2ddf91283c5c6b0e189ff34f7e24))
### Tweaks
- Bump pyOpenSSL & cryptography for CVE-2024-0727, CVE-2023-50782 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/716))
### Lang

View File

@@ -1,13 +1,14 @@
import os
import json
import threading
import time
import shutil
import logging
from datetime import datetime
import requests
from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
@@ -24,6 +25,113 @@ class ServerJars:
def get_paper_jars():
return PAPERJARS
def get_paper_versions(self, project):
"""
Retrieves a list of versions for a specified project from the PaperMC API.
Parameters:
project (str): The project name to query for available versions.
Returns:
list: A list of version strings available for the project. Returns an empty
list if the API call fails or if no versions are found.
This function makes a GET request to the PaperMC API to fetch the available
project versions. The versions are returned in reverse order, with the most
recent version first.
"""
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/", timeout=2
)
response.raise_for_status()
api_data = response.json()
except Exception as e:
logger.error(f"Error loading project versions for {project}: {e}")
return []
versions = api_data.get("versions", [])
versions.reverse() # Ensure the most recent version comes first
return versions
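For reference, a minimal standalone sketch of the call this method wraps, assuming the public PaperMC endpoint that `paper_base` points at (`https://api.papermc.io`); the project name is illustrative:

```python
import requests

# Hypothetical standalone check of the versions endpoint used by get_paper_versions().
resp = requests.get("https://api.papermc.io/v2/projects/paper", timeout=2)
resp.raise_for_status()
versions = resp.json().get("versions", [])
versions.reverse()  # newest first, matching the method above
print(versions[:3])  # the three most recent versions
```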
def get_paper_build(self, project, version):
"""
Fetches the latest build for a specified project and version from PaperMC API.
Parameters:
project (str): Project name, typically a server software like 'paper'.
version (str): Project version to fetch the build number for.
Returns:
dict or None: The latest build entry if successful, or None on failure or error.
This method attempts to query the PaperMC API for the latest build and
handles exceptions by logging errors and returning None.
"""
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
timeout=2,
)
response.raise_for_status()
api_data = response.json()
except Exception as e:
logger.error(f"Error fetching build for {project} {version}: {e}")
return None
builds = api_data.get("builds", [])
return builds[-1] if builds else None
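A similarly hedged peek at the builds endpoint this method queries; the version is an illustrative example, and the last entry in `builds` is the newest:

```python
import requests

# Hypothetical direct query of the builds endpoint used by get_paper_build();
# "1.20.4" is an illustrative version, not one taken from the code above.
url = "https://api.papermc.io/v2/projects/paper/versions/1.20.4/builds/"
resp = requests.get(url, timeout=2)
resp.raise_for_status()
builds = resp.json().get("builds", [])
latest = builds[-1] if builds else None
if latest:
    print(latest.get("build"))  # numeric build id consumed by get_fetch_url()
```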
def get_fetch_url(self, jar, server, version):
"""
Constructs the URL for downloading a server JAR file based on the server type.
Supports two main types of server JAR sources:
- ServerJars API for servers not in PAPERJARS.
- Paper API for servers available through the Paper project.
Parameters:
jar (str): Name of the JAR file.
server (str): Server software name (e.g., "paper").
version (str): Server version.
Returns:
str or None: URL for downloading the JAR file, or None if the URL cannot be
constructed or an error occurs.
"""
try:
# Check if the server type is not specifically handled by Paper.
if server not in PAPERJARS:
return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"
# For Paper servers, attempt to get the build for the specified version.
paper_build_info = self.get_paper_build(server, version)
if paper_build_info is None:
# Log an error or handle the case where paper_build_info is None
logger.error(
"Error: Unable to get build information for server:"
f" {server}, version: {version}"
)
return None
build = paper_build_info.get("build")
if not build:
# Log an error or handle the case where build is None or not found
logger.error(
f"Error: Build number not found for server:"
f" {server}, version: {version}"
)
return None
# Construct and return the URL for downloading the Paper server JAR.
return (
f"{self.paper_base}/v2/projects/{server}/versions/{version}/"
f"builds/{build}/downloads/{server}-{version}-{build}.jar"
)
except Exception as e:
logger.error(f"An error occurred while constructing fetch URL: {e}")
return None
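Putting the two pieces together, the Paper branch of this method yields a download URL of the following shape (values are illustrative):

```python
# Illustrative values only; the real build number comes from get_paper_build().
server, version, build = "paper", "1.20.4", 496
url = (
    f"https://api.papermc.io/v2/projects/{server}/versions/{version}/"
    f"builds/{build}/downloads/{server}-{version}-{build}.jar"
)
# -> .../projects/paper/versions/1.20.4/builds/496/downloads/paper-1.20.4-496.jar
```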
def _get_api_result(self, call_url: str):
full_url = f"{self.base_url}{call_url}"
@@ -44,40 +152,6 @@ class ServerJars:
return api_response
def get_paper_versions(self, project):
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/", timeout=2
)
response.raise_for_status()
api_data = json.loads(response.content)
except Exception as e:
logger.error(
f"Unable to load https://api.papermc.io/v2/projects/{project}/"
f"api due to error: {e}"
)
return {}
versions = api_data.get("versions", [])
versions.reverse()
return versions
def get_paper_build(self, project, version):
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
timeout=2,
)
response.raise_for_status()
api_data = json.loads(response.content)
except Exception as e:
logger.error(
f"Unable to load https://api.papermc.io/v2/projects/{project}/"
f"api due to error: {e}"
)
return {}
build = api_data.get("builds", [])[-1]
return build
def _read_cache(self):
cache_file = self.helper.serverjar_cache
cache = {}
@@ -213,55 +287,75 @@ class ServerJars:
update_thread.start()
def a_download_jar(self, jar, server, version, path, server_id):
"""
Downloads a server JAR file and performs post-download actions including
notifying users and setting import status.
This method waits for the server registration to complete and retrieves the
download URL for the specified server JAR file.
Upon successful download, it either runs the installer for Forge servers or
simply finishes the import process for other types, then notifies server
users that the download has completed.
Parameters:
- jar (str): The name of the JAR file to download.
- server (str): The type of server software (e.g., 'forge', 'paper').
- version (str): The version of the server software.
- path (str): The local filesystem path where the JAR file will be saved.
- server_id (str): The unique identifier for the server being updated or
imported, used for notifying users and setting the import status.
Returns:
- bool: True if the JAR file was successfully downloaded and saved;
False otherwise.
The method ensures that the server is properly registered before proceeding
with the download and handles exceptions by logging errors and reverting
the import status if necessary.
"""
# delay the download until server registration has finished
time.sleep(3)
if server not in PAPERJARS:
fetch_url = f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"
else:
build = self.get_paper_build(server, version).get("build", None)
if not build:
return
fetch_url = (
f"{self.paper_base}/v2/projects"
f"/{server}/versions/{version}/builds/{build}/downloads/"
f"{server}-{version}-{build}.jar"
)
fetch_url = self.get_fetch_url(jar, server, version)
if not fetch_url:
return False
server_users = PermissionsServers.get_server_user_list(server_id)
# We need to make sure the server is registered before
# we submit a db update for its stats.
# Make sure the server is registered before updating its stats
while True:
try:
ServersController.set_import(server_id)
for user in server_users:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
break
except Exception as ex:
logger.debug(f"server not registered yet. Delaying download - {ex}")
logger.debug(f"Server not registered yet. Delaying download - {ex}")
# open a file stream
with requests.get(fetch_url, timeout=2, stream=True) as r:
success = False
try:
with open(path, "wb") as output:
shutil.copyfileobj(r.raw, output)
# If this is the newer forge version we will run the installer
if server == "forge":
ServersController.finish_import(server_id, True)
else:
ServersController.finish_import(server_id)
# Initiate Download
jar_dir = os.path.dirname(path)
jar_name = os.path.basename(path)
logger.info(fetch_url)
success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
success = True
except Exception as e:
logger.error(f"Unable to save jar to {path} due to error:{e}")
# Post-download actions
if success:
if server == "forge":
# If this is the newer Forge version, run the installer
ServersController.finish_import(server_id, True)
else:
ServersController.finish_import(server_id)
server_users = PermissionsServers.get_server_user_list(server_id)
# Notify users
for user in server_users:
WebSocketManager().broadcast_user(
user, "notification", "Executable download finished"
)
time.sleep(3)
time.sleep(3) # Delay for user notification
WebSocketManager().broadcast_user(user, "send_start_reload", {})
return success
else:
logger.error(f"Unable to save jar to {path} due to download failure.")
ServersController.finish_import(server_id)
return success
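The `a_` prefix and the registration wait above suggest this method runs off the main thread; a hedged sketch of what such a threaded call site might look like, where the instance name and every argument are placeholders:

```python
import threading

# Hypothetical call site; `server_jars` stands in for the ServerJars instance
# Crafty constructs elsewhere, and all arguments are placeholders.
download_thread = threading.Thread(
    target=server_jars.a_download_jar,
    daemon=True,
    args=("servertype", "paper", "1.20.4", "/servers/<uuid>/paper.jar", "42"),
    name="jar_download_thread",
)
download_thread.start()
```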

View File

@@ -5,6 +5,10 @@ import pathlib
import tempfile
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
import urllib.request
import ssl
import time
import certifi
from app.classes.shared.helpers import Helpers
from app.classes.shared.console import Console
@@ -19,6 +23,92 @@ class FileHelpers:
def __init__(self, helper):
self.helper: Helpers = helper
@staticmethod
def ssl_get_file(
url, out_path, out_file, max_retries=3, backoff_factor=2, headers=None
):
"""
Downloads a file from a given URL over HTTPS with certificate verification,
retrying with exponential backoff and reporting download progress.
Parameters:
- url (str): The URL of the file to download. Must start with "https".
- out_path (str): The local path where the file will be saved.
- out_file (str): The name of the file to save the downloaded content as.
- max_retries (int, optional): The maximum number of retry attempts
in case of download failure. Defaults to 3.
- backoff_factor (int, optional): The factor by which the wait time
increases after each failed attempt. Defaults to 2.
- headers (dict, optional):
A dictionary of HTTP headers to send with the request.
Returns:
- bool: True if the download was successful, False otherwise.
Raises:
- urllib.error.URLError: If a URL error occurs during the download.
- ssl.SSLError: If an SSL error occurs during the download.
- Exception: If an unexpected error occurs during the download.
Note:
This method logs critical errors and download progress information.
Ensure that the logger is properly configured to capture this information.
"""
if not url.lower().startswith("https"):
logger.error("SSL File Get - Error: URL must start with https.")
return False
ssl_context = ssl.create_default_context(cafile=certifi.where())
if not headers:
headers = {
"User-Agent": (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/58.0.3029.110 Safari/537.3"
)
}
req = urllib.request.Request(url, headers=headers)
write_path = os.path.join(out_path, out_file)
attempt = 0
logger.info(f"SSL File Get - Requesting remote: {url}")
file_path_full = os.path.join(out_path, out_file)
logger.info(f"SSL File Get - Download Destination: {file_path_full}")
while attempt < max_retries:
try:
with urllib.request.urlopen(req, context=ssl_context) as response:
total_size = response.getheader("Content-Length")
if total_size:
total_size = int(total_size)
downloaded = 0
with open(write_path, "wb") as file:
while True:
chunk = response.read(1024 * 1024) # 1 MB
if not chunk:
break
file.write(chunk)
downloaded += len(chunk)
if total_size:
progress = (downloaded / total_size) * 100
logger.info(
f"SSL File Get - Download progress: {progress:.2f}%"
)
return True
except (urllib.error.URLError, ssl.SSLError) as e:
logger.warning(f"SSL File Get - Attempt {attempt+1} failed: {e}")
time.sleep(backoff_factor**attempt)
except Exception as e:
logger.critical(f"SSL File Get - Unexpected error: {e}")
return False
finally:
attempt += 1
logger.error("SSL File Get - Maximum retries reached. Download failed.")
return False
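A minimal usage sketch of the helper above; the URL and destination are placeholders, and with the defaults the retry delay grows as `backoff_factor ** attempt` (1 s, 2 s, 4 s):

```python
from app.classes.shared.file_helpers import FileHelpers

# Hypothetical call; URL and destination paths are placeholders.
ok = FileHelpers.ssl_get_file(
    "https://api.papermc.io/v2/projects/paper/versions/1.20.4/builds/496/"
    "downloads/paper-1.20.4-496.jar",
    "/servers/<uuid>",  # out_path
    "paper.jar",        # out_file
    max_retries=3,
    backoff_factor=2,
)
if not ok:
    print("download failed after all retries")
```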
@staticmethod
def del_dirs(path):
path = pathlib.Path(path)

View File

@@ -1112,7 +1112,7 @@ class Helpers:
return os.path.normpath(path)
def find_default_password(self):
default_file = os.path.join(self.root_dir, "default.json")
default_file = os.path.join(self.root_dir, "app", "config", "default.json")
data = {}
if Helpers.check_file_exists(default_file):
@@ -1180,25 +1180,6 @@ class Helpers:
return temp_dir
return False
@staticmethod
def download_file(executable_url, jar_path):
try:
response = requests.get(executable_url, timeout=5)
except Exception as ex:
logger.error("Could not download executable: %s", ex)
return False
if response.status_code != 200:
logger.error("Unable to download file from %s", executable_url)
return False
try:
with open(jar_path, "wb") as jar_file:
jar_file.write(response.content)
except Exception as e:
logger.error("Unable to finish executable download. Error: %s", e)
return False
return True
@staticmethod
def remove_prefix(text, prefix):
if text.startswith(prefix):

View File

@@ -3,7 +3,6 @@ import time
import shutil
import logging
import threading
import urllib
from app.classes.controllers.server_perms_controller import PermissionsServers
from app.classes.controllers.servers_controller import ServersController
@@ -227,25 +226,39 @@ class ImportHelpers:
download_thread.start()
def download_threaded_bedrock_server(self, path, new_id):
# downloads zip from remote url
"""
Downloads the latest Bedrock server, unzips it, and sets the necessary permissions.
Parameters:
path (str): The directory path to download and unzip the Bedrock server.
new_id (str): The identifier for the new server import operation.
This method handles exceptions and logs errors for each step of the process.
"""
try:
bedrock_url = Helpers.get_latest_bedrock_url()
if bedrock_url.lower().startswith("https"):
urllib.request.urlretrieve(
bedrock_url,
os.path.join(path, "bedrock_server.zip"),
if bedrock_url:
file_path = os.path.join(path, "bedrock_server.zip")
success = FileHelpers.ssl_get_file(
bedrock_url, path, "bedrock_server.zip"
)
if not success:
logger.error("Failed to download the Bedrock server zip.")
return
unzip_path = os.path.join(path, "bedrock_server.zip")
unzip_path = self.helper.wtol_path(unzip_path)
# unzips archive that was downloaded.
FileHelpers.unzip_file(unzip_path)
# adjusts permissions for execution if os is not windows
if not self.helper.is_os_windows():
os.chmod(os.path.join(path, "bedrock_server"), 0o0744)
unzip_path = self.helper.wtol_path(file_path)
# unzips archive that was downloaded.
FileHelpers.unzip_file(unzip_path)
# adjusts permissions for execution if os is not windows
# we'll delete the zip we downloaded now
os.remove(os.path.join(path, "bedrock_server.zip"))
if not self.helper.is_os_windows():
os.chmod(os.path.join(path, "bedrock_server"), 0o0744)
# we'll delete the zip we downloaded now
os.remove(file_path)
else:
logger.error("Bedrock download URL issue!")
except Exception as e:
logger.critical(
f"Failed to download bedrock executable during server creation! \n{e}"

View File

@@ -10,7 +10,6 @@ import threading
import logging.config
import subprocess
import html
import urllib.request
import glob
import json
@@ -1450,33 +1449,45 @@ class ServerInstance:
# lets download the files
if HelperServers.get_server_type_by_id(self.server_id) != "minecraft-bedrock":
# boolean returns true or false for success
downloaded = Helpers.download_file(
self.settings["executable_update_url"], current_executable
jar_dir = os.path.dirname(current_executable)
jar_file_name = os.path.basename(current_executable)
downloaded = FileHelpers.ssl_get_file(
self.settings["executable_update_url"], jar_dir, jar_file_name
)
else:
# downloads zip from remote url
try:
bedrock_url = Helpers.get_latest_bedrock_url()
if bedrock_url.lower().startswith("https"):
urllib.request.urlretrieve(
bedrock_url,
os.path.join(self.settings["path"], "bedrock_server.zip"),
if bedrock_url:
# Use the new method for secure download
download_path = os.path.join(
self.settings["path"], "bedrock_server.zip"
)
downloaded = FileHelpers.ssl_get_file(
bedrock_url, self.settings["path"], "bedrock_server.zip"
)
unzip_path = os.path.join(self.settings["path"], "bedrock_server.zip")
unzip_path = self.helper.wtol_path(unzip_path)
# unzips archive that was downloaded.
FileHelpers.unzip_file(unzip_path, server_update=True)
# adjusts permissions for execution if os is not windows
if not self.helper.is_os_windows():
os.chmod(
os.path.join(self.settings["path"], "bedrock_server"), 0o0744
)
if downloaded:
unzip_path = download_path
unzip_path = self.helper.wtol_path(unzip_path)
# we'll delete the zip we downloaded now
os.remove(os.path.join(self.settings["path"], "bedrock_server.zip"))
downloaded = True
# unzips archive that was downloaded.
FileHelpers.unzip_file(unzip_path, server_update=True)
# adjusts permissions for execution if os is not windows
if not self.helper.is_os_windows():
os.chmod(
os.path.join(self.settings["path"], "bedrock_server"),
0o0744,
)
# we'll delete the zip we downloaded now
os.remove(download_path)
else:
logger.error("Failed to download the Bedrock server zip.")
downloaded = False
except Exception as e:
logger.critical(
f"Failed to download bedrock executable for update \n{e}"

View File

@@ -574,6 +574,7 @@ class PanelHandler(BaseHandler):
"crash_detection": server_temp_obj["crash_detection"],
"show_status": server_temp_obj["show_status"],
"ignored_exits": server_temp_obj["ignored_exits"],
"count_players": server_temp_obj["count_players"],
},
"running": False,
"crashed": False,

View File

@@ -29,7 +29,15 @@ class ApiServersServerActionHandler(BaseApiHandler):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if action == "clone_server":
return self._clone_server(server_id, auth_data[4]["user_id"])
if (
self.controller.crafty_perms.can_create_server(auth_data[4]["user_id"])
or auth_data[4]["superuser"]
):
self._clone_server(server_id, auth_data[4]["user_id"])
return self.finish_json(200, {"status": "ok"})
return self.finish_json(
200, {"status": "error", "error": "SERVER_LIMIT_REACHED"}
)
if action == "eula":
return self._agree_eula(server_id, auth_data[4]["user_id"])
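For the clone_server branch above, a hedged client-side sketch of telling the two possible responses apart; the host, server id, and auth header are placeholders, and only the payload keys come from the handler:

```python
import requests

# Hypothetical client call; host, server id, and auth header are placeholders.
resp = requests.post(
    "https://localhost:8443/api/v2/servers/1/action/clone_server",
    headers={"Authorization": "<api token>"},  # placeholder, not the real auth scheme
    verify=False,
    timeout=5,
)
data = resp.json()
if data.get("status") == "ok":
    print("clone accepted")
elif data.get("error") == "SERVER_LIMIT_REACHED":
    print("user has hit their server limit; clone refused")
```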
@@ -96,6 +104,14 @@ class ApiServersServerActionHandler(BaseApiHandler):
server.execution_command = new_server_command
self.controller.servers.update_server(server)
for role in self.controller.server_perms.get_server_roles(server_id):
mask = self.controller.server_perms.get_permissions_mask(
role.role_id, server_id
)
self.controller.server_perms.add_role_server(
new_server_id, role.role_id, mask
)
self.controller.servers.init_all_servers()
self.finish_json(

View File

@@ -598,26 +598,30 @@
</script>
<script>
function send_command(server_id, command) {
async function send_command(server_id, command) {
/* this getCookie function is in base.html */
const token = getCookie("_xsrf");
$.ajax({
type: "POST",
headers: { 'X-XSRFToken': token },
url: `/api/v2/servers/${server_id}/action/${command}`,
success: function (data) {
console.log("got response:");
console.log(data);
if (command === "clone_server" && data.status === "ok") {
window.location.reload();
}
/*setTimeout(function () {
if (command != 'start_server') {
location.reload();
}
}, 10000);*/
}
let res = await fetch(`/api/v2/servers/${server_id}/action/${command}`, {
method: 'POST',
headers: {
'token': token,
},
});
let responseData = await res.json();
if (responseData.status === "ok") {
if (command === "clone_server"){
window.location.reload()
}
console.log("Command received successfully")
} else {
setTimeout(function(){
$('.modal').modal('hide');
bootbox.alert({
title: responseData.status,
message: responseData.error
});
}, 2000)
}
}