diff --git a/.gitlab/scripts/lang_sort.sh b/.gitlab/scripts/lang_sort.sh
index 5710ce1b..9a1e1cf0 100644
--- a/.gitlab/scripts/lang_sort.sh
+++ b/.gitlab/scripts/lang_sort.sh
@@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}"
# Iterate over each .json file in the directory
for file in "${DIR}"/*.json; do
- # Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name
- if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then
+ # Check if file is a regular file, is not en_EN.json, is not humanized_index.json, and does not contain "_incomplete" in its name
+ if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then
# Get keys and subkeys from the current file
current_keys=$(mktemp)
diff --git a/.gitlab/scripts/sort.py b/.gitlab/scripts/sort.py
index c78885a6..28eaf6fa 100644
--- a/.gitlab/scripts/sort.py
+++ b/.gitlab/scripts/sort.py
@@ -44,6 +44,7 @@ def main():
if (
"_incomplete" not in file
and file != "en_EN.json"
+ and file != "humanized_index.json"
and file.endswith(".json")
):
file_path = os.path.join(root, file)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c7ed7771..ef8b70a2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,38 @@
# Changelog
+## --- [4.4.1] - 2024/07/29
+### Refactor
+- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711))
+- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+### Bug fixes
+- Fix zip imports so the root dir selection is functional ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764))
+- Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768))
+- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774))
+- Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771))
+- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775))
+- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765))
+- Remove unused and problematic "dropdown-menu" ident from [!722](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) CSS ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/782))
+### Tweaks
+- Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760))
+- Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766))
+- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767))
+- Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769))
+- Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770))
+- Add better feedback on when errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Workaround cpu_freq call catching on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776))
+- Change Role selector in server wizard to be a filter list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772))
+### Lang
+- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773))
+- Add remaining `he_IL`, `th_TH` translations from **4.4.0** Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Fix `fr_FR` syntax issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/780) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/778))
+- Add `th_TH` translations for [!772](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/781))
+
+
## --- [4.4.0] - 2024/05/11
### Refactor
- Refactor API keys "super user" to "full access" ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/731) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/759))
diff --git a/README.md b/README.md
index 8c70fdb4..2b382faf 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
[![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
-# Crafty Controller 4.4.0
+# Crafty Controller 4.4.1
> Python based Control Panel for your Minecraft Server
## What is Crafty Controller?
diff --git a/app/classes/controllers/management_controller.py b/app/classes/controllers/management_controller.py
index cab755b6..fc17b8be 100644
--- a/app/classes/controllers/management_controller.py
+++ b/app/classes/controllers/management_controller.py
@@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge
from app.classes.models.management import HelpersManagement, HelpersWebhooks
from app.classes.models.servers import HelperServers
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -75,7 +76,7 @@ class ManagementController:
# Commands Methods
# **********************************************************************************
- def send_command(self, user_id, server_id, remote_ip, command):
+ def send_command(self, user_id, server_id, remote_ip, command, action_id=None):
server_name = HelperServers.get_server_friendly_name(server_id)
# Example: Admin issued command start_server for server Survival
@@ -86,7 +87,12 @@ class ManagementController:
remote_ip,
)
self.queue_command(
- {"server_id": server_id, "user_id": user_id, "command": command}
+ {
+ "server_id": server_id,
+ "user_id": user_id,
+ "command": command,
+ "action_id": action_id,
+ }
)
def queue_command(self, command_data):
@@ -123,6 +129,7 @@ class ManagementController:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
return HelpersManagement.create_scheduled_task(
server_id,
@@ -137,6 +144,7 @@ class ManagementController:
cron_string,
parent,
delay,
+ action_id,
)
@staticmethod
@@ -175,34 +183,47 @@ class ManagementController:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- return HelpersManagement.get_backup_config(server_id)
+ def get_backup_config(backup_id):
+ return HelpersManagement.get_backup_config(backup_id)
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- return self.management_helper.set_backup_config(
- server_id,
- backup_path,
- max_backups,
- excluded_dirs,
- compress,
- shutdown,
- before,
- after,
+ @staticmethod
+ def get_backups_by_server(server_id, model=False):
+ return HelpersManagement.get_backups_by_server(server_id, model)
+
+ @staticmethod
+ def delete_backup_config(backup_id):
+ HelpersManagement.remove_backup_config(backup_id)
+
+ @staticmethod
+ def update_backup_config(backup_id, updates):
+ if "backup_location" in updates:
+ updates["backup_location"] = Helpers.wtol_path(updates["backup_location"])
+ return HelpersManagement.update_backup_config(backup_id, updates)
+
+ def add_backup_config(self, data) -> str:
+ if "backup_location" in data:
+ data["backup_location"] = Helpers.wtol_path(data["backup_location"])
+ return self.management_helper.add_backup_config(data)
+
+ def add_default_backup_config(self, server_id, backup_path):
+ return self.management_helper.add_backup_config(
+ {
+ "backup_name": "Default Backup",
+ "backup_location": Helpers.wtol_path(backup_path),
+ "max_backups": 0,
+ "before": "",
+ "after": "",
+ "compress": False,
+ "shutdown": False,
+ "server_id": server_id,
+ "excluded_dirs": [],
+ "default": True,
+ }
)
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- return HelpersManagement.get_excluded_backup_dirs(server_id)
+ def get_excluded_backup_dirs(backup_id: int):
+ return HelpersManagement.get_excluded_backup_dirs(backup_id)
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)
diff --git a/app/classes/controllers/servers_controller.py b/app/classes/controllers/servers_controller.py
index 6a5cce4e..7743ad84 100644
--- a/app/classes/controllers/servers_controller.py
+++ b/app/classes/controllers/servers_controller.py
@@ -48,7 +48,6 @@ class ServersController(metaclass=Singleton):
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -83,7 +82,6 @@ class ServersController(metaclass=Singleton):
server_uuid,
name,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -148,8 +146,7 @@ class ServersController(metaclass=Singleton):
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
# Remove roles from server
PermissionsServers.remove_roles_of_server(server_id)
- # Remove backup configs tied to server
- self.management_helper.remove_backup_config(server_id)
+ self.management_helper.remove_all_server_backups(server_id)
# Finally remove server
self.servers_helper.remove_server(server_id)
diff --git a/app/classes/controllers/users_controller.py b/app/classes/controllers/users_controller.py
index 5425fbf8..d45797bd 100644
--- a/app/classes/controllers/users_controller.py
+++ b/app/classes/controllers/users_controller.py
@@ -55,6 +55,7 @@ class UsersController:
"minLength": self.helper.minimum_password_length,
"examples": ["crafty"],
"title": "Password",
+ "error": "passLength",
},
"email": {
"type": "string",
diff --git a/app/classes/minecraft/stats.py b/app/classes/minecraft/stats.py
index a3f85c05..c3474d0d 100644
--- a/app/classes/minecraft/stats.py
+++ b/app/classes/minecraft/stats.py
@@ -86,7 +86,7 @@ class Stats:
def get_node_stats(self) -> NodeStatsReturnDict:
try:
cpu_freq = psutil.cpu_freq()
- except (NotImplementedError, FileNotFoundError):
+ except (NotImplementedError, AttributeError, FileNotFoundError):
cpu_freq = None
if cpu_freq is None:
cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1)
diff --git a/app/classes/models/management.py b/app/classes/models/management.py
index e59dd39c..1a3e4a11 100644
--- a/app/classes/models/management.py
+++ b/app/classes/models/management.py
@@ -16,6 +16,7 @@ from app.classes.models.base_model import BaseModel
from app.classes.models.users import HelperUsers
from app.classes.models.servers import Servers
from app.classes.models.server_permissions import PermissionsServers
+from app.classes.shared.helpers import Helpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
@@ -87,6 +88,7 @@ class Schedules(BaseModel):
interval_type = CharField()
start_time = CharField(null=True)
command = CharField(null=True)
+ action_id = CharField(null=True)
name = CharField()
one_time = BooleanField(default=False)
cron_string = CharField(default="")
@@ -102,13 +104,19 @@ class Schedules(BaseModel):
# Backups Class
# **********************************************************************************
class Backups(BaseModel):
+ backup_id = CharField(primary_key=True, default=Helpers.create_uuid)
+ backup_name = CharField(default="New Backup")
+ backup_location = CharField(default="")
excluded_dirs = CharField(null=True)
- max_backups = IntegerField()
+ max_backups = IntegerField(default=0)
server_id = ForeignKeyField(Servers, backref="backups_server")
compress = BooleanField(default=False)
shutdown = BooleanField(default=False)
before = CharField(default="")
after = CharField(default="")
+ default = BooleanField(default=False)
+ status = CharField(default='{"status": "Standby", "message": ""}')
+ enabled = BooleanField(default=True)
class Meta:
table_name = "backups"
@@ -263,6 +271,7 @@ class HelpersManagement:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
sch_id = Schedules.insert(
{
@@ -273,6 +282,7 @@ class HelpersManagement:
Schedules.interval_type: interval_type,
Schedules.start_time: start_time,
Schedules.command: command,
+ Schedules.action_id: action_id,
Schedules.name: name,
Schedules.one_time: one_time,
Schedules.cron_string: cron_string,
@@ -335,133 +345,81 @@ class HelpersManagement:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- try:
- row = (
- Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
- )
- conf = {
- "backup_path": row.server_id.backup_path,
- "excluded_dirs": row.excluded_dirs,
- "max_backups": row.max_backups,
- "server_id": row.server_id_id,
- "compress": row.compress,
- "shutdown": row.shutdown,
- "before": row.before,
- "after": row.after,
- }
- except IndexError:
- conf = {
- "backup_path": None,
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- return conf
+ def get_backup_config(backup_id):
+ return model_to_dict(Backups.get(Backups.backup_id == backup_id))
@staticmethod
- def remove_backup_config(server_id):
+ def get_backups_by_server(server_id, model=False):
+ if not model:
+ data = {}
+ for backup in (
+ Backups.select().where(Backups.server_id == server_id).execute()
+ ):
+ data[str(backup.backup_id)] = {
+ "backup_id": backup.backup_id,
+ "backup_name": backup.backup_name,
+ "backup_location": backup.backup_location,
+ "excluded_dirs": backup.excluded_dirs,
+ "max_backups": backup.max_backups,
+ "server_id": backup.server_id_id,
+ "compress": backup.compress,
+ "shutdown": backup.shutdown,
+ "before": backup.before,
+ "after": backup.after,
+ "default": backup.default,
+ "enabled": backup.enabled,
+ }
+ else:
+ data = Backups.select().where(Backups.server_id == server_id).execute()
+ return data
+
+ @staticmethod
+ def get_default_server_backup(server_id: str) -> dict:
+ print(server_id)
+ bu_query = Backups.select().where(
+ Backups.server_id == server_id,
+ Backups.default == True, # pylint: disable=singleton-comparison
+ )
+ for item in bu_query:
+ print("HI", item)
+ backup_model = bu_query.first()
+
+ if backup_model:
+ return model_to_dict(backup_model)
+ raise IndexError
+
+ @staticmethod
+ def remove_all_server_backups(server_id):
Backups.delete().where(Backups.server_id == server_id).execute()
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- logger.debug(f"Updating server {server_id} backup config with {locals()}")
- if Backups.select().where(Backups.server_id == server_id).exists():
- new_row = False
- conf = {}
- else:
- conf = {
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- new_row = True
- if max_backups is not None:
- conf["max_backups"] = max_backups
- if excluded_dirs is not None:
- dirs_to_exclude = ",".join(excluded_dirs)
+ @staticmethod
+ def remove_backup_config(backup_id):
+ Backups.delete().where(Backups.backup_id == backup_id).execute()
+
+ def add_backup_config(self, conf) -> str:
+ if "excluded_dirs" in conf:
+ dirs_to_exclude = ",".join(conf["excluded_dirs"])
conf["excluded_dirs"] = dirs_to_exclude
- conf["compress"] = compress
- conf["shutdown"] = shutdown
- conf["before"] = before
- conf["after"] = after
- if not new_row:
- with self.database.atomic():
- if backup_path is not None:
- server_rows = (
- Servers.update(backup_path=backup_path)
- .where(Servers.server_id == server_id)
- .execute()
- )
- else:
- server_rows = 0
- backup_rows = (
- Backups.update(conf).where(Backups.server_id == server_id).execute()
- )
- logger.debug(
- f"Updating existing backup record. "
- f"{server_rows}+{backup_rows} rows affected"
- )
- else:
- with self.database.atomic():
- conf["server_id"] = server_id
- if backup_path is not None:
- Servers.update(backup_path=backup_path).where(
- Servers.server_id == server_id
- )
- Backups.create(**conf)
- logger.debug("Creating new backup record.")
+ backup = Backups.create(**conf)
+ logger.debug("Creating new backup record.")
+ return backup.backup_id
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
+ def update_backup_config(backup_id, data):
+ if "excluded_dirs" in data:
+ dirs_to_exclude = ",".join(data["excluded_dirs"])
+ data["excluded_dirs"] = dirs_to_exclude
+ Backups.update(**data).where(Backups.backup_id == backup_id).execute()
+
+ @staticmethod
+ def get_excluded_backup_dirs(backup_id: int):
+ excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"]
if excluded_dirs is not None and excluded_dirs != "":
dir_list = excluded_dirs.split(",")
else:
dir_list = []
return dir_list
- def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_add not in dir_list:
- dir_list.append(dir_to_add)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not adding {dir_to_add} to excluded directories - "
- f"already in the excluded directory list for server ID {server_id}"
- )
-
- def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_del in dir_list:
- dir_list.remove(dir_to_del)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not removing {dir_to_del} from excluded directories - "
- f"not in the excluded directory list for server ID {server_id}"
- )
-
# **********************************************************************************
# Webhooks Class
diff --git a/app/classes/models/servers.py b/app/classes/models/servers.py
index 13d9096a..e5d85c69 100644
--- a/app/classes/models/servers.py
+++ b/app/classes/models/servers.py
@@ -26,7 +26,6 @@ class Servers(BaseModel):
created = DateTimeField(default=datetime.datetime.now)
server_name = CharField(default="Server", index=True)
path = CharField(default="")
- backup_path = CharField(default="")
executable = CharField(default="")
log_path = CharField(default="")
execution_command = CharField(default="")
@@ -65,7 +64,6 @@ class HelperServers:
server_id: str,
name: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -81,7 +79,6 @@ class HelperServers:
name: The name of the server
server_uuid: This is the UUID of the server
server_dir: The directory where the server is located
- backup_path: The path to the backup folder
server_command: The command to start the server
server_file: The name of the server file
server_log_file: The path to the server log file
@@ -111,7 +108,6 @@ class HelperServers:
server_port=server_port,
server_ip=server_host,
stop_command=server_stop,
- backup_path=backup_path,
type=server_type,
created_by=created_by,
).server_id
diff --git a/app/classes/models/users.py b/app/classes/models/users.py
index 3f96e651..6f6a6bde 100644
--- a/app/classes/models/users.py
+++ b/app/classes/models/users.py
@@ -38,7 +38,7 @@ class Users(BaseModel):
superuser = BooleanField(default=False)
lang = CharField(default="en_EN")
support_logs = CharField(default="")
- valid_tokens_from = DateTimeField(default=datetime.datetime.now)
+ valid_tokens_from = DateTimeField(default=Helpers.get_utc_now)
server_order = CharField(default="")
preparing = BooleanField(default=False)
hints = BooleanField(default=True)
@@ -119,7 +119,6 @@ class HelperUsers:
@staticmethod
def get_user_total():
count = Users.select().where(Users.username != "system").count()
- print(count)
return count
@staticmethod
diff --git a/app/classes/shared/authentication.py b/app/classes/shared/authentication.py
index fad8b730..94db5532 100644
--- a/app/classes/shared/authentication.py
+++ b/app/classes/shared/authentication.py
@@ -1,5 +1,6 @@
import logging
import time
+from datetime import datetime
from typing import Optional, Dict, Any, Tuple
import jwt
from jwt import PyJWTError
@@ -62,7 +63,17 @@ class Authentication:
user = HelperUsers.get_user(user_id)
# TODO: Have a cache or something so we don't constantly
# have to query the database
- if int(user.get("valid_tokens_from").timestamp()) < iat:
+ valid_tokens_from_str = user.get("valid_tokens_from")
+ # It's possible this will be a string or a dt coming from the DB
+ # We need to account for that
+ try:
+ valid_tokens_from_dt = datetime.strptime(
+ valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z"
+ )
+ except TypeError:
+ valid_tokens_from_dt = valid_tokens_from_str
+ # At this point valid_tokens_from_dt is a datetime regardless of stored format
+ if int(valid_tokens_from_dt.timestamp()) < iat:
# Success!
return key, data, user
return None
diff --git a/app/classes/shared/file_helpers.py b/app/classes/shared/file_helpers.py
index 90d8e65c..23bf01dd 100644
--- a/app/classes/shared/file_helpers.py
+++ b/app/classes/shared/file_helpers.py
@@ -4,7 +4,10 @@ import logging
import pathlib
import tempfile
import zipfile
-from zipfile import ZipFile, ZIP_DEFLATED
+import hashlib
+from typing import BinaryIO
+import mimetypes
+from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
import urllib.request
import ssl
import time
@@ -22,6 +25,7 @@ class FileHelpers:
def __init__(self, helper):
self.helper: Helpers = helper
+ self.mime_types = mimetypes.MimeTypes()
@staticmethod
def ssl_get_file(
@@ -142,6 +146,32 @@ class FileHelpers:
logger.error(f"Path specified is not a file or does not exist. {path}")
return e
+ def check_mime_types(self, file_path):
+ m_type, _value = self.mime_types.guess_type(file_path)
+ return m_type
+
+ @staticmethod
+ def calculate_file_hash(file_path: str) -> str:
+ """
+ Takes one parameter of file path.
+ It will generate a SHA256 hash for the path and return it.
+ """
+ sha256_hash = hashlib.sha256()
+ with open(file_path, "rb") as f:
+ for byte_block in iter(lambda: f.read(4096), b""):
+ sha256_hash.update(byte_block)
+ return sha256_hash.hexdigest()
+
+ @staticmethod
+ def calculate_buffer_hash(buffer: BinaryIO) -> str:
+ """
+ Takes one argument of a stream buffer. Will return a
+ sha256 hash of the buffer
+ """
+ sha256_hash = hashlib.sha256()
+ sha256_hash.update(buffer)
+ return sha256_hash.hexdigest()
+
@staticmethod
def copy_dir(src_path, dest_path, dirs_exist_ok=False):
# pylint: disable=unexpected-keyword-arg
@@ -229,74 +259,15 @@ class FileHelpers:
return True
- def make_compressed_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
- ):
- # create a ZipFile object
- path_to_destination += ".zip"
- ex_replace = [p.replace("\\", "/") for p in excluded_dirs]
- total_bytes = 0
- dir_bytes = Helpers.get_dir_size(path_to_zip)
- results = {
- "percent": 0,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
- with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
- zip_file.comment = bytes(
- comment, "utf-8"
- ) # comments over 65535 bytes will be truncated
- for root, dirs, files in os.walk(path_to_zip, topdown=True):
- for l_dir in dirs:
- if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace:
- dirs.remove(l_dir)
- ziproot = path_to_zip
- for file in files:
- if (
- str(os.path.join(root, file)).replace("\\", "/")
- not in ex_replace
- and file != "crafty.sqlite"
- ):
- try:
- logger.info(f"backing up: {os.path.join(root, file)}")
- if os.name == "nt":
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, ""), file),
- )
- else:
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, "/"), file),
- )
-
- except Exception as e:
- logger.warning(
- f"Error backing up: {os.path.join(root, file)}!"
- f" - Error was: {e}"
- )
- total_bytes += os.path.getsize(os.path.join(root, file))
- percent = round((total_bytes / dir_bytes) * 100, 2)
- results = {
- "percent": percent,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
-
- return True
-
def make_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
+ self,
+ path_to_destination,
+ path_to_zip,
+ excluded_dirs,
+ server_id,
+ backup_id,
+ comment="",
+ compressed=None,
):
# create a ZipFile object
path_to_destination += ".zip"
@@ -313,7 +284,15 @@ class FileHelpers:
"backup_status",
results,
)
- with ZipFile(path_to_destination, "w") as zip_file:
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
+ # Set the compression mode based on the `compressed` parameter
+ compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED
+ with ZipFile(path_to_destination, "w", compression_mode) as zip_file:
zip_file.comment = bytes(
comment, "utf-8"
) # comments over 65535 bytes will be truncated
@@ -364,6 +343,7 @@ class FileHelpers:
results = {
"percent": percent,
"total_files": self.helper.human_readable_file_size(dir_bytes),
+ "backup_id": backup_id,
}
# send status results to page.
WebSocketManager().broadcast_page_params(
@@ -372,6 +352,12 @@ class FileHelpers:
"backup_status",
results,
)
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
return True
@staticmethod
diff --git a/app/classes/shared/helpers.py b/app/classes/shared/helpers.py
index 55a588fc..e827d5b2 100644
--- a/app/classes/shared/helpers.py
+++ b/app/classes/shared/helpers.py
@@ -19,7 +19,7 @@ import shutil
import shlex
import subprocess
import itertools
-from datetime import datetime
+from datetime import datetime, timezone
from socket import gethostname
from contextlib import redirect_stderr, suppress
import libgravatar
@@ -508,7 +508,6 @@ class Helpers:
"max_log_lines": 700,
"max_audit_entries": 300,
"disabled_language_files": [],
- "stream_size_GB": 1,
"keywords": ["help", "chunk"],
"allow_nsfw_profile_pictures": False,
"enable_user_self_delete": False,
@@ -640,6 +639,10 @@ class Helpers:
version = f"{major}.{minor}.{sub}"
return str(version)
+ @staticmethod
+ def get_utc_now() -> datetime:
+ return datetime.fromtimestamp(time.time(), tz=timezone.utc)
+
def encode_pass(self, password):
return self.passhasher.hash(password)
@@ -1006,6 +1009,11 @@ class Helpers:
except PermissionError as e:
logger.critical(f"Check generated exception due to permssion error: {e}")
return False
+ except FileNotFoundError as e:
+ logger.critical(
+ f"Check generated exception due to file does not exist error: {e}"
+ )
+ return False
def create_self_signed_cert(self, cert_dir=None):
if cert_dir is None:
diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py
index 66feff91..0d90a967 100644
--- a/app/classes/shared/main_controller.py
+++ b/app/classes/shared/main_controller.py
@@ -1,4 +1,5 @@
import os
+import sys
import pathlib
from pathlib import Path
from datetime import datetime
@@ -251,6 +252,19 @@ class Controller:
# Copy crafty logs to archive dir
full_log_name = os.path.join(crafty_path, "logs")
FileHelpers.copy_dir(os.path.join(self.project_root, "logs"), full_log_name)
+ thread_dump = ""
+ for thread in threading.enumerate():
+ if sys.version_info >= (3, 8):
+ thread_dump += (
+ f"Name: {thread.name}\tIdentifier:"
+ f" {thread.ident}\tTID/PID: {thread.native_id}\n"
+ )
+ else:
+ print(f"Name: {thread.name}\tIdentifier: {thread.ident}")
+ with open(
+ os.path.join(temp_dir, "crafty_thread_dump.txt"), "a", encoding="utf-8"
+ ) as f:
+ f.write(thread_dump)
self.support_scheduler.add_job(
self.log_status,
"interval",
@@ -552,7 +566,6 @@ class Controller:
name=data["name"],
server_uuid=server_fs_uuid,
server_dir=new_server_path,
- backup_path=backup_path,
server_command=server_command,
server_file=server_file,
server_log_file=log_location,
@@ -562,7 +575,7 @@ class Controller:
server_host=monitoring_host,
server_type=monitoring_type,
)
- self.management.set_backup_config(
+ self.management.add_default_backup_config(
new_server_id,
backup_path,
)
@@ -708,7 +721,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_jar,
server_log_file,
@@ -762,7 +774,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -807,7 +818,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -855,7 +865,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -879,16 +888,13 @@ class Controller:
# **********************************************************************************
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
- server_data = self.servers.get_server_data_by_id(old_server_id)
server_obj = self.servers.get_server_obj(new_server_id)
- old_bu_path = server_data["backup_path"]
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
- backup_path = old_bu_path
+ backup_path = os.path.join(self.helper.backup_path, old_server_id)
backup_path = Path(backup_path)
backup_path_components = list(backup_path.parts)
backup_path_components[-1] = new_uuid
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
- server_obj.backup_path = new_bu_path
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
try:
os.rmdir(default_backup_dir)
@@ -902,7 +908,6 @@ class Controller:
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -917,7 +922,6 @@ class Controller:
name,
server_uuid,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -982,16 +986,16 @@ class Controller:
f"Unable to delete server files for server with ID: "
f"{server_id} with error logged: {e}"
)
- if Helpers.check_path_exists(
- self.servers.get_server_data_by_id(server_id)["backup_path"]
- ):
- FileHelpers.del_dirs(
- Helpers.get_os_understandable_path(
- self.servers.get_server_data_by_id(server_id)[
- "backup_path"
- ]
+ backup_configs = HelpersManagement.get_backups_by_server(
+ server_id, True
+ )
+ for config in backup_configs:
+ if Helpers.check_path_exists(config.backup_location):
+ FileHelpers.del_dirs(
+ Helpers.get_os_understandable_path(
+ config.backup_location
+ )
)
- )
# Cleanup scheduled tasks
try:
diff --git a/app/classes/shared/server.py b/app/classes/shared/server.py
index a6c98b89..ab8ca54a 100644
--- a/app/classes/shared/server.py
+++ b/app/classes/shared/server.py
@@ -207,9 +207,6 @@ class ServerInstance:
self.server_scheduler.start()
self.dir_scheduler.start()
self.start_dir_calc_task()
- self.backup_thread = threading.Thread(
- target=self.backup_server, daemon=True, name=f"backup_{self.name}"
- )
self.is_backingup = False
# Reset crash and update at initialization
self.stats_helper.server_crash_reset()
@@ -940,8 +937,7 @@ class ServerInstance:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
def restart_threaded_server(self, user_id):
- bu_conf = HelpersManagement.get_backup_config(self.server_id)
- if self.is_backingup and bu_conf["shutdown"]:
+ if self.is_backingup:
logger.info(
"Restart command detected. Supressing - server has"
" backup shutdown enabled and server is currently backing up."
@@ -1111,12 +1107,16 @@ class ServerInstance:
f.write("eula=true")
self.run_threaded_server(user_id)
- def a_backup_server(self):
- if self.settings["backup_path"] == "":
- logger.critical("Backup path is None. Canceling Backup!")
- return
+ def server_backup_threader(self, backup_id, update=False):
+ # Check to see if we're already backing up
+ if self.check_backup_by_id(backup_id):
+ return False
+
backup_thread = threading.Thread(
- target=self.backup_server, daemon=True, name=f"backup_{self.name}"
+ target=self.backup_server,
+ daemon=True,
+ name=f"backup_{backup_id}",
+ args=[backup_id, update],
)
logger.info(
f"Starting Backup Thread for server {self.settings['server_name']}."
@@ -1127,27 +1127,20 @@ class ServerInstance:
"Backup Thread - Local server path not defined. "
"Setting local server path variable."
)
- # checks if the backup thread is currently alive for this server
- if not self.is_backingup:
- try:
- backup_thread.start()
- self.is_backingup = True
- except Exception as ex:
- logger.error(f"Failed to start backup: {ex}")
- return False
- else:
- logger.error(
- f"Backup is already being processed for server "
- f"{self.settings['server_name']}. Canceling backup request"
- )
+
+ try:
+ backup_thread.start()
+ except Exception as ex:
+ logger.error(f"Failed to start backup: {ex}")
return False
logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
@callback
- def backup_server(self):
+ def backup_server(self, backup_id, update):
was_server_running = None
logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
server_users = PermissionsServers.get_server_user_list(self.server_id)
+ # Alert the start of the backup to the authorized users.
for user in server_users:
WebSocketManager().broadcast_user(
user,
@@ -1157,30 +1150,40 @@ class ServerInstance:
).format(self.name),
)
time.sleep(3)
- conf = HelpersManagement.get_backup_config(self.server_id)
+
+ # Get the backup config
+ conf = HelpersManagement.get_backup_config(backup_id)
+ # Adjust the location to include the backup ID for destination.
+ backup_location = os.path.join(conf["backup_location"], conf["backup_id"])
+
+ # Check that a backup location is configured for this backup config.
+ if not conf["backup_location"]:
+ Console.critical("No backup path found. Canceling")
+ return None
if conf["before"]:
- if self.check_running():
- logger.debug(
- "Found running server and send command option. Sending command"
- )
- self.send_command(conf["before"])
+ logger.debug(
+ "Found running server and send command option. Sending command"
+ )
+ self.send_command(conf["before"])
+ # Pause to let command run
+ time.sleep(5)
if conf["shutdown"]:
- if conf["before"]:
- # pause to let people read message.
- time.sleep(5)
logger.info(
"Found shutdown preference. Delaying"
+ "backup start. Shutting down server."
)
- if self.check_running():
- self.stop_server()
- was_server_running = True
+ if not update:
+ was_server_running = False
+ if self.check_running():
+ self.stop_server()
+ was_server_running = True
+
+ self.helper.ensure_dir_exists(backup_location)
- self.helper.ensure_dir_exists(self.settings["backup_path"])
try:
backup_filename = (
- f"{self.settings['backup_path']}/"
+ f"{backup_location}/"
f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long
)
logger.info(
@@ -1188,42 +1191,36 @@ class ServerInstance:
f" (ID#{self.server_id}, path={self.server_path}) "
f"at '{backup_filename}'"
)
- excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id)
+ excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id)
server_dir = Helpers.get_os_understandable_path(self.settings["path"])
- if conf["compress"]:
- logger.debug(
- "Found compress backup to be true. Calling compressed archive"
- )
- self.file_helper.make_compressed_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
- else:
- logger.debug(
- "Found compress backup to be false. Calling NON-compressed archive"
- )
- self.file_helper.make_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
+
+ self.file_helper.make_backup(
+ Helpers.get_os_understandable_path(backup_filename),
+ server_dir,
+ excluded_dirs,
+ self.server_id,
+ backup_id,
+ conf["backup_name"],
+ conf["compress"],
+ )
while (
- len(self.list_backups()) > conf["max_backups"]
+ len(self.list_backups(conf)) > conf["max_backups"]
and conf["max_backups"] > 0
):
- backup_list = self.list_backups()
+ backup_list = self.list_backups(conf)
oldfile = backup_list[0]
- oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
+ oldfile_path = f"{backup_location}/{oldfile['path']}"
logger.info(f"Removing old backup '{oldfile['path']}'")
os.remove(Helpers.get_os_understandable_path(oldfile_path))
- self.is_backingup = False
logger.info(f"Backup of server: {self.name} completed")
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1248,7 +1245,6 @@ class ServerInstance:
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
time.sleep(3)
- self.last_backup_failed = False
if conf["after"]:
if self.check_running():
logger.debug(
@@ -1256,12 +1252,21 @@ class ServerInstance:
)
self.send_command(conf["after"])
# pause to let people read message.
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Standby", "message": ""})},
+ )
time.sleep(5)
- except:
+ except Exception as e:
logger.exception(
f"Failed to create backup of server {self.name} (ID {self.server_id})"
)
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1269,56 +1274,51 @@ class ServerInstance:
"backup_status",
results,
)
- self.is_backingup = False
if was_server_running:
logger.info(
"Backup complete. User had shutdown preference. Starting server."
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
- self.last_backup_failed = True
-
- def backup_status(self, source_path, dest_path):
- results = Helpers.calc_percent(source_path, dest_path)
- self.backup_stats = results
- if len(WebSocketManager().clients) > 0:
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(self.server_id)},
- "backup_status",
- results,
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Failed", "message": f"{e}"})},
)
+ self.set_backup_status()
def last_backup_status(self):
return self.last_backup_failed
- def send_backup_status(self):
- try:
- return self.backup_stats
- except:
- return {"percent": 0, "total_files": 0}
+ def set_backup_status(self):
+ backups = HelpersManagement.get_backups_by_server(self.server_id, True)
+ alert = False
+ for backup in backups:
+ if json.loads(backup.status)["status"] == "Failed":
+ alert = True
+ self.last_backup_failed = alert
- def list_backups(self):
- if not self.settings["backup_path"]:
+ def list_backups(self, backup_config: dict) -> list:
+ if not backup_config:
logger.info(
f"Error putting backup file list for server with ID: {self.server_id}"
)
return []
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
if not Helpers.check_path_exists(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
):
return []
files = Helpers.get_human_readable_files_sizes(
Helpers.list_dir_by_date(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
)
)
return [
{
"path": os.path.relpath(
f["path"],
- start=Helpers.get_os_understandable_path(
- self.settings["backup_path"]
- ),
+ start=Helpers.get_os_understandable_path(backup_location),
),
"size": f["size"],
}
@@ -1330,7 +1330,7 @@ class ServerInstance:
def jar_update(self):
self.stats_helper.set_update(True)
update_thread = threading.Thread(
- target=self.a_jar_update, daemon=True, name=f"exe_update_{self.name}"
+ target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}"
)
update_thread.start()
@@ -1371,10 +1371,13 @@ class ServerInstance:
def check_update(self):
return self.stats_helper.get_server_stats()["updating"]
- def a_jar_update(self):
+ def threaded_jar_update(self):
server_users = PermissionsServers.get_server_user_list(self.server_id)
was_started = "-1"
- self.a_backup_server()
+ # Get default backup configuration
+ backup_config = HelpersManagement.get_default_server_backup(self.server_id)
+ # start threaded backup
+ self.server_backup_threader(backup_config["backup_id"], True)
# checks if server is running. Calls shutdown if it is running.
if self.check_running():
was_started = True
@@ -1403,47 +1406,22 @@ class ServerInstance:
"string": message,
},
)
- backup_dir = os.path.join(
- Helpers.get_os_understandable_path(self.settings["path"]),
- "crafty_executable_backups",
- )
- # checks if backup directory already exists
- if os.path.isdir(backup_dir):
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
- else:
- logger.info(
- f"Executable backup directory not found for Server: {self.name}."
- f" Creating one."
- )
- os.mkdir(backup_dir)
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
-
- if len(os.listdir(backup_dir)) > 0:
- # removes old backup
- logger.info(f"Old backups found for server: {self.name}. Removing...")
- for item in os.listdir(backup_dir):
- os.remove(os.path.join(backup_dir, item))
- logger.info(f"Old backups removed for server: {self.name}.")
- else:
- logger.info(f"No old backups found for server: {self.name}")
-
current_executable = os.path.join(
Helpers.get_os_understandable_path(self.settings["path"]),
self.settings["executable"],
)
-
- try:
- # copies to backup dir
- FileHelpers.copy_file(current_executable, backup_executable)
- except FileNotFoundError:
- logger.error("Could not create backup of jarfile. File not found.")
-
+ backing_up = True
# wait for backup
- while self.is_backingup:
- time.sleep(10)
+ while backing_up:
+ # Check to see if we're already backing up
+ backing_up = self.check_backup_by_id(backup_config["backup_id"])
+ time.sleep(2)
# check if backup was successful
- if self.last_backup_failed:
+ backup_status = json.loads(
+ HelpersManagement.get_backup_config(backup_config["backup_id"])["status"]
+ )["status"]
+ if backup_status == "Failed":
for user in server_users:
WebSocketManager().broadcast_user(
user,
@@ -1528,12 +1506,6 @@ class ServerInstance:
WebSocketManager().broadcast_user_page(
user, "/panel/dashboard", "send_start_reload", {}
)
- WebSocketManager().broadcast_user(
- user,
- "notification",
- "Executable update finished for " + self.name,
- )
-
self.management_helper.add_to_audit_log_raw(
"Alert",
"-1",
@@ -1656,6 +1628,14 @@ class ServerInstance:
except:
Console.critical("Can't broadcast server status to websocket")
+ def check_backup_by_id(self, backup_id: str) -> bool:
+ # Check to see if we're already backing up
+ for thread in threading.enumerate():
+ if thread.name == f"backup_{backup_id}":
+ Console.debug(f"Backup with id {backup_id} already running!")
+ return True
+ return False
+
def get_servers_stats(self):
server_stats = {}
diff --git a/app/classes/shared/tasks.py b/app/classes/shared/tasks.py
index b9513441..da9735a9 100644
--- a/app/classes/shared/tasks.py
+++ b/app/classes/shared/tasks.py
@@ -140,7 +140,7 @@ class TasksManager:
)
elif command == "backup_server":
- svr.a_backup_server()
+ svr.server_backup_threader(cmd["action_id"])
elif command == "update_executable":
svr.jar_update()
@@ -240,6 +240,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -268,6 +269,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -284,6 +286,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -303,6 +306,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -337,6 +341,7 @@ class TasksManager:
job_data["cron_string"],
job_data["parent"],
job_data["delay"],
+ job_data["action_id"],
)
# Checks to make sure some doofus didn't actually make the newly
@@ -367,6 +372,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -393,6 +399,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -409,6 +416,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -428,6 +436,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -520,6 +529,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -543,6 +553,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -559,6 +570,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -578,6 +590,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -653,6 +666,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -785,6 +799,18 @@ class TasksManager:
self.helper.ensure_dir_exists(
os.path.join(self.controller.project_root, "import", "upload")
)
+ self.helper.ensure_dir_exists(
+ os.path.join(self.controller.project_root, "temp")
+ )
+ for file in os.listdir(os.path.join(self.controller.project_root, "temp")):
+ if self.helper.is_file_older_than_x_days(
+ os.path.join(self.controller.project_root, "temp", file)
+ ):
+ try:
+ os.remove(os.path.join(self.controller.project_root, "temp", file))
+ except FileNotFoundError:
+ logger.debug("Could not clear out file from temp directory")
+
for file in os.listdir(
os.path.join(self.controller.project_root, "import", "upload")
):
@@ -793,7 +819,7 @@ class TasksManager:
):
try:
os.remove(os.path.join(file))
- except:
+ except FileNotFoundError:
logger.debug("Could not clear out file from import directory")
def log_watcher(self):
diff --git a/app/classes/shared/translation.py b/app/classes/shared/translation.py
index 0e441808..538856a8 100644
--- a/app/classes/shared/translation.py
+++ b/app/classes/shared/translation.py
@@ -20,7 +20,7 @@ class Translation:
def get_language_file(self, language: str):
return os.path.join(self.translations_path, str(language) + ".json")
- def translate(self, page, word, language):
+ def translate(self, page, word, language, error=True):
fallback_language = "en_EN"
translated_word = self.translate_inner(page, word, language)
@@ -37,7 +37,9 @@ class Translation:
if hasattr(translated_word, "__iter__"):
# Multiline strings
return "\n".join(translated_word)
- return "Error while getting translation"
+ if error:
+ return "Error while getting translation"
+ return word
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
language_file = self.get_language_file(language)
diff --git a/app/classes/web/base_handler.py b/app/classes/web/base_handler.py
index 7cca08e8..2d9261ea 100644
--- a/app/classes/web/base_handler.py
+++ b/app/classes/web/base_handler.py
@@ -6,6 +6,7 @@ import nh3
import tornado.web
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
+from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.users import ApiKeys
from app.classes.shared.helpers import Helpers
from app.classes.shared.file_helpers import FileHelpers
@@ -195,6 +196,8 @@ class BaseHandler(tornado.web.RequestHandler):
if api_key is not None:
superuser = superuser and api_key.full_access
server_permissions_api_mask = api_key.server_permissions
+ if api_key.full_access:
+ server_permissions_api_mask = "1" * len(EnumPermissionsServer)
exec_user_role = set()
if superuser:
authorized_servers = self.controller.servers.get_all_defined_servers()
diff --git a/app/classes/web/panel_handler.py b/app/classes/web/panel_handler.py
index bbbc9d9e..8df48431 100644
--- a/app/classes/web/panel_handler.py
+++ b/app/classes/web/panel_handler.py
@@ -41,6 +41,8 @@ SUBPAGE_PERMS = {
"webhooks": EnumPermissionsServer.CONFIG,
}
+SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules"
+
class PanelHandler(BaseHandler):
def get_user_roles(self) -> t.Dict[str, list]:
@@ -677,36 +679,18 @@ class PanelHandler(BaseHandler):
page_data["java_versions"] = page_java
if subpage == "backup":
server_info = self.controller.servers.get_server_data_by_id(server_id)
- page_data["backup_config"] = (
- self.controller.management.get_backup_config(server_id)
- )
- exclusions = []
- page_data["exclusions"] = (
- self.controller.management.get_excluded_backup_dirs(server_id)
+
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
)
page_data["backing_up"] = (
self.controller.servers.get_server_instance_by_id(
server_id
).is_backingup
)
- page_data["backup_stats"] = (
- self.controller.servers.get_server_instance_by_id(
- server_id
- ).send_backup_status()
- )
# makes it so relative path is the only thing shown
- for file in page_data["exclusions"]:
- if Helpers.is_os_windows():
- exclusions.append(file.replace(server_info["path"] + "\\", ""))
- else:
- exclusions.append(file.replace(server_info["path"] + "/", ""))
- page_data["exclusions"] = exclusions
+
self.controller.servers.refresh_server_settings(server_id)
- try:
- page_data["backup_list"] = server.list_backups()
- except:
- page_data["backup_list"] = []
- page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"])
if subpage == "metrics":
try:
@@ -780,20 +764,23 @@ class PanelHandler(BaseHandler):
elif page == "download_backup":
file = self.get_argument("file", "")
+ backup_id = self.get_argument("backup_id", "")
server_id = self.check_server_id()
if server_id is None:
return
-
+ backup_config = self.controller.management.get_backup_config(backup_id)
server_info = self.controller.servers.get_server_data_by_id(server_id)
+ backup_location = os.path.join(backup_config["backup_location"], backup_id)
backup_file = os.path.abspath(
os.path.join(
- Helpers.get_os_understandable_path(server_info["backup_path"]), file
+ Helpers.get_os_understandable_path(backup_location),
+ file,
)
)
if not self.helper.is_subdir(
backup_file,
- Helpers.get_os_understandable_path(server_info["backup_path"]),
+ Helpers.get_os_understandable_path(backup_location),
) or not os.path.isfile(backup_file):
self.redirect("/panel/error?error=Invalid path detected")
return
@@ -892,6 +879,8 @@ class PanelHandler(BaseHandler):
os.path.join(self.helper.root_dir, "app", "translations")
)
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@@ -1130,6 +1119,9 @@ class PanelHandler(BaseHandler):
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
@@ -1150,6 +1142,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["delay"] = 0
page_data["schedule"]["time"] = ""
page_data["schedule"]["interval"] = 1
+ page_data["schedule"]["action_id"] = ""
# we don't need to check difficulty here.
# We'll just default to basic for new schedules
page_data["schedule"]["difficulty"] = "basic"
@@ -1158,7 +1151,7 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
@@ -1195,6 +1188,9 @@ class PanelHandler(BaseHandler):
exec_user["user_id"], server_id
)
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
@@ -1209,6 +1205,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["server_id"] = server_id
page_data["schedule"]["schedule_id"] = schedule.schedule_id
page_data["schedule"]["action"] = schedule.action
+ page_data["schedule"]["action_id"] = schedule.action_id
if schedule.name:
page_data["schedule"]["name"] = schedule.name
else:
@@ -1252,11 +1249,141 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
+ elif page == "edit_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = self.controller.management.get_backup_config(
+ backup_id
+ )
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
+ )
+ exclusions = []
+ page_data["backing_up"] = self.controller.servers.get_server_instance_by_id(
+ server_id
+ ).is_backingup
+ self.controller.servers.refresh_server_settings(server_id)
+ try:
+ page_data["backup_list"] = server.list_backups(
+ page_data["backup_config"]
+ )
+ except Exception:
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = (
+ self.controller.management.get_excluded_backup_dirs(backup_id)
+ )
+ # Make exclusion paths relative for page
+ for file in page_data["exclusions"]:
+ if Helpers.is_os_windows():
+ exclusions.append(file.replace(server_info["path"] + "\\", ""))
+ else:
+ exclusions.append(file.replace(server_info["path"] + "/", ""))
+ page_data["exclusions"] = exclusions
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect("/panel/error?error=Unauthorized access To Backups")
+ return
+ template = "panel/server_backup_edit.html"
+
+ elif page == "add_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = {
+ "excluded_dirs": [],
+ "max_backups": 0,
+ "server_id": server_id,
+ "backup_location": os.path.join(self.helper.backup_path, server_id),
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ }
+ page_data["backing_up"] = False
+ self.controller.servers.refresh_server_settings(server_id)
+
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = []
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect("/panel/error?error=Unauthorized access To Backups")
+ return
+ template = "panel/server_backup_edit.html"
+
elif page == "edit_user":
user_id = self.get_argument("id", None)
role_servers = self.controller.servers.get_authorized_servers(user_id)
@@ -1307,6 +1434,8 @@ class PanelHandler(BaseHandler):
for file in sorted(
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
diff --git a/app/classes/web/routes/api/api_handlers.py b/app/classes/web/routes/api/api_handlers.py
index a30350a5..78223efe 100644
--- a/app/classes/web/routes/api/api_handlers.py
+++ b/app/classes/web/routes/api/api_handlers.py
@@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import (
)
from app.classes.web.routes.api.servers.server.backups.backup.index import (
ApiServersServerBackupsBackupIndexHandler,
+ ApiServersServerBackupsBackupFilesIndexHandler,
)
from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
)
+from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@@ -218,13 +220,13 @@ def api_handlers(handler_args):
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/backups/backup/?",
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?",
ApiServersServerBackupsBackupIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/files/?",
- ApiServersServerFilesIndexHandler,
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?",
+ ApiServersServerBackupsBackupFilesIndexHandler,
handler_args,
),
(
@@ -237,6 +239,26 @@ def api_handlers(handler_args):
ApiServersServerFilesZipHandler,
handler_args,
),
+ (
+ r"/api/v2/crafty/admin/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/import/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?",
+ ApiServersServerFilesIndexHandler,
+ handler_args,
+ ),
(
r"/api/v2/servers/([a-z0-9-]+)/tasks/?",
ApiServersServerTasksIndexHandler,
@@ -273,7 +295,8 @@ def api_handlers(handler_args):
handler_args,
),
(
- r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)/?",
+ # optional third argument when we need a action ID
+ r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?",
ApiServersServerActionHandler,
handler_args,
),
diff --git a/app/classes/web/routes/api/auth/invalidate_tokens.py b/app/classes/web/routes/api/auth/invalidate_tokens.py
index f15bf60d..9e38670a 100644
--- a/app/classes/web/routes/api/auth/invalidate_tokens.py
+++ b/app/classes/web/routes/api/auth/invalidate_tokens.py
@@ -1,6 +1,6 @@
-import datetime
import logging
from app.classes.web.base_api_handler import BaseApiHandler
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler):
logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
self.controller.users.raw_update_user(
- auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
+ auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()}
)
self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/crafty/upload/index.py b/app/classes/web/routes/api/crafty/upload/index.py
new file mode 100644
index 00000000..b37ef796
--- /dev/null
+++ b/app/classes/web/routes/api/crafty/upload/index.py
@@ -0,0 +1,308 @@
+import os
+import logging
+import shutil
+from app.classes.models.server_permissions import EnumPermissionsServer
+from app.classes.shared.helpers import Helpers
+from app.classes.web.base_api_handler import BaseApiHandler
+
+logger = logging.getLogger(__name__)
+IMAGE_MIME_TYPES = [
+ "image/bmp",
+ "image/cis-cod",
+ "image/gif",
+ "image/ief",
+ "image/jpeg",
+ "image/pipeg",
+ "image/svg+xml",
+ "image/tiff",
+ "image/x-cmu-raster",
+ "image/x-cmx",
+ "image/x-icon",
+ "image/x-portable-anymap",
+ "image/x-portable-bitmap",
+ "image/x-portable-graymap",
+ "image/x-portable-pixmap",
+ "image/x-rgb",
+ "image/x-xbitmap",
+ "image/x-xpixmap",
+ "image/x-xwindowdump",
+ "image/png",
+ "image/webp",
+]
+
+ARCHIVE_MIME_TYPES = ["application/zip"]
+
+
+class ApiFilesUploadHandler(BaseApiHandler):
+ async def post(self, server_id=None):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ upload_type = self.request.headers.get("type")
+ accepted_types = []
+
+ if server_id:
+ # Check to make sure user is authorized for the server
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ # Make sure user has file access for the server
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
+ # if the user doesn't have Files permission, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+
+ u_type = "server_upload"
+ # Make sure user is a super user if they're changing panel settings
+ elif auth_data[4]["superuser"] and upload_type == "background":
+ u_type = "admin_config"
+ self.upload_dir = os.path.join(
+ self.controller.project_root,
+ "app/frontend/static/assets/images/auth/custom",
+ )
+ accepted_types = IMAGE_MIME_TYPES
+ elif upload_type == "import":
+ # Check that user can make servers
+ if (
+ not self.controller.crafty_perms.can_create_server(
+ auth_data[4]["user_id"]
+ )
+ and not auth_data[4]["superuser"]
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Set directory to upload import dir
+ self.upload_dir = os.path.join(
+ self.controller.project_root, "import", "upload"
+ )
+ u_type = "server_import"
+ accepted_types = ARCHIVE_MIME_TYPES
+ else:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Get the headers from the request
+ self.chunk_hash = self.request.headers.get("chunkHash", 0)
+ self.file_id = self.request.headers.get("fileId")
+ self.chunked = self.request.headers.get("chunked", False)
+ self.filename = self.request.headers.get("fileName", None)
+ try:
+ file_size = int(self.request.headers.get("fileSize", None))
+ total_chunks = int(self.request.headers.get("totalChunks", 0))
+ except TypeError:
+ return self.finish_json(
+ 400, {"status": "error", "error": "TYPE ERROR", "data": {}}
+ )
+ self.chunk_index = self.request.headers.get("chunkId")
+ if u_type == "server_upload":
+ self.upload_dir = self.request.headers.get("location", None)
+ self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
+
+ if u_type == "server_upload":
+ # If this is an upload from a server the path will be what
+ # Is requested
+ full_path = os.path.join(self.upload_dir, self.filename)
+
+ # Check to make sure the requested path is inside the server's directory
+ if not self.helper.is_subdir(
+ full_path,
+ Helpers.get_os_understandable_path(
+ self.controller.servers.get_server_data_by_id(server_id)["path"]
+ ),
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT AUTHORIZED",
+ "data": {"message": "Traversal detected"},
+ },
+ )
+ # Check to make sure the file type we're being sent is what we're expecting
+ if (
+ self.file_helper.check_mime_types(self.filename) not in accepted_types
+ and u_type != "server_upload"
+ ):
+ return self.finish_json(
+ 422,
+ {
+ "status": "error",
+ "error": "INVALID FILE TYPE",
+ "data": {
+ "message": f"Invalid File Type only accepts {accepted_types}"
+ },
+ },
+ )
+ _total, _used, free = shutil.disk_usage(self.upload_dir)
+
+ # Check to see if we have enough space
+ if free <= file_size:
+ return self.finish_json(
+ 507,
+ {
+ "status": "error",
+ "error": "NO STORAGE SPACE",
+ "data": {"message": "Out Of Space!"},
+ },
+ )
+
+        # If this has no chunk index we know it's the initial request
+ if self.chunked and not self.chunk_index:
+ return self.finish_json(
+ 200, {"status": "ok", "data": {"file-id": self.file_id}}
+ )
+ # Create the upload and temp directories if they don't exist
+ os.makedirs(self.upload_dir, exist_ok=True)
+
+ # Check for chunked header. We will handle this request differently
+ # if it doesn't exist
+ if not self.chunked:
+ # Write the file directly to the upload dir
+ with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
+ chunk = self.request.body
+ if chunk:
+ file.write(chunk)
+ # We'll check the file hash against the sent hash once the file is
+ # written. We cannot check this buffer.
+ calculated_hash = self.file_helper.calculate_file_hash(
+ os.path.join(self.upload_dir, self.filename)
+ )
+ logger.info(
+ f"File upload completed. Filename: {self.filename} Type: {u_type}"
+ )
+ return self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ # Since this is a chunked upload we'll create the temp dir for parts.
+ os.makedirs(self.temp_dir, exist_ok=True)
+
+ # Read headers and query parameters
+ content_length = int(self.request.headers.get("Content-Length"))
+ if content_length <= 0:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID CONTENT LENGTH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID CONTENT LENGTH",
+ "data": {"message": "Invalid content length"},
+ },
+ )
+
+ # At this point filename, chunk index and total chunks are required
+ # in the request
+ if not self.filename or self.chunk_index is None:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: CHUNK INDEX NOT FOUND"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INDEX ERROR",
+ "data": {
+ "message": "Filename, chunk_index,"
+ " and total_chunks are required"
+ },
+ },
+ )
+
+ # Calculate the hash of the buffer and compare it against the expected hash
+ calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body)
+ if str(self.chunk_hash) != str(calculated_hash):
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID HASH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_HASH",
+ "data": {
+ "message": "Hash recieved does not match reported sent hash.",
+ "chunk_id": self.chunk_index,
+ },
+ },
+ )
+
+ # File paths
+ file_path = os.path.join(self.upload_dir, self.filename)
+ chunk_path = os.path.join(
+ self.temp_dir, f"{self.filename}.part{self.chunk_index}"
+ )
+
+ # Save the chunk
+ with open(chunk_path, "wb") as f:
+ f.write(self.request.body)
+
+ # Check if all chunks are received
+ received_chunks = [
+ f
+ for f in os.listdir(self.temp_dir)
+ if f.startswith(f"{self.filename}.part")
+ ]
+ # When we've reached the total chunks we'll
+ # Compare the hash and write the file
+ if len(received_chunks) == total_chunks:
+ with open(file_path, "wb") as outfile:
+ for i in range(total_chunks):
+ chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
+ with open(chunk_file, "rb") as infile:
+ outfile.write(infile.read())
+ os.remove(chunk_file)
+ logger.info(
+ f"File upload completed. Filename: {self.filename}"
+ f" Path: {file_path} Type: {u_type}"
+ )
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Uploaded file {self.filename}",
+ server_id,
+ self.request.remote_ip,
+ )
+ self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ else:
+ self.finish_json(
+ 200,
+ {
+ "status": "partial",
+ "data": {"message": f"Chunk {self.chunk_index} received"},
+ },
+ )
diff --git a/app/classes/web/routes/api/roles/index.py b/app/classes/web/routes/api/roles/index.py
index a8612c75..45a00bf0 100644
--- a/app/classes/web/routes/api/roles/index.py
+++ b/app/classes/web/routes/api/roles/index.py
@@ -2,6 +2,7 @@ import typing as t
from jsonschema import ValidationError, validate
import orjson
from playhouse.shortcuts import model_to_dict
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
create_role_schema = {
@@ -10,6 +11,7 @@ create_role_schema = {
"name": {
"type": "string",
"minLength": 1,
+ "pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@@ -22,7 +24,7 @@ create_role_schema = {
},
"permissions": {
"type": "string",
- "pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
+ "pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@@ -71,7 +73,7 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
@@ -81,7 +83,10 @@ class ApiRolesIndexHandler(BaseApiHandler):
# GET /api/v2/roles?ids=true
get_only_ids = self.get_query_argument("ids", None) == "true"
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@@ -104,14 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
_,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -138,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler):
role_name = data["name"]
manager = data.get("manager", None)
+ if not superuser and not manager:
+ manager = auth_data[4]["user_id"]
if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0:
manager = None
diff --git a/app/classes/web/routes/api/roles/role/index.py b/app/classes/web/routes/api/roles/role/index.py
index 73fd9ff3..1eab6183 100644
--- a/app/classes/web/routes/api/roles/role/index.py
+++ b/app/classes/web/routes/api/roles/role/index.py
@@ -1,6 +1,7 @@
from jsonschema import ValidationError, validate
import orjson
-from peewee import DoesNotExist
+from peewee import DoesNotExist, IntegrityError
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
modify_role_schema = {
@@ -9,6 +10,7 @@ modify_role_schema = {
"name": {
"type": "string",
"minLength": 1,
+ "pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@@ -21,7 +23,7 @@ modify_role_schema = {
},
"permissions": {
"type": "string",
- "pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
+ "pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@@ -70,14 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
_,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -100,8 +105,11 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
user,
_,
) = auth_data
-
- if not superuser:
+ role = self.controller.roles.get_role(role_id)
+ if (
+ str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.roles.remove_role(role_id)
@@ -124,7 +132,7 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
@@ -132,7 +140,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
) = auth_data
role = self.controller.roles.get_role(role_id)
- if not superuser and user["user_id"] != role["manager"]:
+ if not superuser and (
+ user["user_id"] != role["manager"]
+ or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(
400,
{
@@ -179,7 +190,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
)
except DoesNotExist:
return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
-
+ except IntegrityError:
+ return self.finish_json(
+ 404, {"status": "error", "error": "ROLE_NAME_EXISTS"}
+ )
self.controller.management.add_to_audit_log(
user["user_id"],
f"modified role with ID {role_id}",
diff --git a/app/classes/web/routes/api/servers/index.py b/app/classes/web/routes/api/servers/index.py
index 43cf01e2..ca551326 100644
--- a/app/classes/web/routes/api/servers/index.py
+++ b/app/classes/web/routes/api/servers/index.py
@@ -23,6 +23,7 @@ new_server_schema = {
"type": "string",
"examples": ["My Server"],
"minLength": 2,
+ "pattern": "^[^/\\\\]*$",
},
"roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]},
"stop_command": {
diff --git a/app/classes/web/routes/api/servers/server/action.py b/app/classes/web/routes/api/servers/server/action.py
index aba06da3..d8e58b2f 100644
--- a/app/classes/web/routes/api/servers/server/action.py
+++ b/app/classes/web/routes/api/servers/server/action.py
@@ -1,5 +1,6 @@
import logging
import os
+import json
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.servers import Servers
from app.classes.shared.file_helpers import FileHelpers
@@ -10,7 +11,7 @@ logger = logging.getLogger(__name__)
class ApiServersServerActionHandler(BaseApiHandler):
- def post(self, server_id: str, action: str):
+ def post(self, server_id: str, action: str, action_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -54,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler):
return self._agree_eula(server_id, auth_data[4]["user_id"])
self.controller.management.send_command(
- auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
+ auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id
)
self.finish_json(
@@ -82,6 +83,20 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_id = self.helper.create_uuid()
new_server_path = os.path.join(self.helper.servers_dir, new_server_id)
new_backup_path = os.path.join(self.helper.backup_path, new_server_id)
+ backup_data = {
+ "backup_name": f"{new_server_name} Backup",
+ "backup_location": new_backup_path,
+ "excluded_dirs": "",
+ "max_backups": 0,
+ "server_id": new_server_id,
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ "default": True,
+ "status": json.dumps({"status": "Standby", "message": ""}),
+ "enabled": True,
+ }
new_server_command = str(server_data.get("execution_command")).replace(
server_id, new_server_id
)
@@ -93,7 +108,6 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_name,
new_server_id,
new_server_path,
- new_backup_path,
new_server_command,
server_data.get("executable"),
new_server_log_path,
@@ -103,6 +117,8 @@ class ApiServersServerActionHandler(BaseApiHandler):
server_data.get("type"),
)
+ self.controller.management.add_backup_config(backup_data)
+
self.controller.management.add_to_audit_log(
user_id,
f"is cloning server {server_id} named {server_data.get('server_name')}",
diff --git a/app/classes/web/routes/api/servers/server/backups/backup/index.py b/app/classes/web/routes/api/servers/server/backups/backup/index.py
index 1b9ff915..5d8fd2b5 100644
--- a/app/classes/web/routes/api/servers/server/backups/backup/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/backup/index.py
@@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
-backup_schema = {
+BACKUP_SCHEMA = {
"type": "object",
"properties": {
"filename": {"type": "string", "minLength": 5},
@@ -19,11 +19,44 @@ backup_schema = {
"additionalProperties": False,
"minProperties": 1,
}
+BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+
+BASIC_BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+ID_MISMATCH = "Server ID backup server ID different"
+GENERAL_AUTH_ERROR = "Authorization Error"
class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
- def get(self, server_id: str):
+ def get(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
@@ -32,15 +65,40 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
),
auth_data[5],
)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.finish_json(200, backup_conf)
- def delete(self, server_id: str):
+ def delete(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
- backup_conf = self.controller.management.get_backup_config(server_id)
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
@@ -52,7 +110,66 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Edited server {server_id}: removed backup config"
+ f" {backup_conf['backup_name']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+ if backup_conf["default"]:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "NOT_ALLOWED",
+ "error_data": "Cannot delete default backup",
+ },
+ )
+ self.controller.management.delete_backup_config(backup_id)
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def post(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
try:
data = json.loads(self.request.body)
@@ -61,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
- validate(data, backup_schema)
+ validate(data, BACKUP_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
@@ -72,9 +189,246 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
},
)
+ svr_obj = self.controller.servers.get_server_obj(server_id)
+ server_data = self.controller.servers.get_server_data_by_id(server_id)
+ zip_name = data["filename"]
+ # import the server again based on zipfile
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
+ if Helpers.validate_traversal(backup_location, zip_name):
+ try:
+ temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name)
+ except (FileNotFoundError, NotADirectoryError) as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
+ )
+ if server_data["type"] == "minecraft-java":
+ new_server = self.controller.restore_java_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ "1",
+ "2",
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ elif server_data["type"] == "minecraft-bedrock":
+ new_server = self.controller.restore_bedrock_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ new_server_id = new_server
+ new_server = self.controller.servers.get_server_data(new_server)
+ self.controller.rename_backup_dir(
+ server_id,
+ new_server_id,
+ new_server["server_id"],
+ )
+ # preserve current schedules
+ for schedule in self.controller.management.get_schedules_by_server(
+ server_id
+ ):
+ job_data = self.controller.management.get_scheduled_task(
+ schedule.schedule_id
+ )
+ job_data["server_id"] = new_server_id
+ del job_data["schedule_id"]
+ self.tasks_manager.update_job(schedule.schedule_id, job_data)
+ # preserve execution command
+ new_server_obj = self.controller.servers.get_server_obj(new_server_id)
+ new_server_obj.execution_command = server_data["execution_command"]
+ # reset executable path
+ if svr_obj.path in svr_obj.executable:
+ new_server_obj.executable = str(svr_obj.executable).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ # reset run command path
+ if svr_obj.path in svr_obj.execution_command:
+ new_server_obj.execution_command = str(
+ svr_obj.execution_command
+ ).replace(svr_obj.path, new_server_obj.path)
+ # reset log path
+ if svr_obj.path in svr_obj.log_path:
+ new_server_obj.log_path = str(svr_obj.log_path).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ self.controller.servers.update_server(new_server_obj)
+
+ # preserve backup config
+ server_backups = self.controller.management.get_backups_by_server(server_id)
+ for backup in server_backups:
+ old_backup_id = server_backups[backup]["backup_id"]
+ del server_backups[backup]["backup_id"]
+ server_backups[backup]["server_id"] = new_server_id
+ if str(server_id) in (server_backups[backup]["backup_location"]):
+ server_backups[backup]["backup_location"] = str(
+ server_backups[backup]["backup_location"]
+ ).replace(str(server_id), str(new_server_id))
+ new_backup_id = self.controller.management.add_backup_config(
+ server_backups[backup]
+ )
+ os.listdir(server_backups[backup]["backup_location"])
+ FileHelpers.move_dir(
+ os.path.join(
+ server_backups[backup]["backup_location"], old_backup_id
+ ),
+ os.path.join(
+ server_backups[backup]["backup_location"], new_backup_id
+ ),
+ )
+ # remove old server's tasks
+ try:
+ self.tasks_manager.remove_all_server_tasks(server_id)
+ except JobLookupError as e:
+ logger.info("No active tasks found for server: {e}")
+ self.controller.remove_server(server_id, True)
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Restored server {server_id} backup {data['filename']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def patch(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+
+ try:
+ if auth_data[4]["superuser"]:
+ validate(data, BACKUP_PATCH_SCHEMA)
+ else:
+ validate(data, BASIC_BACKUP_PATCH_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.controller.management.update_backup_config(backup_id, data)
+ return self.finish_json(200, {"status": "ok"})
+
+
+class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler):
+ def delete(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+ # if the user doesn't have Schedule permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+ try:
+ validate(data, BACKUP_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ self.helper.validate_traversal(
+ os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]),
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ ),
+ )
try:
FileHelpers.del_file(
- os.path.join(backup_conf["backup_path"], data["filename"])
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ )
)
except Exception as e:
return self.finish_json(
@@ -88,136 +442,3 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
)
return self.finish_json(200, {"status": "ok"})
-
- def post(self, server_id: str):
- auth_data = self.authenticate_user()
- if not auth_data:
- return
- mask = self.controller.server_perms.get_lowest_api_perm_mask(
- self.controller.server_perms.get_user_permissions_mask(
- auth_data[4]["user_id"], server_id
- ),
- auth_data[5],
- )
- server_permissions = self.controller.server_perms.get_permissions(mask)
- if EnumPermissionsServer.BACKUP not in server_permissions:
- # if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- try:
- data = json.loads(self.request.body)
- except json.decoder.JSONDecodeError as e:
- return self.finish_json(
- 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
- )
- try:
- validate(data, backup_schema)
- except ValidationError as e:
- return self.finish_json(
- 400,
- {
- "status": "error",
- "error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
- },
- )
-
- try:
- svr_obj = self.controller.servers.get_server_obj(server_id)
- server_data = self.controller.servers.get_server_data_by_id(server_id)
- zip_name = data["filename"]
- # import the server again based on zipfile
- backup_path = svr_obj.backup_path
- if Helpers.validate_traversal(backup_path, zip_name):
- temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
- if server_data["type"] == "minecraft-java":
- new_server = self.controller.restore_java_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- "1",
- "2",
- server_data["server_port"],
- server_data["created_by"],
- )
- elif server_data["type"] == "minecraft-bedrock":
- new_server = self.controller.restore_bedrock_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- server_data["server_port"],
- server_data["created_by"],
- )
- new_server_id = new_server
- new_server = self.controller.servers.get_server_data(new_server)
- self.controller.rename_backup_dir(
- server_id, new_server_id, new_server["server_id"]
- )
- # preserve current schedules
- for schedule in self.controller.management.get_schedules_by_server(
- server_id
- ):
- job_data = self.controller.management.get_scheduled_task(
- schedule.schedule_id
- )
- job_data["server_id"] = new_server_id
- del job_data["schedule_id"]
- self.tasks_manager.update_job(schedule.schedule_id, job_data)
- # preserve execution command
- new_server_obj = self.controller.servers.get_server_obj(new_server_id)
- new_server_obj.execution_command = server_data["execution_command"]
- # reset executable path
- if svr_obj.path in svr_obj.executable:
- new_server_obj.executable = str(svr_obj.executable).replace(
- svr_obj.path, new_server_obj.path
- )
- # reset run command path
- if svr_obj.path in svr_obj.execution_command:
- new_server_obj.execution_command = str(
- svr_obj.execution_command
- ).replace(svr_obj.path, new_server_obj.path)
- # reset log path
- if svr_obj.path in svr_obj.log_path:
- new_server_obj.log_path = str(svr_obj.log_path).replace(
- svr_obj.path, new_server_obj.path
- )
- self.controller.servers.update_server(new_server_obj)
-
- # preserve backup config
- backup_config = self.controller.management.get_backup_config(server_id)
- excluded_dirs = []
- server_obj = self.controller.servers.get_server_obj(server_id)
- loop_backup_path = self.helper.wtol_path(server_obj.path)
- for item in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- item_path = self.helper.wtol_path(item)
- bu_path = os.path.relpath(item_path, loop_backup_path)
- bu_path = os.path.join(new_server_obj.path, bu_path)
- excluded_dirs.append(bu_path)
- self.controller.management.set_backup_config(
- new_server_id,
- new_server_obj.backup_path,
- backup_config["max_backups"],
- excluded_dirs,
- backup_config["compress"],
- backup_config["shutdown"],
- )
- # remove old server's tasks
- try:
- self.tasks_manager.remove_all_server_tasks(server_id)
- except JobLookupError as e:
- logger.info("No active tasks found for server: {e}")
- self.controller.remove_server(server_id, True)
- except (FileNotFoundError, NotADirectoryError) as e:
- return self.finish_json(
- 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
- )
- self.controller.management.add_to_audit_log(
- auth_data[4]["user_id"],
- f"Restored server {server_id} backup {data['filename']}",
- server_id,
- self.get_remote_ip(),
- )
-
- return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/backups/index.py b/app/classes/web/routes/api/servers/server/backups/index.py
index 865fe25a..a155f943 100644
--- a/app/classes/web/routes/api/servers/server/backups/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/index.py
@@ -1,3 +1,4 @@
+import os
import logging
import json
from jsonschema import validate
@@ -10,13 +11,14 @@ logger = logging.getLogger(__name__)
backup_patch_schema = {
"type": "object",
"properties": {
- "backup_path": {"type": "string", "minLength": 1},
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -25,12 +27,13 @@ backup_patch_schema = {
basic_backup_patch_schema = {
"type": "object",
"properties": {
+ "backup_name": {"type": "string", "minLength": 3},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -52,9 +55,11 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ self.finish_json(
+ 200, self.controller.management.get_backups_by_server(server_id)
+ )
- def patch(self, server_id: str):
+ def post(self, server_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -80,7 +85,6 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -94,33 +98,12 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- self.controller.management.set_backup_config(
- server_id,
- data.get(
- "backup_path",
- self.controller.management.get_backup_config(server_id)["backup_path"],
- ),
- data.get(
- "max_backups",
- self.controller.management.get_backup_config(server_id)["max_backups"],
- ),
- data.get("exclusions"),
- data.get(
- "compress",
- self.controller.management.get_backup_config(server_id)["compress"],
- ),
- data.get(
- "shutdown",
- self.controller.management.get_backup_config(server_id)["shutdown"],
- ),
- data.get(
- "backup_before",
- self.controller.management.get_backup_config(server_id)["before"],
- ),
- data.get(
- "backup_after",
- self.controller.management.get_backup_config(server_id)["after"],
- ),
- )
+ # Set the backup location automatically for non-super users. We should probably
+ # make the default location configurable for SU eventually
+ if not auth_data[4]["superuser"]:
+ data["backup_location"] = os.path.join(self.helper.backup_path, server_id)
+ data["server_id"] = server_id
+ if not data.get("excluded_dirs", None):
+ data["excluded_dirs"] = []
+ self.controller.management.add_backup_config(data)
return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/files.py b/app/classes/web/routes/api/servers/server/files.py
index 2951ff25..2699ae0c 100644
--- a/app/classes/web/routes/api/servers/server/files.py
+++ b/app/classes/web/routes/api/servers/server/files.py
@@ -72,7 +72,7 @@ file_delete_schema = {
class ApiServersServerFilesIndexHandler(BaseApiHandler):
- def post(self, server_id: str):
+ def post(self, server_id: str, backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -149,21 +149,35 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
filename = html.escape(raw_filename)
rel = os.path.join(folder, raw_filename)
dpath = os.path.join(folder, filename)
- if str(dpath) in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- if os.path.isdir(rel):
- return_json[filename] = {
- "path": dpath,
- "dir": True,
- "excluded": True,
- }
+ if backup_id:
+ if str(
+ dpath
+ ) in self.controller.management.get_excluded_backup_dirs(backup_id):
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": True,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": True,
+ }
else:
- return_json[filename] = {
- "path": dpath,
- "dir": False,
- "excluded": True,
- }
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": False,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": False,
+ }
else:
if os.path.isdir(rel):
return_json[filename] = {
@@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
)
self.finish_json(200, {"status": "ok", "data": file_contents})
- def delete(self, server_id: str):
+ def delete(self, server_id: str, _backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -247,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
return self.finish_json(200, {"status": "ok"})
return self.finish_json(500, {"status": "error", "error": str(proc)})
- def patch(self, server_id: str):
+ def patch(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -301,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
file_object.write(file_contents)
return self.finish_json(200, {"status": "ok"})
- def put(self, server_id: str):
+ def put(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
diff --git a/app/classes/web/routes/api/servers/server/index.py b/app/classes/web/routes/api/servers/server/index.py
index 9bfc3a9a..3562334c 100644
--- a/app/classes/web/routes/api/servers/server/index.py
+++ b/app/classes/web/routes/api/servers/server/index.py
@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
server_patch_schema = {
"type": "object",
"properties": {
- "server_name": {"type": "string", "minLength": 1},
+ "server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"},
"backup_path": {"type": "string"},
"executable": {"type": "string"},
"log_path": {"type": "string", "minLength": 1},
diff --git a/app/classes/web/routes/api/servers/server/tasks/index.py b/app/classes/web/routes/api/servers/server/tasks/index.py
index 0c03319c..ed8b9df9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/index.py
@@ -21,6 +21,9 @@ new_task_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@@ -110,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
)
if "parent" not in data:
data["parent"] = None
+ if data.get("action_id"):
+ backup_config = self.controller.management.get_backup_config(
+ data["action_id"]
+ )
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "Server ID Mismatch",
+ },
+ )
task_id = self.tasks_manager.schedule_job(data)
self.controller.management.add_to_audit_log(
diff --git a/app/classes/web/routes/api/servers/server/tasks/task/index.py b/app/classes/web/routes/api/servers/server/tasks/task/index.py
index dac60762..05c8cee9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/task/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/task/index.py
@@ -22,6 +22,9 @@ task_patch_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
diff --git a/app/classes/web/routes/api/users/index.py b/app/classes/web/routes/api/users/index.py
index dbdb1ac0..32ebd283 100644
--- a/app/classes/web/routes/api/users/index.py
+++ b/app/classes/web/routes/api/users/index.py
@@ -2,6 +2,7 @@ import logging
import json
from jsonschema import validate
from jsonschema.exceptions import ValidationError
+from app.classes.shared.translation import Translation
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.roles import Roles, HelperRoles
from app.classes.models.users import PUBLIC_USER_ATTRS
@@ -54,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
)
def post(self):
+ self.translator = Translation(self.helper)
new_user_schema = {
"type": "object",
"properties": {
@@ -87,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler):
try:
validate(data, new_user_schema)
except ValidationError as e:
+ err = self.translator.translate(
+ "validators",
+ e.schema["error"],
+ self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]),
+ )
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
+ "error_data": f"{str(err)}",
},
)
username = data["username"]
@@ -153,7 +160,11 @@ class ApiUsersIndexHandler(BaseApiHandler):
for role in roles:
role = self.controller.roles.get_role(role)
- if int(role["manager"]) != int(auth_data[4]["user_id"]) and not superuser:
+ if (
+ str(role.get("manager", "no manager found"))
+ != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
)
diff --git a/app/classes/web/routes/api/users/user/api.py b/app/classes/web/routes/api/users/user/api.py
index 3891ef83..4baac898 100644
--- a/app/classes/web/routes/api/users/user/api.py
+++ b/app/classes/web/routes/api/users/user/api.py
@@ -217,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
)
if (
- target_key.user_id != auth_data[4]["user_id"]
+ str(target_key.user_id) != str(auth_data[4]["user_id"])
and not auth_data[4]["superuser"]
):
return self.finish_json(
diff --git a/app/classes/web/routes/api/users/user/index.py b/app/classes/web/routes/api/users/user/index.py
index 9fa46200..b05e4ac3 100644
--- a/app/classes/web/routes/api/users/user/index.py
+++ b/app/classes/web/routes/api/users/user/index.py
@@ -132,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
-
try:
validate(data, user_patch_schema)
except ValidationError as e:
@@ -144,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if user_id == "@me":
user_id = user["user_id"]
-
if (
EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
and str(user["user_id"]) != str(user_id)
@@ -215,6 +212,25 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
)
+ user_modify = self.controller.users.get_user_roles_id(user_id)
+
+ for role in data["roles"]:
+            # Non-superusers may only assign roles they manage, or roles the
+            # target user already holds; any other role is rejected below.
+ if not superuser and (
+ str(
+ self.controller.roles.get_role(role).get(
+ "manager", "no manager found"
+ )
+ )
+ != str(auth_data[4]["user_id"])
+ and role not in user_modify
+ ):
+                # Role is neither managed by the exec user nor already held
+                # by the target user: refuse the modification.
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
+ )
user_obj = HelperUsers.get_user_model(user_id)
if "password" in data and str(user["user_id"]) != str(user_id):
diff --git a/app/classes/web/tornado_handler.py b/app/classes/web/tornado_handler.py
index 6285edfc..0b8140e3 100644
--- a/app/classes/web/tornado_handler.py
+++ b/app/classes/web/tornado_handler.py
@@ -24,7 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers
from app.classes.web.server_handler import ServerHandler
from app.classes.web.websocket_handler import WebSocketHandler
from app.classes.web.static_handler import CustomStaticHandler
-from app.classes.web.upload_handler import UploadHandler
from app.classes.web.status_handler import StatusHandler
@@ -142,7 +141,6 @@ class Webserver:
(r"/panel/(.*)", PanelHandler, handler_args),
(r"/server/(.*)", ServerHandler, handler_args),
(r"/ws", WebSocketHandler, handler_args),
- (r"/upload", UploadHandler, handler_args),
(r"/status", StatusHandler, handler_args),
# API Routes V2
*api_handlers(handler_args),
diff --git a/app/classes/web/upload_handler.py b/app/classes/web/upload_handler.py
deleted file mode 100644
index 747fa63b..00000000
--- a/app/classes/web/upload_handler.py
+++ /dev/null
@@ -1,331 +0,0 @@
-import logging
-import os
-import time
-import urllib.parse
-import tornado.web
-import tornado.options
-import tornado.httpserver
-from app.classes.models.crafty_permissions import EnumPermissionsCrafty
-
-from app.classes.models.server_permissions import EnumPermissionsServer
-from app.classes.shared.console import Console
-from app.classes.shared.helpers import Helpers
-from app.classes.shared.main_controller import Controller
-from app.classes.web.base_handler import BaseHandler
-from app.classes.shared.websocket_manager import WebSocketManager
-
-logger = logging.getLogger(__name__)
-
-
-@tornado.web.stream_request_body
-class UploadHandler(BaseHandler):
- # noinspection PyAttributeOutsideInit
- def initialize(
- self,
- helper: Helpers = None,
- controller: Controller = None,
- tasks_manager=None,
- translator=None,
- file_helper=None,
- ):
- self.helper = helper
- self.controller = controller
- self.tasks_manager = tasks_manager
- self.translator = translator
- self.file_helper = file_helper
-
- def prepare(self):
- # Class & Function Defination
- api_key, _token_data, exec_user = self.current_user
- self.upload_type = str(self.request.headers.get("X-Content-Upload-Type"))
-
- if self.upload_type == "server_import":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_api_key_permissions_list(api_key)
- )
- else:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_crafty_permissions_list(
- exec_user["user_id"]
- )
- )
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if (
- EnumPermissionsCrafty.SERVER_CREATION
- not in exec_user_server_permissions
- and not exec_user["superuser"]
- ):
- logger.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- self.do_upload = False
-
- path = os.path.join(self.controller.project_root, "import", "upload")
- self.helper.ensure_dir_exists(path)
- # Delete existing files
- if len(os.listdir(path)) > 0:
- for item in os.listdir():
- try:
- os.remove(os.path.join(path, item))
- except:
- logger.debug("Could not delete file on user server upload")
-
- self.helper.ensure_dir_exists(path)
- filename = urllib.parse.unquote(
- self.request.headers.get("X-FileName", None)
- )
- if not str(filename).endswith(".zip"):
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- elif self.upload_type == "background":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if not superuser:
- return self.finish_json(
- 401,
- {
- "status": "error",
- "error": "UNAUTHORIZED ACCESS",
- "info": self.helper.translation.translate(
- "error",
- "superError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if not self.request.headers.get("X-Content-Type", None).startswith(
- "image/"
- ):
- return self.finish_json(
- 415,
- {
- "status": "error",
- "error": "TYPE ERROR",
- "info": self.helper.translation.translate(
- "error",
- "fileError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- path = os.path.join(
- self.controller.project_root,
- "app/frontend/static/assets/images/auth/custom",
- )
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
- else:
- server_id = self.get_argument("server_id", None)
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.full_access
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.server_perms.get_api_key_permissions_list(
- api_key, server_id
- )
- )
- else:
- exec_user_server_permissions = (
- self.controller.server_perms.get_user_id_permissions_list(
- exec_user["user_id"], server_id
- )
- )
-
- server_id = self.request.headers.get("X-ServerId", None)
- if server_id is None:
- logger.warning("Server ID not found in upload handler call")
- Console.warning("Server ID not found in upload handler call")
- self.do_upload = False
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if EnumPermissionsServer.FILES not in exec_user_server_permissions:
- logger.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- self.do_upload = False
-
- path = self.request.headers.get("X-Path", None)
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if not self.helper.is_subdir(
- full_path,
- Helpers.get_os_understandable_path(
- self.controller.servers.get_server_data_by_id(server_id)["path"]
- ),
- ):
- logger.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- self.do_upload = False
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- def post(self):
- logger.info("Upload completed")
- if self.upload_type == "server_files":
- files_left = int(self.request.headers.get("X-Files-Left", None))
- else:
- files_left = 0
-
- if self.do_upload:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "success")
- self.finish("success") # Nope, I'm sending "success"
- self.f.close()
- else:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
-
- def data_received(self, chunk):
- if self.do_upload:
- self.f.write(chunk)
diff --git a/app/config/version.json b/app/config/version.json
index 53c90a03..c8db4444 100644
--- a/app/config/version.json
+++ b/app/config/version.json
@@ -1,5 +1,5 @@
{
"major": 4,
"minor": 4,
- "sub": 0
+ "sub": 1
}
diff --git a/app/frontend/static/assets/css/crafty.css b/app/frontend/static/assets/css/crafty.css
index 43dd2e6a..b765bca6 100644
--- a/app/frontend/static/assets/css/crafty.css
+++ b/app/frontend/static/assets/css/crafty.css
@@ -12,6 +12,16 @@ nav.sidebar {
position: fixed;
}
+td {
+ -ms-overflow-style: none;
+ /* IE and Edge */
+ scrollbar-width: none;
+ /* Firefox */
+}
+
+td::-webkit-scrollbar {
+ display: none;
+}
@media (min-width: 992px) {
nav.sidebar {
@@ -267,4 +277,7 @@ div.warnings div.wssError a:hover {
font-family: 'Sarabun', 'roboto', sans-serif;
}
-/**************************************************************/
\ No newline at end of file
+/**************************************************************/
+.hidden-input {
+ margin-left: -40px;
+}
\ No newline at end of file
diff --git a/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css b/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css
new file mode 100644
index 00000000..079eeb1f
--- /dev/null
+++ b/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css
@@ -0,0 +1,537 @@
+/*!
+ * Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
+ *
+ * Copyright 2012-2020 SnapAppointments, LLC
+ * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
+ */
+@-webkit-keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+@-o-keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+@keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+.bootstrap-select>select.bs-select-hidden,
+select.bs-select-hidden,
+select.selectpicker {
+ display: none !important
+}
+
+.bootstrap-select {
+ width: 220px;
+ vertical-align: middle
+}
+
+.bootstrap-select>.dropdown-toggle {
+ position: relative;
+ width: 100%;
+ text-align: right;
+ white-space: nowrap;
+ display: -webkit-inline-box;
+ display: -webkit-inline-flex;
+ display: -ms-inline-flexbox;
+ display: inline-flex;
+ -webkit-box-align: center;
+ -webkit-align-items: center;
+ -ms-flex-align: center;
+ align-items: center;
+ -webkit-box-pack: justify;
+ -webkit-justify-content: space-between;
+ -ms-flex-pack: justify;
+ justify-content: space-between
+}
+
+.bootstrap-select>.dropdown-toggle:after {
+ margin-top: -1px
+}
+
+.bootstrap-select>.dropdown-toggle.bs-placeholder,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:hover {
+ color: #999
+}
+
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:hover {
+ color: rgba(255, 255, 255, .5)
+}
+
+.bootstrap-select>select {
+ position: absolute !important;
+ bottom: 0;
+ left: 50%;
+ display: block !important;
+ width: .5px !important;
+ height: 100% !important;
+ padding: 0 !important;
+ opacity: 0 !important;
+ border: none;
+ z-index: 0 !important
+}
+
+.bootstrap-select>select.mobile-device {
+ top: 0;
+ left: 0;
+ display: block !important;
+ width: 100% !important;
+ z-index: 2 !important
+}
+
+.bootstrap-select.is-invalid .dropdown-toggle,
+.error .bootstrap-select .dropdown-toggle,
+.has-error .bootstrap-select .dropdown-toggle,
+.was-validated .bootstrap-select select:invalid+.dropdown-toggle {
+ border-color: #b94a48
+}
+
+.bootstrap-select.is-valid .dropdown-toggle,
+.was-validated .bootstrap-select select:valid+.dropdown-toggle {
+ border-color: #28a745
+}
+
+.bootstrap-select.fit-width {
+ width: auto !important
+}
+
+.bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn) {
+ width: 220px
+}
+
+.bootstrap-select .dropdown-toggle:focus,
+.bootstrap-select>select.mobile-device:focus+.dropdown-toggle {
+ outline: thin dotted #333 !important;
+ outline: 5px auto -webkit-focus-ring-color !important;
+ outline-offset: -2px
+}
+
+.bootstrap-select.form-control {
+ margin-bottom: 0;
+ padding: 0;
+ border: none;
+ height: auto
+}
+
+:not(.input-group)>.bootstrap-select.form-control:not([class*=col-]) {
+ width: 100%
+}
+
+.bootstrap-select.form-control.input-group-btn {
+ float: none;
+ z-index: auto
+}
+
+.form-inline .bootstrap-select,
+.form-inline .bootstrap-select.form-control:not([class*=col-]) {
+ width: auto
+}
+
+.bootstrap-select:not(.input-group-btn),
+.bootstrap-select[class*=col-] {
+ float: none;
+ display: inline-block;
+ margin-left: 0
+}
+
+.bootstrap-select.dropdown-menu-right,
+.bootstrap-select[class*=col-].dropdown-menu-right,
+.row .bootstrap-select[class*=col-].dropdown-menu-right {
+ float: right
+}
+
+.form-group .bootstrap-select,
+.form-horizontal .bootstrap-select,
+.form-inline .bootstrap-select {
+ margin-bottom: 0
+}
+
+.form-group-lg .bootstrap-select.form-control,
+.form-group-sm .bootstrap-select.form-control {
+ padding: 0
+}
+
+.form-group-lg .bootstrap-select.form-control .dropdown-toggle,
+.form-group-sm .bootstrap-select.form-control .dropdown-toggle {
+ height: 100%;
+ font-size: inherit;
+ line-height: inherit;
+ border-radius: inherit
+}
+
+.bootstrap-select.form-control-lg .dropdown-toggle,
+.bootstrap-select.form-control-sm .dropdown-toggle {
+ font-size: inherit;
+ line-height: inherit;
+ border-radius: inherit
+}
+
+.bootstrap-select.form-control-sm .dropdown-toggle {
+ padding: .25rem .5rem
+}
+
+.bootstrap-select.form-control-lg .dropdown-toggle {
+ padding: .5rem 1rem
+}
+
+.form-inline .bootstrap-select .form-control {
+ width: 100%
+}
+
+.bootstrap-select.disabled,
+.bootstrap-select>.disabled {
+ cursor: not-allowed
+}
+
+.bootstrap-select.disabled:focus,
+.bootstrap-select>.disabled:focus {
+ outline: 0 !important
+}
+
+.bootstrap-select.bs-container {
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 0 !important;
+ padding: 0 !important
+}
+
+.bootstrap-select.bs-container .dropdown-menu {
+ z-index: 1060
+}
+
+.bootstrap-select .dropdown-toggle .filter-option {
+ position: static;
+ top: 0;
+ left: 0;
+ float: left;
+ height: 100%;
+ width: 100%;
+ text-align: left;
+ overflow: hidden;
+ -webkit-box-flex: 0;
+ -webkit-flex: 0 1 auto;
+ -ms-flex: 0 1 auto;
+ flex: 0 1 auto
+}
+
+.bs3.bootstrap-select .dropdown-toggle .filter-option {
+ padding-right: inherit
+}
+
+.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option {
+ position: absolute;
+ padding-top: inherit;
+ padding-bottom: inherit;
+ padding-left: inherit;
+ float: none
+}
+
+.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option .filter-option-inner {
+ padding-right: inherit
+}
+
+.bootstrap-select .dropdown-toggle .filter-option-inner-inner {
+ overflow: hidden
+}
+
+.bootstrap-select .dropdown-toggle .filter-expand {
+ width: 0 !important;
+ float: left;
+ opacity: 0 !important;
+ overflow: hidden
+}
+
+.bootstrap-select .dropdown-toggle .caret {
+ position: absolute;
+ top: 50%;
+ right: 12px;
+ margin-top: -2px;
+ vertical-align: middle
+}
+
+.input-group .bootstrap-select.form-control .dropdown-toggle {
+ border-radius: inherit
+}
+
+.bootstrap-select[class*=col-] .dropdown-toggle {
+ width: 100%
+}
+
+.bootstrap-select .dropdown-menu {
+ min-width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bootstrap-select .dropdown-menu>.inner:focus {
+ outline: 0 !important
+}
+
+.bootstrap-select .dropdown-menu.inner {
+ position: static;
+ float: none;
+ border: 0;
+ padding: 0;
+ margin: 0;
+ border-radius: 0;
+ -webkit-box-shadow: none;
+ box-shadow: none
+}
+
+.bootstrap-select .dropdown-menu li {
+ position: relative
+}
+
+.bootstrap-select .dropdown-menu li.active small {
+ color: rgba(255, 255, 255, .5) !important
+}
+
+.bootstrap-select .dropdown-menu li.disabled a {
+ cursor: not-allowed
+}
+
+.bootstrap-select .dropdown-menu li a {
+ cursor: pointer;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none
+}
+
+.bootstrap-select .dropdown-menu li a.opt {
+ position: relative;
+ padding-left: 2.25em
+}
+
+.bootstrap-select .dropdown-menu li a span.check-mark {
+ display: none
+}
+
+.bootstrap-select .dropdown-menu li a span.text {
+ display: inline-block
+}
+
+.bootstrap-select .dropdown-menu li small {
+ padding-left: .5em
+}
+
+.bootstrap-select .dropdown-menu .notify {
+ position: absolute;
+ bottom: 5px;
+ width: 96%;
+ margin: 0 2%;
+ min-height: 26px;
+ padding: 3px 5px;
+ background: #f5f5f5;
+ border: 1px solid #e3e3e3;
+ -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
+ box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
+ pointer-events: none;
+ opacity: .9;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bootstrap-select .dropdown-menu .notify.fadeOut {
+ -webkit-animation: .3s linear 750ms forwards bs-notify-fadeOut;
+ -o-animation: .3s linear 750ms forwards bs-notify-fadeOut;
+ animation: .3s linear 750ms forwards bs-notify-fadeOut
+}
+
+.bootstrap-select .no-results {
+ padding: 3px;
+ background: #f5f5f5;
+ margin: 0 5px;
+ white-space: nowrap
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .filter-option {
+ position: static;
+ display: inline;
+ padding: 0
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner,
+.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner-inner {
+ display: inline
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .bs-caret:before {
+ content: '\00a0'
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .caret {
+ position: static;
+ top: auto;
+ margin-top: -1px
+}
+
+.bootstrap-select.show-tick .dropdown-menu .selected span.check-mark {
+ position: absolute;
+ display: inline-block;
+ right: 15px;
+ top: 5px
+}
+
+.bootstrap-select.show-tick .dropdown-menu li a span.text {
+ margin-right: 34px
+}
+
+.bootstrap-select .bs-ok-default:after {
+ content: '';
+ display: block;
+ width: .5em;
+ height: 1em;
+ border-style: solid;
+ border-width: 0 .26em .26em 0;
+ -webkit-transform-style: preserve-3d;
+ transform-style: preserve-3d;
+ -webkit-transform: rotate(45deg);
+ -ms-transform: rotate(45deg);
+ -o-transform: rotate(45deg);
+ transform: rotate(45deg)
+}
+
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle {
+ z-index: 1061
+}
+
+.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:before {
+ content: '';
+ border-left: 7px solid transparent;
+ border-right: 7px solid transparent;
+ border-bottom: 7px solid rgba(204, 204, 204, .2);
+ position: absolute;
+ bottom: -4px;
+ left: 9px;
+ display: none
+}
+
+.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:after {
+ content: '';
+ border-left: 6px solid transparent;
+ border-right: 6px solid transparent;
+ border-bottom: 6px solid #fff;
+ position: absolute;
+ bottom: -4px;
+ left: 10px;
+ display: none
+}
+
+.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:before {
+ bottom: auto;
+ top: -4px;
+ border-top: 7px solid rgba(204, 204, 204, .2);
+ border-bottom: 0
+}
+
+.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:after {
+ bottom: auto;
+ top: -4px;
+ border-top: 6px solid #fff;
+ border-bottom: 0
+}
+
+.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:before {
+ right: 12px;
+ left: auto
+}
+
+.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:after {
+ right: 13px;
+ left: auto
+}
+
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:after,
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:before,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:after,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:before {
+ display: block
+}
+
+.bs-actionsbox,
+.bs-donebutton,
+.bs-searchbox {
+ padding: 4px 8px
+}
+
+.bs-actionsbox {
+ width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bs-actionsbox .btn-group button {
+ width: 50%
+}
+
+.bs-donebutton {
+ float: left;
+ width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bs-donebutton .btn-group button {
+ width: 100%
+}
+
+.bs-searchbox+.bs-actionsbox {
+ padding: 0 8px 4px
+}
+
+.bs-searchbox .form-control {
+ margin-bottom: 0;
+ width: 100%;
+ float: none
+}
\ No newline at end of file
diff --git a/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js b/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js
new file mode 100644
index 00000000..85e9683d
--- /dev/null
+++ b/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js
@@ -0,0 +1,9 @@
+/*!
+ * Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
+ *
+ * Copyright 2012-2020 SnapAppointments, LLC
+ * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
+ */
+
+!function (e, t) { void 0 === e && void 0 !== window && (e = window), "function" == typeof define && define.amd ? define(["jquery"], function (e) { return t(e) }) : "object" == typeof module && module.exports ? module.exports = t(require("jquery")) : t(e.jQuery) }(this, function (e) { !function (P) { "use strict"; var d = ["sanitize", "whiteList", "sanitizeFn"], r = ["background", "cite", "href", "itemtype", "longdesc", "poster", "src", "xlink:href"], e = { "*": ["class", "dir", "id", "lang", "role", "tabindex", "style", /^aria-[\w-]*$/i], a: ["target", "href", "title", "rel"], area: [], b: [], br: [], col: [], code: [], div: [], em: [], hr: [], h1: [], h2: [], h3: [], h4: [], h5: [], h6: [], i: [], img: ["src", "alt", "title", "width", "height"], li: [], ol: [], p: [], pre: [], s: [], small: [], span: [], sub: [], sup: [], strong: [], u: [], ul: [] }, l = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi, a = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i; function v(e, t) { var i = e.nodeName.toLowerCase(); if (-1 !== P.inArray(i, t)) return -1 === P.inArray(i, r) || Boolean(e.nodeValue.match(l) || e.nodeValue.match(a)); for (var s = P(t).filter(function (e, t) { return t instanceof RegExp }), n = 0, o = s.length; n < o; n++)if (i.match(s[n])) return !0; return !1 } function W(e, t, i) { if (i && "function" == typeof i) return i(e); for (var s = Object.keys(t), n = 0, o = e.length; n < o; n++)for (var r = e[n].querySelectorAll("*"), l = 0, a = r.length; l < a; l++) { var c = r[l], d = c.nodeName.toLowerCase(); if (-1 !== s.indexOf(d)) for (var h = [].slice.call(c.attributes), p = [].concat(t["*"] || [], t[d] || []), u = 0, f = h.length; u < f; u++) { var m = h[u]; v(m, p) || c.removeAttribute(m.nodeName) } else c.parentNode.removeChild(c) } } "classList" in document.createElement("_") || function (e) { if ("Element" in e) { var t = "classList", i = "prototype", s = 
e.Element[i], n = Object, o = function () { var i = P(this); return { add: function (e) { return e = Array.prototype.slice.call(arguments).join(" "), i.addClass(e) }, remove: function (e) { return e = Array.prototype.slice.call(arguments).join(" "), i.removeClass(e) }, toggle: function (e, t) { return i.toggleClass(e, t) }, contains: function (e) { return i.hasClass(e) } } }; if (n.defineProperty) { var r = { get: o, enumerable: !0, configurable: !0 }; try { n.defineProperty(s, t, r) } catch (e) { void 0 !== e.number && -2146823252 !== e.number || (r.enumerable = !1, n.defineProperty(s, t, r)) } } else n[i].__defineGetter__ && s.__defineGetter__(t, o) } }(window); var t, c, i = document.createElement("_"); if (i.classList.add("c1", "c2"), !i.classList.contains("c2")) { var s = DOMTokenList.prototype.add, n = DOMTokenList.prototype.remove; DOMTokenList.prototype.add = function () { Array.prototype.forEach.call(arguments, s.bind(this)) }, DOMTokenList.prototype.remove = function () { Array.prototype.forEach.call(arguments, n.bind(this)) } } if (i.classList.toggle("c3", !1), i.classList.contains("c3")) { var o = DOMTokenList.prototype.toggle; DOMTokenList.prototype.toggle = function (e, t) { return 1 in arguments && !this.contains(e) == !t ? t : o.call(this, e) } } function h(e) { if (null == this) throw new TypeError; var t = String(this); if (e && "[object RegExp]" == c.call(e)) throw new TypeError; var i = t.length, s = String(e), n = s.length, o = 1 < arguments.length ? arguments[1] : void 0, r = o ? 
Number(o) : 0; r != r && (r = 0); var l = Math.min(Math.max(r, 0), i); if (i < n + l) return !1; for (var a = -1; ++a < n;)if (t.charCodeAt(l + a) != s.charCodeAt(a)) return !1; return !0 } function O(e, t) { var i, s = e.selectedOptions, n = []; if (t) { for (var o = 0, r = s.length; o < r; o++)(i = s[o]).disabled || "OPTGROUP" === i.parentNode.tagName && i.parentNode.disabled || n.push(i); return n } return s } function z(e, t) { for (var i, s = [], n = t || e.selectedOptions, o = 0, r = n.length; o < r; o++)(i = n[o]).disabled || "OPTGROUP" === i.parentNode.tagName && i.parentNode.disabled || s.push(i.value); return e.multiple ? s : s.length ? s[0] : null } i = null, String.prototype.startsWith || (t = function () { try { var e = {}, t = Object.defineProperty, i = t(e, e, e) && t } catch (e) { } return i }(), c = {}.toString, t ? t(String.prototype, "startsWith", { value: h, configurable: !0, writable: !0 }) : String.prototype.startsWith = h), Object.keys || (Object.keys = function (e, t, i) { for (t in i = [], e) i.hasOwnProperty.call(e, t) && i.push(t); return i }), HTMLSelectElement && !HTMLSelectElement.prototype.hasOwnProperty("selectedOptions") && Object.defineProperty(HTMLSelectElement.prototype, "selectedOptions", { get: function () { return this.querySelectorAll(":checked") } }); var p = { useDefault: !1, _set: P.valHooks.select.set }; P.valHooks.select.set = function (e, t) { return t && !p.useDefault && P(e).data("selected", !0), p._set.apply(this, arguments) }; var T = null, u = function () { try { return new Event("change"), !0 } catch (e) { return !1 } }(); function k(e, t, i, s) { for (var n = ["display", "subtext", "tokens"], o = !1, r = 0; r < n.length; r++) { var l = n[r], a = e[l]; if (a && (a = a.toString(), "display" === l && (a = a.replace(/<[^>]+>/g, "")), s && (a = w(a)), a = a.toUpperCase(), o = "contains" === i ? 
0 <= a.indexOf(t) : a.startsWith(t))) break } return o } function N(e) { return parseInt(e, 10) || 0 } P.fn.triggerNative = function (e) { var t, i = this[0]; i.dispatchEvent ? (u ? t = new Event(e, { bubbles: !0 }) : (t = document.createEvent("Event")).initEvent(e, !0, !1), i.dispatchEvent(t)) : i.fireEvent ? ((t = document.createEventObject()).eventType = e, i.fireEvent("on" + e, t)) : this.trigger(e) }; var f = { "\xc0": "A", "\xc1": "A", "\xc2": "A", "\xc3": "A", "\xc4": "A", "\xc5": "A", "\xe0": "a", "\xe1": "a", "\xe2": "a", "\xe3": "a", "\xe4": "a", "\xe5": "a", "\xc7": "C", "\xe7": "c", "\xd0": "D", "\xf0": "d", "\xc8": "E", "\xc9": "E", "\xca": "E", "\xcb": "E", "\xe8": "e", "\xe9": "e", "\xea": "e", "\xeb": "e", "\xcc": "I", "\xcd": "I", "\xce": "I", "\xcf": "I", "\xec": "i", "\xed": "i", "\xee": "i", "\xef": "i", "\xd1": "N", "\xf1": "n", "\xd2": "O", "\xd3": "O", "\xd4": "O", "\xd5": "O", "\xd6": "O", "\xd8": "O", "\xf2": "o", "\xf3": "o", "\xf4": "o", "\xf5": "o", "\xf6": "o", "\xf8": "o", "\xd9": "U", "\xda": "U", "\xdb": "U", "\xdc": "U", "\xf9": "u", "\xfa": "u", "\xfb": "u", "\xfc": "u", "\xdd": "Y", "\xfd": "y", "\xff": "y", "\xc6": "Ae", "\xe6": "ae", "\xde": "Th", "\xfe": "th", "\xdf": "ss", "\u0100": "A", "\u0102": "A", "\u0104": "A", "\u0101": "a", "\u0103": "a", "\u0105": "a", "\u0106": "C", "\u0108": "C", "\u010a": "C", "\u010c": "C", "\u0107": "c", "\u0109": "c", "\u010b": "c", "\u010d": "c", "\u010e": "D", "\u0110": "D", "\u010f": "d", "\u0111": "d", "\u0112": "E", "\u0114": "E", "\u0116": "E", "\u0118": "E", "\u011a": "E", "\u0113": "e", "\u0115": "e", "\u0117": "e", "\u0119": "e", "\u011b": "e", "\u011c": "G", "\u011e": "G", "\u0120": "G", "\u0122": "G", "\u011d": "g", "\u011f": "g", "\u0121": "g", "\u0123": "g", "\u0124": "H", "\u0126": "H", "\u0125": "h", "\u0127": "h", "\u0128": "I", "\u012a": "I", "\u012c": "I", "\u012e": "I", "\u0130": "I", "\u0129": "i", "\u012b": "i", "\u012d": "i", "\u012f": "i", "\u0131": "i", "\u0134": "J", 
"\u0135": "j", "\u0136": "K", "\u0137": "k", "\u0138": "k", "\u0139": "L", "\u013b": "L", "\u013d": "L", "\u013f": "L", "\u0141": "L", "\u013a": "l", "\u013c": "l", "\u013e": "l", "\u0140": "l", "\u0142": "l", "\u0143": "N", "\u0145": "N", "\u0147": "N", "\u014a": "N", "\u0144": "n", "\u0146": "n", "\u0148": "n", "\u014b": "n", "\u014c": "O", "\u014e": "O", "\u0150": "O", "\u014d": "o", "\u014f": "o", "\u0151": "o", "\u0154": "R", "\u0156": "R", "\u0158": "R", "\u0155": "r", "\u0157": "r", "\u0159": "r", "\u015a": "S", "\u015c": "S", "\u015e": "S", "\u0160": "S", "\u015b": "s", "\u015d": "s", "\u015f": "s", "\u0161": "s", "\u0162": "T", "\u0164": "T", "\u0166": "T", "\u0163": "t", "\u0165": "t", "\u0167": "t", "\u0168": "U", "\u016a": "U", "\u016c": "U", "\u016e": "U", "\u0170": "U", "\u0172": "U", "\u0169": "u", "\u016b": "u", "\u016d": "u", "\u016f": "u", "\u0171": "u", "\u0173": "u", "\u0174": "W", "\u0175": "w", "\u0176": "Y", "\u0177": "y", "\u0178": "Y", "\u0179": "Z", "\u017b": "Z", "\u017d": "Z", "\u017a": "z", "\u017c": "z", "\u017e": "z", "\u0132": "IJ", "\u0133": "ij", "\u0152": "Oe", "\u0153": "oe", "\u0149": "'n", "\u017f": "s" }, m = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g, g = RegExp("[\\u0300-\\u036f\\ufe20-\\ufe2f\\u20d0-\\u20ff\\u1ab0-\\u1aff\\u1dc0-\\u1dff]", "g"); function b(e) { return f[e] } function w(e) { return (e = e.toString()) && e.replace(m, b).replace(g, "") } var I, x, y, $, S = (I = { "&": "&", "<": "<", ">": ">", '"': """, "'": "'", "`": "`" }, x = "(?:" + Object.keys(I).join("|") + ")", y = RegExp(x), $ = RegExp(x, "g"), function (e) { return e = null == e ? "" : "" + e, y.test(e) ? 
e.replace($, E) : e }); function E(e) { return I[e] } var C = { 32: " ", 48: "0", 49: "1", 50: "2", 51: "3", 52: "4", 53: "5", 54: "6", 55: "7", 56: "8", 57: "9", 59: ";", 65: "A", 66: "B", 67: "C", 68: "D", 69: "E", 70: "F", 71: "G", 72: "H", 73: "I", 74: "J", 75: "K", 76: "L", 77: "M", 78: "N", 79: "O", 80: "P", 81: "Q", 82: "R", 83: "S", 84: "T", 85: "U", 86: "V", 87: "W", 88: "X", 89: "Y", 90: "Z", 96: "0", 97: "1", 98: "2", 99: "3", 100: "4", 101: "5", 102: "6", 103: "7", 104: "8", 105: "9" }, A = 27, L = 13, D = 32, H = 9, B = 38, R = 40, M = { success: !1, major: "3" }; try { M.full = (P.fn.dropdown.Constructor.VERSION || "").split(" ")[0].split("."), M.major = M.full[0], M.success = !0 } catch (e) { } var U = 0, j = ".bs.select", V = { DISABLED: "disabled", DIVIDER: "divider", SHOW: "open", DROPUP: "dropup", MENU: "dropdown-menu", MENURIGHT: "dropdown-menu-right", MENULEFT: "dropdown-menu-left", BUTTONCLASS: "btn-default", POPOVERHEADER: "popover-title", ICONBASE: "glyphicon", TICKICON: "glyphicon-ok" }, F = { MENU: "." + V.MENU }, _ = { div: document.createElement("div"), span: document.createElement("span"), i: document.createElement("i"), subtext: document.createElement("small"), a: document.createElement("a"), li: document.createElement("li"), whitespace: document.createTextNode("\xa0"), fragment: document.createDocumentFragment() }; _.noResults = _.li.cloneNode(!1), _.noResults.className = "no-results", _.a.setAttribute("role", "option"), _.a.className = "dropdown-item", _.subtext.className = "text-muted", _.text = _.span.cloneNode(!1), _.text.className = "text", _.checkMark = _.span.cloneNode(!1); var G = new RegExp(B + "|" + R), q = new RegExp("^" + H + "$|" + A), K = { li: function (e, t, i) { var s = _.li.cloneNode(!1); return e && (1 === e.nodeType || 11 === e.nodeType ? 
s.appendChild(e) : s.innerHTML = e), void 0 !== t && "" !== t && (s.className = t), null != i && s.classList.add("optgroup-" + i), s }, a: function (e, t, i) { var s = _.a.cloneNode(!0); return e && (11 === e.nodeType ? s.appendChild(e) : s.insertAdjacentHTML("beforeend", e)), void 0 !== t && "" !== t && s.classList.add.apply(s.classList, t.split(/\s+/)), i && s.setAttribute("style", i), s }, text: function (e, t) { var i, s, n = _.text.cloneNode(!1); if (e.content) n.innerHTML = e.content; else { if (n.textContent = e.text, e.icon) { var o = _.whitespace.cloneNode(!1); (s = (!0 === t ? _.i : _.span).cloneNode(!1)).className = this.options.iconBase + " " + e.icon, _.fragment.appendChild(s), _.fragment.appendChild(o) } e.subtext && ((i = _.subtext.cloneNode(!1)).textContent = e.subtext, n.appendChild(i)) } if (!0 === t) for (; 0 < n.childNodes.length;)_.fragment.appendChild(n.childNodes[0]); else _.fragment.appendChild(n); return _.fragment }, label: function (e) { var t, i, s = _.text.cloneNode(!1); if (s.innerHTML = e.display, e.icon) { var n = _.whitespace.cloneNode(!1); (i = _.span.cloneNode(!1)).className = this.options.iconBase + " " + e.icon, _.fragment.appendChild(i), _.fragment.appendChild(n) } return e.subtext && ((t = _.subtext.cloneNode(!1)).textContent = e.subtext, s.appendChild(t)), _.fragment.appendChild(s), _.fragment } }; var Y = function (e, t) { var i = this; p.useDefault || (P.valHooks.select.set = p._set, p.useDefault = !0), this.$element = P(e), this.$newElement = null, this.$button = null, this.$menu = null, this.options = t, this.selectpicker = { main: {}, search: {}, current: {}, view: {}, isSearching: !1, keydown: { keyHistory: "", resetKeyHistory: { start: function () { return setTimeout(function () { i.selectpicker.keydown.keyHistory = "" }, 800) } } } }, this.sizeInfo = {}, null === this.options.title && (this.options.title = this.$element.attr("title")); var s = this.options.windowPadding; "number" == typeof s && 
(this.options.windowPadding = [s, s, s, s]), this.val = Y.prototype.val, this.render = Y.prototype.render, this.refresh = Y.prototype.refresh, this.setStyle = Y.prototype.setStyle, this.selectAll = Y.prototype.selectAll, this.deselectAll = Y.prototype.deselectAll, this.destroy = Y.prototype.destroy, this.remove = Y.prototype.remove, this.show = Y.prototype.show, this.hide = Y.prototype.hide, this.init() }; function Z(e) { var l, a = arguments, c = e; if ([].shift.apply(a), !M.success) { try { M.full = (P.fn.dropdown.Constructor.VERSION || "").split(" ")[0].split(".") } catch (e) { Y.BootstrapVersion ? M.full = Y.BootstrapVersion.split(" ")[0].split(".") : (M.full = [M.major, "0", "0"], console.warn("There was an issue retrieving Bootstrap's version. Ensure Bootstrap is being loaded before bootstrap-select and there is no namespace collision. If loading Bootstrap asynchronously, the version may need to be manually specified via $.fn.selectpicker.Constructor.BootstrapVersion.", e)) } M.major = M.full[0], M.success = !0 } if ("4" === M.major) { var t = []; Y.DEFAULTS.style === V.BUTTONCLASS && t.push({ name: "style", className: "BUTTONCLASS" }), Y.DEFAULTS.iconBase === V.ICONBASE && t.push({ name: "iconBase", className: "ICONBASE" }), Y.DEFAULTS.tickIcon === V.TICKICON && t.push({ name: "tickIcon", className: "TICKICON" }), V.DIVIDER = "dropdown-divider", V.SHOW = "show", V.BUTTONCLASS = "btn-light", V.POPOVERHEADER = "popover-header", V.ICONBASE = "", V.TICKICON = "bs-ok-default"; for (var i = 0; i < t.length; i++) { e = t[i]; Y.DEFAULTS[e.name] = V[e.className] } } var s = this.each(function () { var e = P(this); if (e.is("select")) { var t = e.data("selectpicker"), i = "object" == typeof c && c; if (t) { if (i) for (var s in i) Object.prototype.hasOwnProperty.call(i, s) && (t.options[s] = i[s]) } else { var n = e.data(); for (var o in n) Object.prototype.hasOwnProperty.call(n, o) && -1 !== P.inArray(o, d) && delete n[o]; var r = P.extend({}, Y.DEFAULTS, 
P.fn.selectpicker.defaults || {}, n, i); r.template = P.extend({}, Y.DEFAULTS.template, P.fn.selectpicker.defaults ? P.fn.selectpicker.defaults.template : {}, n.template, i.template), e.data("selectpicker", t = new Y(this, r)) } "string" == typeof c && (l = t[c] instanceof Function ? t[c].apply(t, a) : t.options[c]) } }); return void 0 !== l ? l : s } Y.VERSION = "1.13.18", Y.DEFAULTS = { noneSelectedText: "Nothing selected", noneResultsText: "No results matched {0}", countSelectedText: function (e, t) { return 1 == e ? "{0} item selected" : "{0} items selected" }, maxOptionsText: function (e, t) { return [1 == e ? "Limit reached ({n} item max)" : "Limit reached ({n} items max)", 1 == t ? "Group limit reached ({n} item max)" : "Group limit reached ({n} items max)"] }, selectAllText: "Select All", deselectAllText: "Deselect All", doneButton: !1, doneButtonText: "Close", multipleSeparator: ", ", styleBase: "btn", style: V.BUTTONCLASS, size: "auto", title: null, selectedTextFormat: "values", width: !1, container: !1, hideDisabled: !1, showSubtext: !1, showIcon: !0, showContent: !0, dropupAuto: !0, header: !1, liveSearch: !1, liveSearchPlaceholder: null, liveSearchNormalize: !1, liveSearchStyle: "contains", actionsBox: !1, iconBase: V.ICONBASE, tickIcon: V.TICKICON, showTick: !1, template: { caret: '' }, maxOptions: !1, mobile: !1, selectOnTab: !1, dropdownAlignRight: !1, windowPadding: 0, virtualScroll: 600, display: !1, sanitize: !0, sanitizeFn: null, whiteList: e }, Y.prototype = { constructor: Y, init: function () { var i = this, e = this.$element.attr("id"), t = this.$element[0], s = t.form; U++, this.selectId = "bs-select-" + U, t.classList.add("bs-select-hidden"), this.multiple = this.$element.prop("multiple"), this.autofocus = this.$element.prop("autofocus"), t.classList.contains("show-tick") && (this.options.showTick = !0), this.$newElement = this.createDropdown(), this.buildData(), this.$element.after(this.$newElement).prependTo(this.$newElement), s && null 
=== t.form && (s.id || (s.id = "form-" + this.selectId), t.setAttribute("form", s.id)), this.$button = this.$newElement.children("button"), this.$menu = this.$newElement.children(F.MENU), this.$menuInner = this.$menu.children(".inner"), this.$searchbox = this.$menu.find("input"), t.classList.remove("bs-select-hidden"), !0 === this.options.dropdownAlignRight && this.$menu[0].classList.add(V.MENURIGHT), void 0 !== e && this.$button.attr("data-id", e), this.checkDisabled(), this.clickListener(), this.options.liveSearch ? (this.liveSearchListener(), this.focusedParent = this.$searchbox[0]) : this.focusedParent = this.$menuInner[0], this.setStyle(), this.render(), this.setWidth(), this.options.container ? this.selectPosition() : this.$element.on("hide" + j, function () { if (i.isVirtual()) { var e = i.$menuInner[0], t = e.firstChild.cloneNode(!1); e.replaceChild(t, e.firstChild), e.scrollTop = 0 } }), this.$menu.data("this", this), this.$newElement.data("this", this), this.options.mobile && this.mobile(), this.$newElement.on({ "hide.bs.dropdown": function (e) { i.$element.trigger("hide" + j, e) }, "hidden.bs.dropdown": function (e) { i.$element.trigger("hidden" + j, e) }, "show.bs.dropdown": function (e) { i.$element.trigger("show" + j, e) }, "shown.bs.dropdown": function (e) { i.$element.trigger("shown" + j, e) } }), t.hasAttribute("required") && this.$element.on("invalid" + j, function () { i.$button[0].classList.add("bs-invalid"), i.$element.on("shown" + j + ".invalid", function () { i.$element.val(i.$element.val()).off("shown" + j + ".invalid") }).on("rendered" + j, function () { this.validity.valid && i.$button[0].classList.remove("bs-invalid"), i.$element.off("rendered" + j) }), i.$button.on("blur" + j, function () { i.$element.trigger("focus").trigger("blur"), i.$button.off("blur" + j) }) }), setTimeout(function () { i.buildList(), i.$element.trigger("loaded" + j) }) }, createDropdown: function () { var e = this.multiple || this.options.showTick ? 
" show-tick" : "", t = this.multiple ? ' aria-multiselectable="true"' : "", i = "", s = this.autofocus ? " autofocus" : ""; M.major < 4 && this.$element.parent().hasClass("input-group") && (i = " input-group-btn"); var n, o = "", r = "", l = "", a = ""; return this.options.header && (o = '
Backing up {{data['server_stats']['world_size']}}
- {% end %} - -- {{ translate('serverWizard', 'addRole', data['lang']) }} - - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-{{ translate('serverWizard', 'addRole', data['lang']) + }} + - {{ translate('serverWizard', 'autoCreate', + data['lang']) }} +
+- {{ translate('serverWizard', 'addRole', data['lang']) }} - - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-{{ translate('serverWizard', 'addRole', data['lang']) + }} + - {{ translate('serverWizard', 'autoCreate', + data['lang']) }} +
+- {{ translate('serverWizard', 'addRole', data['lang']) - }} - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-- {{ translate('serverWizard', 'addRole', - data['lang']) - }} - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-- {{ translate('serverWizard', 'addRole', data['lang']) }} - - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-{{ translate('serverWizard', 'addRole', data['lang']) + }} + - {{ translate('serverWizard', 'autoCreate', + data['lang']) }} +
+- {{ translate('serverWizard', 'addRole', data['lang']) - }} - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-- {{ translate('serverWizard', 'addRole', - data['lang']) - }} - {{ translate('serverWizard', 'autoCreate', - data['lang']) }} -
-