diff --git a/.gitlab/issue_templates/Bug.md b/.gitlab/issue_templates/Bug.md
index 88b2a3b4..77136b69 100644
--- a/.gitlab/issue_templates/Bug.md
+++ b/.gitlab/issue_templates/Bug.md
@@ -3,16 +3,16 @@
- **Install Type:** Git Cloned(Manual) / Installer / WinPackage / Docker
## What Happened?
-*A brief description of what happened when you tried to perform an action*
+
## Expected result
-*What should have happened when you performed the actions*
+
## Steps to reproduce
-*List the steps required to produce the error. These should be as few as possible*
+
## Screenshots
-Any relevant screenshots which show the issue* !-->*
+
## Priority/Severity
- [ ] High (anything that impacts the normal user flow or blocks app usage)
diff --git a/.gitlab/issue_templates/Change Request.md b/.gitlab/issue_templates/Change Request.md
index 995e7ad1..f767ee15 100644
--- a/.gitlab/issue_templates/Change Request.md
+++ b/.gitlab/issue_templates/Change Request.md
@@ -1,13 +1,14 @@
## Summary
-*Outline the issue being faced, and why this needs to change*
+
## Area of the system
-*This might only be one part, but may involve multiple sections, Login/Dashboad/Terminal/Config*
+
## How does this currently work?
+
## What is the desired way of working?
-*After the change, what should the process/operation be?*
+
## Priority/Severity
- [ ] High (This will bring a huge increase in performance/productivity/usability)
diff --git a/.gitlab/issue_templates/Feature Request.md b/.gitlab/issue_templates/Feature Request.md
index 2450aa60..b26067d6 100644
--- a/.gitlab/issue_templates/Feature Request.md
+++ b/.gitlab/issue_templates/Feature Request.md
@@ -1,8 +1,8 @@
## Problem Statement
-*What is the issue being faced and needs addressing?*
+
## Who will benefit?
-*Will this fix a problem that only one user has, or will it benefit a lot of people*
+
## Benefits and risks
What benefits does this bring?
@@ -16,10 +16,10 @@
## Proposed solution
-*How would you like to see this issue resolved?*
+
## Examples
-*Are there any examples of this which exist in other software?*
+
## Priority/Severity
- [ ] High (This will bring a huge increase in performance/productivity/usability)
diff --git a/.gitlab/lint.yml b/.gitlab/lint.yml
index bc797808..37649e1a 100644
--- a/.gitlab/lint.yml
+++ b/.gitlab/lint.yml
@@ -5,7 +5,7 @@ yamllint:
stage: lint
image: registry.gitlab.com/pipeline-components/yamllint:latest
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -18,7 +18,7 @@ jsonlint:
stage: lint
image: registry.gitlab.com/pipeline-components/jsonlint:latest
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -33,7 +33,7 @@ black:
stage: lint
image: registry.gitlab.com/pipeline-components/black:latest
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -46,7 +46,7 @@ pylint:
stage: lint
image: registry.gitlab.com/pipeline-components/pylint:latest
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -69,7 +69,7 @@ sonarcloud-check:
name: sonarsource/sonar-scanner-cli:latest
entrypoint: [""]
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$SONAR_TOKEN == null"
when: never
@@ -91,7 +91,7 @@ lang-check:
stage: lint
image: alpine:latest
tags:
- - docker
+ - saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
diff --git a/.gitlab/merge_request_templates/Default.md b/.gitlab/merge_request_templates/Default.md
index a82cb3f8..70bcd7db 100644
--- a/.gitlab/merge_request_templates/Default.md
+++ b/.gitlab/merge_request_templates/Default.md
@@ -1,22 +1,22 @@
## What does this MR do and why?
-___Describe in detail what your merge request does and why.___
-> *Please keep this description updated with any discussion that takes place so*
-*that reviewers can understand your intent. Keeping the description updated is*
-*especially important if they didn't participate in the discussion.*
+
+
+
+
## Screenshots or screen recordings
-___These are strongly recommended to assist reviewers and reduce the time to merge your change.___
-> *Please include any relevant screenshots or screen recordings that will assist*
-*reviewers and future readers. If you need help visually verifying the change,*
-*please leave a comment and ping a GitLab reviewer, maintainer, or MR coach.*
+
+
+
+
## How to set up and validate locally
-___Numbered steps to set up and validate the change are strongly suggested.___
+
## MR acceptance checklist
diff --git a/.gitlab/scripts/lang_sort.sh b/.gitlab/scripts/lang_sort.sh
index 5710ce1b..9a1e1cf0 100644
--- a/.gitlab/scripts/lang_sort.sh
+++ b/.gitlab/scripts/lang_sort.sh
@@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}"
# Iterate over each .json file in the directory
for file in "${DIR}"/*.json; do
- # Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name
- if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then
+ # Check if file is a regular file, is not en_EN.json or humanized_index.json, and does not contain "_incomplete" in its name
+ if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then
# Get keys and subkeys from the current file
current_keys=$(mktemp)
diff --git a/.gitlab/scripts/linux_perms_fix.sh b/.gitlab/scripts/linux_perms_fix.sh
new file mode 100644
index 00000000..d727b16b
--- /dev/null
+++ b/.gitlab/scripts/linux_perms_fix.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Prompt the user for the directory path
+read -p "Enter the directory path to set permissions (/var/opt/minecraft/crafty): " directory_path
+
+# Count the total number of directories
+total_dirs=$(find "$directory_path" -type d 2>/dev/null | wc -l)
+
+# Count the total number of files
+total_files=$(find "$directory_path" -type f 2>/dev/null | wc -l)
+
+# Initialize a counter for directories and files
+dir_count=0
+file_count=0
+
+# Function to print progress
+print_progress() {
+ echo -ne "\rDirectories: $dir_count/$total_dirs Files: $file_count/$total_files"
+}
+
+# Check if the script is running within a Docker container
+if [ -f "/.dockerenv" ]; then
+ echo "Script is running within a Docker container. Exiting with error."
+ exit 1 # Exit with an error code if running in Docker
+else
+ echo "Script is not running within a Docker container. Executing permissions changes..."
+
+ # Run the commands to set permissions for directories
+ echo "Changing permissions for directories:"
+ for dir in $(find "$directory_path" -type d 2>/dev/null); do
+ if [ -e "$dir" ]; then
+ sudo chmod 700 "$dir" && ((dir_count++))
+ fi
+ print_progress
+ done
+
+ # Run the commands to set permissions for files
+ echo -e "\nChanging permissions for files:"
+ for file in $(find "$directory_path" -type f 2>/dev/null); do
+ if [ -e "$file" ]; then
+ sudo chmod 644 "$file" && ((file_count++))
+ fi
+ print_progress
+ done
+ echo "You will now need to execute a chmod +x on all bedrock executables"
+fi
+
+echo "" # Adding a new line after the loop for better readability
\ No newline at end of file
diff --git a/.gitlab/scripts/sort.py b/.gitlab/scripts/sort.py
index c78885a6..28eaf6fa 100644
--- a/.gitlab/scripts/sort.py
+++ b/.gitlab/scripts/sort.py
@@ -44,6 +44,7 @@ def main():
if (
"_incomplete" not in file
and file != "en_EN.json"
+ and file != "humanized_index.json"
and file.endswith(".json")
):
file_path = os.path.join(root, file)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8bf1ed17..080cd6f3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,20 +1,125 @@
# Changelog
-## --- [4.2.4] - 2023/TBD
+## --- [4.4.2] - 2024/TBD
### New features
TBD
+### Bug fixes
+TBD
+### Tweaks
+TBD
+### Lang
+TBD
+
+
+## --- [4.4.1] - 2024/08/06
+### Patch Fixes
+- Migrations | Fix orphan backup configurations crashing migration operation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+- Migrations | Fix missing default configuration if no server backup config exists during the migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+- Migrations | Fix extended runtime on move procedure during migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+
+**-----------------------------------------------------------------------------**
+
+**Initial release was reverted for patching (See Merge Request: [!784](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/784))** *2024/07/28*
+
+**-----------------------------------------------------------------------------**
+### Refactor
+- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711))
+- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+### Bug fixes
+- Fix zip imports so the root dir selection is functional ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764))
+- Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768))
+- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774))
+- Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771))
+- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775))
+- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765))
+- Remove unused and problematic "dropdown-menu" ident from [!722](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) CSS ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/782))
+### Tweaks
+- Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760))
+- Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766))
+- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767))
+- Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769))
+- Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770))
+- Add better feedback on when errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Workaround cpu_freq call catching on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776))
+- Change Role selector in server wizard to be a filter list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772))
+### Lang
+- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773))
+- Add remaining `he_IL`, `th_TH` translations from **4.4.0** Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Fix `fr_FR` syntax issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/780) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/778))
+- Add ru_RU Translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/779))
+- Add `th_TH` translations for [!772](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/781))
+
+
+## --- [4.4.0] - 2024/05/11
+### Refactor
+- Refactor API keys "super user" to "full access" ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/731) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/759))
+- Refactor SBuilder to use Big Bucket Svc ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/755))
+### Bug fixes
+- Reset query arguments on login if `?next` is not available ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/750))
+- Fix child schedule failing to load after del parent ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/753))
+### Tweaks
+- Add link to go back to dashboard on error page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/743))
+- Set audit logging to logfile instead of DB ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/751))
+### Lang
+- Changes of phrase in `cs_CS` translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/749))
+
+
+## --- [4.3.2] - 2024/04/07
+### Refactor
+- Refactor ServerJars caching and move to api.serverjars.com ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/744) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/746))
+### Bug fixes
+- Fix migrator issue when jumping versions ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/734))
+- Fix backend issue causing error when restoring backups in 4.3.x ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/736))
+- Fix backend issue causing error when cloning servers in 4.3.x ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/741))
+- Bump orjson for CVE-2024-27454 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/747))
+- Fix calling of orjson JSONDecodeError class ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/747))
+- Fix stack on Crafty permissions route request in API ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/745))
+### Tweaks
+- Clean up remaining http handler references ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/733))
+- Remove version disclosure on login page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/737))
+- Add openjdk-21 for recent versions of MC ([Commit](https://gitlab.com/crafty-controller/crafty-4/-/commit/77b0c2c9d2eac124a7504a3d3916fa22d29fa9d1))
+### Lang
+- Update `it_IT, cs_CS` ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/739) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/742))
+
+
+## --- [4.3.1] - 2024/03/18
+### Bug fixes
+- Fix Server ID Rework for backups, schedules, and roles (INT ID to UUID migration) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/729))
+### Tweaks
+- Remove http re-direct handler. Users should implement nginx configurations for port 80 redirects ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/730))
+
+
+
+## --- [4.3.0] - 2024/03/09
+### Breaking Changes
+- This release includes database migrations that are not reversible. Once you update to this version you will not be able to roll back to a previous version.
+- In this release, we've implemented a breaking change to enhance server identification within Crafty: instead of relying on numerical integers (1, 2, 3, etc.), Servers are now uniquely identified by their UUIDs. Please adapt your API clients accordingly.
+
### Refactor
- Refactor remote file downloads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
### Bug fixes
- Fix Bedrock cert issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
- Make sure default.json is read from correct location ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/714))
- Do not allow users at server limit to clone servers ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/718))
+- Fix bug where you cannot get to config with unloaded server ([Commit](https://gitlab.com/crafty-controller/crafty-4/-/commit/9de08973b6bb2ddf91283c5c6b0e189ff34f7e24))
+- Fix forge install v1.20, 1.20.1 and 1.20.2 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/710))
+- Fix Sanitisation on Passwords ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/715) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/725))
+- Fix `Upload Imports` on unix systems, that have a space in the root dir name ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/722))
+- Fix Bedrock downloads, add `www` to download URL ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/723))
+- Fire backup webhook 'after' backup has finished ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/727))
### Tweaks
- Bump pyOpenSSL & cryptography for CVE-2024-0727, CVE-2023-50782 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/716))
+- Bump cryptography for CVE-2024-26130 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/724))
### Lang
-TBD
+- Update `de_DE, en_EN, es_ES, fr_FR, he_IL, lol_EN, lv_LV, nl_BE pl_PL, th_TH, tr_TR, uk_UA, zh_CN` translations for `4.3.0` ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/715))
-## --- [4.2.3] - 2023/02/02
+## --- [4.2.3] - 2024/02/02
### New features
- Use Papermc Group's API for `paper` & `folia` builds in server builder ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/688))
- Allow omission of player count from Dashboard (e.g. for proxy servers) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/692))
diff --git a/Dockerfile b/Dockerfile
index 88627428..db98fd9a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -25,6 +25,7 @@ RUN apt-get update \
openjdk-8-jre-headless \
openjdk-11-jre-headless \
openjdk-17-jre-headless \
+ openjdk-21-jre-headless \
lib32stdc++6 \
tzdata \
&& apt-get autoremove \
diff --git a/README.md b/README.md
index b1b401d7..a776b0c4 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
[![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
-# Crafty Controller 4.2.4
+# Crafty Controller 4.4.2
> Python based Control Panel for your Minecraft Server
## What is Crafty Controller?
diff --git a/app/classes/controllers/management_controller.py b/app/classes/controllers/management_controller.py
index 7085b503..fc17b8be 100644
--- a/app/classes/controllers/management_controller.py
+++ b/app/classes/controllers/management_controller.py
@@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge
from app.classes.models.management import HelpersManagement, HelpersWebhooks
from app.classes.models.servers import HelperServers
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -75,7 +76,7 @@ class ManagementController:
# Commands Methods
# **********************************************************************************
- def send_command(self, user_id, server_id, remote_ip, command):
+ def send_command(self, user_id, server_id, remote_ip, command, action_id=None):
server_name = HelperServers.get_server_friendly_name(server_id)
# Example: Admin issued command start_server for server Survival
@@ -86,7 +87,12 @@ class ManagementController:
remote_ip,
)
self.queue_command(
- {"server_id": server_id, "user_id": user_id, "command": command}
+ {
+ "server_id": server_id,
+ "user_id": user_id,
+ "command": command,
+ "action_id": action_id,
+ }
)
def queue_command(self, command_data):
@@ -95,9 +101,6 @@ class ManagementController:
# **********************************************************************************
# Audit_Log Methods
# **********************************************************************************
- @staticmethod
- def get_activity_log():
- return HelpersManagement.get_activity_log()
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
return self.management_helper.add_to_audit_log(
@@ -126,6 +129,7 @@ class ManagementController:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
return HelpersManagement.create_scheduled_task(
server_id,
@@ -140,6 +144,7 @@ class ManagementController:
cron_string,
parent,
delay,
+ action_id,
)
@staticmethod
@@ -178,34 +183,47 @@ class ManagementController:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- return HelpersManagement.get_backup_config(server_id)
+ def get_backup_config(backup_id):
+ return HelpersManagement.get_backup_config(backup_id)
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- return self.management_helper.set_backup_config(
- server_id,
- backup_path,
- max_backups,
- excluded_dirs,
- compress,
- shutdown,
- before,
- after,
+ @staticmethod
+ def get_backups_by_server(server_id, model=False):
+ return HelpersManagement.get_backups_by_server(server_id, model)
+
+ @staticmethod
+ def delete_backup_config(backup_id):
+ HelpersManagement.remove_backup_config(backup_id)
+
+ @staticmethod
+ def update_backup_config(backup_id, updates):
+ if "backup_location" in updates:
+ updates["backup_location"] = Helpers.wtol_path(updates["backup_location"])
+ return HelpersManagement.update_backup_config(backup_id, updates)
+
+ def add_backup_config(self, data) -> str:
+ if "backup_location" in data:
+ data["backup_location"] = Helpers.wtol_path(data["backup_location"])
+ return self.management_helper.add_backup_config(data)
+
+ def add_default_backup_config(self, server_id, backup_path):
+ return self.management_helper.add_backup_config(
+ {
+ "backup_name": "Default Backup",
+ "backup_location": Helpers.wtol_path(backup_path),
+ "max_backups": 0,
+ "before": "",
+ "after": "",
+ "compress": False,
+ "shutdown": False,
+ "server_id": server_id,
+ "excluded_dirs": [],
+ "default": True,
+ }
)
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- return HelpersManagement.get_excluded_backup_dirs(server_id)
+ def get_excluded_backup_dirs(backup_id: int):
+ return HelpersManagement.get_excluded_backup_dirs(backup_id)
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)
diff --git a/app/classes/controllers/server_perms_controller.py b/app/classes/controllers/server_perms_controller.py
index f6632cd7..4586b4aa 100644
--- a/app/classes/controllers/server_perms_controller.py
+++ b/app/classes/controllers/server_perms_controller.py
@@ -17,6 +17,10 @@ class ServerPermsController:
def get_server_user_list(server_id):
return PermissionsServers.get_server_user_list(server_id)
+ @staticmethod
+ def get_permissions(permissions_mask):
+ return PermissionsServers.get_permissions(permissions_mask)
+
@staticmethod
def list_defined_permissions():
permissions_list = PermissionsServers.get_permissions_list()
@@ -47,7 +51,7 @@ class ServerPermsController:
new_server_id,
role.role_id,
PermissionsServers.get_permissions_mask(
- int(role.role_id), int(old_server_id)
+ int(role.role_id), old_server_id
),
)
# Permissions_Servers.add_role_server(
@@ -61,6 +65,22 @@ class ServerPermsController:
def get_permissions_mask(role_id, server_id):
return PermissionsServers.get_permissions_mask(role_id, server_id)
+ @staticmethod
+ def get_lowest_api_perm_mask(user_server_permissions_mask, api_key_permssions_mask):
+ mask = ""
+ # If this isn't an API key we'll know the request came from basic
+ # authentication and ignore the API key permissions mask.
+ if not api_key_permssions_mask:
+ return user_server_permissions_mask
+ for _index, (user_perm, api_perm) in enumerate(
+ zip(user_server_permissions_mask, api_key_permssions_mask)
+ ):
+ if user_perm == "1" and api_perm == "1":
+ mask += "1"
+ else:
+ mask += "0"
+ return mask
+
@staticmethod
def set_permission(
permission_mask, permission_tested: EnumPermissionsServer, value
@@ -82,6 +102,11 @@ class ServerPermsController:
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
return PermissionsServers.get_api_key_permissions_list(key, server_id)
+ @staticmethod
+ def get_user_permissions_mask(user_id: str, server_id: str):
+ user = HelperUsers.get_user_model(user_id)
+ return PermissionsServers.get_user_permissions_mask(user, server_id)
+
@staticmethod
def get_authorized_servers_stats_from_roles(user_id):
user_roles = HelperUsers.get_user_roles_id(user_id)
diff --git a/app/classes/controllers/servers_controller.py b/app/classes/controllers/servers_controller.py
index 650562f4..0e438145 100644
--- a/app/classes/controllers/servers_controller.py
+++ b/app/classes/controllers/servers_controller.py
@@ -48,7 +48,6 @@ class ServersController(metaclass=Singleton):
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -81,10 +80,9 @@ class ServersController(metaclass=Singleton):
PeeweeException: If the server already exists
"""
return HelperServers.create_server(
- name,
server_uuid,
+ name,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -150,8 +148,7 @@ class ServersController(metaclass=Singleton):
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
# Remove roles from server
PermissionsServers.remove_roles_of_server(server_id)
- # Remove backup configs tied to server
- self.management_helper.remove_backup_config(server_id)
+ self.management_helper.remove_all_server_backups(server_id)
# Finally remove server
self.servers_helper.remove_server(server_id)
@@ -163,9 +160,9 @@ class ServersController(metaclass=Singleton):
# Servers Methods
# **********************************************************************************
- def get_server_instance_by_id(self, server_id: t.Union[str, int]) -> ServerInstance:
+ def get_server_instance_by_id(self, server_id: t.Union[str, str]) -> ServerInstance:
for server in self.servers_list:
- if int(server["server_id"]) == int(server_id):
+ if server["server_id"] == server_id:
return server["server_obj"]
logger.warning(f"Unable to find server object for server id {server_id}")
diff --git a/app/classes/controllers/users_controller.py b/app/classes/controllers/users_controller.py
index 5c6dd3d2..d45797bd 100644
--- a/app/classes/controllers/users_controller.py
+++ b/app/classes/controllers/users_controller.py
@@ -52,9 +52,10 @@ class UsersController:
},
"password": {
"type": "string",
- "minLength": 8,
+ "minLength": self.helper.minimum_password_length,
"examples": ["crafty"],
"title": "Password",
+ "error": "passLength",
},
"email": {
"type": "string",
diff --git a/app/classes/logging/log_formatter.py b/app/classes/logging/log_formatter.py
new file mode 100644
index 00000000..e3f2b4f7
--- /dev/null
+++ b/app/classes/logging/log_formatter.py
@@ -0,0 +1,53 @@
+import logging
+import logging.config
+import json
+from datetime import datetime
+
+
+class JsonEncoderStrFallback(json.JSONEncoder):
+ def default(self, o):
+ try:
+ return super().default(o)
+ except TypeError as exc:
+ if "not JSON serializable" in str(exc):
+ return str(o)
+ raise
+
+
+class JsonEncoderDatetime(JsonEncoderStrFallback):
+ def default(self, o):
+ if isinstance(o, datetime):
+ return o.strftime("%Y-%m-%dT%H:%M:%S%z")
+
+ return super().default(o)
+
+
+class JsonFormatter(logging.Formatter):
+ def formatTime(self, record, datefmt=None):
+ """
+ Override formatTime to customize the time format.
+ """
+ timestamp = datetime.fromtimestamp(record.created)
+ if datefmt:
+ # Use the specified date format
+ return timestamp.strftime(datefmt)
+ # Default date format: YYYY-MM-DD HH:MM:SS,mmm
+ secs = int(record.msecs)
+ return f"{timestamp.strftime('%Y-%m-%d %H:%M:%S')},{secs:03d}"
+
+ def format(self, record):
+ log_data = {
+ "level": record.levelname,
+ "time": self.formatTime(record),
+ "log_msg": record.getMessage(),
+ }
+
+ # Filter out standard log record attributes and include only custom ones
+ custom_attrs = ["user_name", "user_id", "server_id", "source_ip"]
+ extra_attrs = {
+ key: value for key, value in record.__dict__.items() if key in custom_attrs
+ }
+
+ # Merge extra attributes with log data
+ log_data.update(extra_attrs)
+ return json.dumps(log_data)
diff --git a/app/classes/minecraft/bigbucket.py b/app/classes/minecraft/bigbucket.py
new file mode 100644
index 00000000..591aa6d8
--- /dev/null
+++ b/app/classes/minecraft/bigbucket.py
@@ -0,0 +1,236 @@
+import os
+import json
+import threading
+import time
+import logging
+from datetime import datetime
+import requests
+
+from app.classes.controllers.servers_controller import ServersController
+from app.classes.models.server_permissions import PermissionsServers
+from app.classes.shared.file_helpers import FileHelpers
+from app.classes.shared.websocket_manager import WebSocketManager
+
+logger = logging.getLogger(__name__)
+# Temp type var until sjars restores generic fetchTypes0
+
+
+class BigBucket:
+ def __init__(self, helper):
+ self.helper = helper
+ # remove any trailing slash from config.json
+ # url since we add it on all the calls
+ self.base_url = str(
+ self.helper.get_setting("big_bucket_repo", "https://jars.arcadiatech.org")
+ ).rstrip("/")
+
+ def _read_cache(self) -> dict:
+ cache_file = self.helper.big_bucket_cache
+ cache = {}
+ try:
+ with open(cache_file, "r", encoding="utf-8") as f:
+ cache = json.load(f)
+
+ except Exception as e:
+ logger.error(f"Unable to read big_bucket cache file: {e}")
+
+ return cache
+
+ def get_bucket_data(self):
+ data = self._read_cache()
+ return data.get("categories")
+
+ def _check_bucket_alive(self) -> bool:
+ logger.info("Checking Big Bucket status")
+
+ check_url = f"{self.base_url}/healthcheck"
+ try:
+ response = requests.get(check_url, timeout=2)
+ response_json = response.json()
+ if (
+ response.status_code in [200, 201]
+ and response_json.get("status") == "ok"
+ ):
+ logger.info("Big bucket is alive and responding as expected")
+ return True
+ except Exception as e:
+ logger.error(f"Unable to connect to big bucket due to error: {e}")
+ return False
+
+ logger.error(
+ "Big bucket manifest is not available as expected or unable to contact"
+ )
+ return False
+
+ def _get_big_bucket(self) -> dict:
+ logger.debug("Calling for big bucket manifest.")
+ try:
+ response = requests.get(f"{self.base_url}/manifest.json", timeout=5)
+ if response.status_code in [200, 201]:
+ data = response.json()
+ del data["manifest_version"]
+ return data
+ return {}
+ except Exception as e:
+ logger.error(f"Unable to get jars from remote with error {e}")
+ return {}
+
+ def _refresh_cache(self):
+ """
+ Contains the shared logic for refreshing the cache.
+ This method is called by both manual_refresh_cache and refresh_cache methods.
+ """
+ if not self._check_bucket_alive():
+ logger.error("big bucket API is not available.")
+ return False
+
+ cache_data = {
+ "last_refreshed": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"),
+ "categories": self._get_big_bucket(),
+ }
+ try:
+ with open(
+ self.helper.big_bucket_cache, "w", encoding="utf-8"
+ ) as cache_file:
+ json.dump(cache_data, cache_file, indent=4)
+ logger.info("Cache file successfully refreshed.")
+ except Exception as e:
+ logger.error(f"Failed to update cache file: {e}")
+
+ def manual_refresh_cache(self):
+ """
+ Manually triggers the cache refresh process.
+ """
+ logger.info("Manual bucket cache refresh initiated.")
+ self._refresh_cache()
+ logger.info("Manual refresh completed.")
+
+ def refresh_cache(self):
+ """
+ Automatically triggers the cache refresh process based on cache age.
+
+ This method checks if the cache file is older than a specified number of days
+ before deciding to refresh.
+ """
+ cache_file_path = self.helper.big_bucket_cache
+
+ # Determine if the cache is old and needs refreshing
+ cache_old = self.helper.is_file_older_than_x_days(cache_file_path)
+
+ # debug override
+ # cache_old = True
+
+ if not self._check_bucket_alive():
+ logger.error("big bucket API is not available.")
+ return False
+
+ if not cache_old:
+ logger.info("Cache file is not old enough to require automatic refresh.")
+ return False
+
+ logger.info("Automatic cache refresh initiated due to old cache.")
+ self._refresh_cache()
+
+ def get_fetch_url(self, jar, server, version) -> str:
+ """
+ Constructs the URL for downloading a server JAR file based on the server type.
+ Parameters:
+ jar (str): The category of the JAR file to download.
+ server (str): Server software name (e.g., "paper").
+ version (str): Server version.
+
+ Returns:
+ str or None: URL for downloading the JAR file, or None if URL cannot be
+ constructed or an error occurs.
+ """
+ try:
+ # Read cache file for URL that is in a list of one item
+ return self.get_bucket_data()[jar]["types"][server]["versions"][version][
+ "url"
+ ][0]
+ except Exception as e:
+ logger.error(f"An error occurred while constructing fetch URL: {e}")
+ return None
+
+ def download_jar(self, jar, server, version, path, server_id):
+ update_thread = threading.Thread(
+ name=f"server_download-{server_id}-{server}-{version}",
+ target=self.a_download_jar,
+ daemon=True,
+ args=(jar, server, version, path, server_id),
+ )
+ update_thread.start()
+
+ def a_download_jar(self, jar, server, version, path, server_id):
+ """
+ Downloads a server JAR file and performs post-download actions including
+ notifying users and setting import status.
+
+ This method waits for the server registration to complete, retrieves the
+ download URL for the specified server JAR file.
+
+ Upon successful download, it either runs the installer for
+ Forge servers or simply finishes the import process for other types. It
+ notifies server users about the completion of the download.
+
+ Parameters:
+ - jar (str): The category of the JAR file to download.
+ - server (str): The type of server software (e.g., 'forge', 'paper').
+ - version (str): The version of the server software.
+ - path (str): The local filesystem path where the JAR file will be saved.
+ - server_id (str): The unique identifier for the server being updated or
+ imported, used for notifying users and setting the import status.
+
+ Returns:
+ - bool: True if the JAR file was successfully downloaded and saved;
+ False otherwise.
+
+ The method ensures that the server is properly registered before proceeding
+ with the download and handles exceptions by logging errors and reverting
+ the import status if necessary.
+ """
+ # delaying download for server register to finish
+ time.sleep(3)
+
+ fetch_url = self.get_fetch_url(jar, server, version)
+ if not fetch_url:
+ return False
+
+ server_users = PermissionsServers.get_server_user_list(server_id)
+
+ # Make sure the server is registered before updating its stats
+ while True:
+ try:
+ ServersController.set_import(server_id)
+ for user in server_users:
+ WebSocketManager().broadcast_user(user, "send_start_reload", {})
+ break
+ except Exception as ex:
+ logger.debug(f"Server not registered yet. Delaying download - {ex}")
+
+ # Initiate Download
+ jar_dir = os.path.dirname(path)
+ jar_name = os.path.basename(path)
+ logger.info(fetch_url)
+ success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
+
+ # Post-download actions
+ if success:
+ if server == "forge-installer":
+ # If this is the newer Forge version, run the installer
+ ServersController.finish_import(server_id, True)
+ else:
+ ServersController.finish_import(server_id)
+
+ # Notify users
+ for user in server_users:
+ WebSocketManager().broadcast_user(
+ user, "notification", "Executable download finished"
+ )
+ time.sleep(3) # Delay for user notification
+ WebSocketManager().broadcast_user(user, "send_start_reload", {})
+ else:
+ logger.error(f"Unable to save jar to {path} due to download failure.")
+ ServersController.finish_import(server_id)
+
+ return success
diff --git a/app/classes/minecraft/serverjars.py b/app/classes/minecraft/serverjars.py
deleted file mode 100644
index 83731b52..00000000
--- a/app/classes/minecraft/serverjars.py
+++ /dev/null
@@ -1,361 +0,0 @@
-import os
-import json
-import threading
-import time
-import logging
-from datetime import datetime
-import requests
-
-from app.classes.controllers.servers_controller import ServersController
-from app.classes.models.server_permissions import PermissionsServers
-from app.classes.shared.file_helpers import FileHelpers
-from app.classes.shared.websocket_manager import WebSocketManager
-
-logger = logging.getLogger(__name__)
-PAPERJARS = ["paper", "folia"]
-
-
-class ServerJars:
- def __init__(self, helper):
- self.helper = helper
- self.base_url = "https://serverjars.com"
- self.paper_base = "https://api.papermc.io"
-
- @staticmethod
- def get_paper_jars():
- return PAPERJARS
-
- def get_paper_versions(self, project):
- """
- Retrieves a list of versions for a specified project from the PaperMC API.
-
- Parameters:
- project (str): The project name to query for available versions.
-
- Returns:
- list: A list of version strings available for the project. Returns an empty
- list if the API call fails or if no versions are found.
-
- This function makes a GET request to the PaperMC API to fetch available project
- versions, The versions are returned in reverse order, with the most recent
- version first.
- """
- try:
- response = requests.get(
- f"{self.paper_base}/v2/projects/{project}/", timeout=2
- )
- response.raise_for_status()
- api_data = response.json()
- except Exception as e:
- logger.error(f"Error loading project versions for {project}: {e}")
- return []
-
- versions = api_data.get("versions", [])
- versions.reverse() # Ensure the most recent version comes first
- return versions
-
- def get_paper_build(self, project, version):
- """
- Fetches the latest build for a specified project and version from PaperMC API.
-
- Parameters:
- project (str): Project name, typically a server software like 'paper'.
- version (str): Project version to fetch the build number for.
-
- Returns:
- int or None: Latest build number if successful, None if not or on error.
-
- This method attempts to query the PaperMC API for the latest build and
- handles exceptions by logging errors and returning None.
- """
- try:
- response = requests.get(
- f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
- timeout=2,
- )
- response.raise_for_status()
- api_data = response.json()
- except Exception as e:
- logger.error(f"Error fetching build for {project} {version}: {e}")
- return None
-
- builds = api_data.get("builds", [])
- return builds[-1] if builds else None
-
- def get_fetch_url(self, jar, server, version):
- """
- Constructs the URL for downloading a server JAR file based on the server type.
-
- Supports two main types of server JAR sources:
- - ServerJars API for servers not in PAPERJARS.
- - Paper API for servers available through the Paper project.
-
- Parameters:
- jar (str): Name of the JAR file.
- server (str): Server software name (e.g., "paper").
- version (str): Server version.
-
- Returns:
- str or None: URL for downloading the JAR file, or None if URL cannot be
- constructed or an error occurs.
- """
- try:
- # Check if the server type is not specifically handled by Paper.
- if server not in PAPERJARS:
- return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"
-
- # For Paper servers, attempt to get the build for the specified version.
- paper_build_info = self.get_paper_build(server, version)
- if paper_build_info is None:
- # Log an error or handle the case where paper_build_info is None
- logger.error(
- "Error: Unable to get build information for server:"
- f" {server}, version: {version}"
- )
- return None
-
- build = paper_build_info.get("build")
- if not build:
- # Log an error or handle the case where build is None or not found
- logger.error(
- f"Error: Build number not found for server:"
- f" {server}, version: {version}"
- )
- return None
-
- # Construct and return the URL for downloading the Paper server JAR.
- return (
- f"{self.paper_base}/v2/projects/{server}/versions/{version}/"
- f"builds/{build}/downloads/{server}-{version}-{build}.jar"
- )
- except Exception as e:
- logger.error(f"An error occurred while constructing fetch URL: {e}")
- return None
-
- def _get_api_result(self, call_url: str):
- full_url = f"{self.base_url}{call_url}"
-
- try:
- response = requests.get(full_url, timeout=2)
- response.raise_for_status()
- api_data = json.loads(response.content)
- except Exception as e:
- logger.error(f"Unable to load {full_url} api due to error: {e}")
- return {}
-
- api_result = api_data.get("status")
- api_response = api_data.get("response", {})
-
- if api_result != "success":
- logger.error(f"Api returned a failed status: {api_result}")
- return {}
-
- return api_response
-
- def _read_cache(self):
- cache_file = self.helper.serverjar_cache
- cache = {}
- try:
- with open(cache_file, "r", encoding="utf-8") as f:
- cache = json.load(f)
-
- except Exception as e:
- logger.error(f"Unable to read serverjars.com cache file: {e}")
-
- return cache
-
- def get_serverjar_data(self):
- data = self._read_cache()
- return data.get("types")
-
- def _check_api_alive(self):
- logger.info("Checking serverjars.com API status")
-
- check_url = f"{self.base_url}/api/fetchTypes"
- try:
- response = requests.get(check_url, timeout=2)
-
- if response.status_code in [200, 201]:
- logger.info("Serverjars.com API is alive")
- return True
- except Exception as e:
- logger.error(f"Unable to connect to serverjar.com api due to error: {e}")
- return {}
-
- logger.error("unable to contact serverjars.com api")
- return False
-
- def manual_refresh_cache(self):
- cache_file = self.helper.serverjar_cache
-
- # debug override
- # cache_old = True
-
- # if the API is down... we bomb out
- if not self._check_api_alive():
- return False
-
- logger.info("Manual Refresh requested.")
- now = datetime.now()
- data = {
- "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
- "types": {},
- }
-
- jar_types = self._get_server_type_list()
- data["types"].update(jar_types)
- for s in data["types"]:
- data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
- for j in data["types"].get(s):
- versions = self._get_jar_details(j, s)
- data["types"][s].update({j: versions})
- for item in PAPERJARS:
- data["types"]["servers"][item] = self.get_paper_versions(item)
- # save our cache
- try:
- with open(cache_file, "w", encoding="utf-8") as f:
- f.write(json.dumps(data, indent=4))
- logger.info("Cache file refreshed")
-
- except Exception as e:
- logger.error(f"Unable to update serverjars.com cache file: {e}")
-
- def refresh_cache(self):
- cache_file = self.helper.serverjar_cache
- cache_old = self.helper.is_file_older_than_x_days(cache_file)
-
- # debug override
- # cache_old = True
-
- # if the API is down... we bomb out
- if not self._check_api_alive():
- return False
-
- logger.info("Checking Cache file age")
- # if file is older than 1 day
-
- if cache_old:
- logger.info("Cache file is over 1 day old, refreshing")
- now = datetime.now()
- data = {
- "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
- "types": {},
- }
-
- jar_types = self._get_server_type_list()
- data["types"].update(jar_types)
- for s in data["types"]:
- data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
- for j in data["types"].get(s):
- versions = self._get_jar_details(j, s)
- data["types"][s].update({j: versions})
- for item in PAPERJARS:
- data["types"]["servers"][item] = self.get_paper_versions(item)
- # save our cache
- try:
- with open(cache_file, "w", encoding="utf-8") as f:
- f.write(json.dumps(data, indent=4))
- logger.info("Cache file refreshed")
-
- except Exception as e:
- logger.error(f"Unable to update serverjars.com cache file: {e}")
-
- def _get_jar_details(self, server_type, jar_type="servers"):
- url = f"/api/fetchAll/{jar_type}/{server_type}"
- response = self._get_api_result(url)
- temp = []
- for v in response:
- temp.append(v.get("version"))
- time.sleep(0.5)
- return temp
-
- def _get_server_type_list(self):
- url = "/api/fetchTypes/"
- response = self._get_api_result(url)
- if "bedrock" in response.keys():
- # remove pocketmine from options
- del response["bedrock"]
- return response
-
- def download_jar(self, jar, server, version, path, server_id):
- update_thread = threading.Thread(
- name=f"server_download-{server_id}-{server}-{version}",
- target=self.a_download_jar,
- daemon=True,
- args=(jar, server, version, path, server_id),
- )
- update_thread.start()
-
- def a_download_jar(self, jar, server, version, path, server_id):
- """
- Downloads a server JAR file and performs post-download actions including
- notifying users and setting import status.
-
- This method waits for the server registration to complete, retrieves the
- download URL for the specified server JAR file.
-
- Upon successful download, it either runs the installer for
- Forge servers or simply finishes the import process for other types. It
- notifies server users about the completion of the download.
-
- Parameters:
- - jar (str): The name of the JAR file to download.
- - server (str): The type of server software (e.g., 'forge', 'paper').
- - version (str): The version of the server software.
- - path (str): The local filesystem path where the JAR file will be saved.
- - server_id (str): The unique identifier for the server being updated or
- imported, used for notifying users and setting the import status.
-
- Returns:
- - bool: True if the JAR file was successfully downloaded and saved;
- False otherwise.
-
- The method ensures that the server is properly registered before proceeding
- with the download and handles exceptions by logging errors and reverting
- the import status if necessary.
- """
- # delaying download for server register to finish
- time.sleep(3)
-
- fetch_url = self.get_fetch_url(jar, server, version)
- if not fetch_url:
- return False
-
- server_users = PermissionsServers.get_server_user_list(server_id)
-
- # Make sure the server is registered before updating its stats
- while True:
- try:
- ServersController.set_import(server_id)
- for user in server_users:
- WebSocketManager().broadcast_user(user, "send_start_reload", {})
- break
- except Exception as ex:
- logger.debug(f"Server not registered yet. Delaying download - {ex}")
-
- # Initiate Download
- jar_dir = os.path.dirname(path)
- jar_name = os.path.basename(path)
- logger.info(fetch_url)
- success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
-
- # Post-download actions
- if success:
- if server == "forge":
- # If this is the newer Forge version, run the installer
- ServersController.finish_import(server_id, True)
- else:
- ServersController.finish_import(server_id)
-
- # Notify users
- for user in server_users:
- WebSocketManager().broadcast_user(
- user, "notification", "Executable download finished"
- )
- time.sleep(3) # Delay for user notification
- WebSocketManager().broadcast_user(user, "send_start_reload", {})
- else:
- logger.error(f"Unable to save jar to {path} due to download failure.")
- ServersController.finish_import(server_id)
-
- return success
diff --git a/app/classes/minecraft/stats.py b/app/classes/minecraft/stats.py
index 789c1e8f..48106b15 100644
--- a/app/classes/minecraft/stats.py
+++ b/app/classes/minecraft/stats.py
@@ -86,7 +86,7 @@ class Stats:
def get_node_stats(self) -> NodeStatsReturnDict:
try:
cpu_freq = psutil.cpu_freq()
- except (NotImplementedError, FileNotFoundError):
+ except (NotImplementedError, AttributeError, FileNotFoundError):
cpu_freq = None
if cpu_freq is None:
cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1)
diff --git a/app/classes/models/crafty_permissions.py b/app/classes/models/crafty_permissions.py
index 7430f332..e7a159d9 100644
--- a/app/classes/models/crafty_permissions.py
+++ b/app/classes/models/crafty_permissions.py
@@ -187,7 +187,7 @@ class PermissionsCrafty:
@staticmethod
def get_api_key_permissions_list(key: ApiKeys):
user = HelperUsers.get_user(key.user_id)
- if user["superuser"] and key.superuser:
+ if user["superuser"] and key.full_access:
return PermissionsCrafty.get_permissions_list()
if user["superuser"]:
# User is superuser but API key isn't
diff --git a/app/classes/models/management.py b/app/classes/models/management.py
index e86e3209..b6be14b5 100644
--- a/app/classes/models/management.py
+++ b/app/classes/models/management.py
@@ -16,28 +16,11 @@ from app.classes.models.base_model import BaseModel
from app.classes.models.users import HelperUsers
from app.classes.models.servers import Servers
from app.classes.models.server_permissions import PermissionsServers
-from app.classes.shared.main_models import DatabaseShortcuts
+from app.classes.shared.helpers import Helpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
-
-
-# **********************************************************************************
-# Audit_Log Class
-# **********************************************************************************
-class AuditLog(BaseModel):
- audit_id = AutoField()
- created = DateTimeField(default=datetime.datetime.now)
- user_name = CharField(default="")
- user_id = IntegerField(default=0, index=True)
- source_ip = CharField(default="127.0.0.1")
- server_id = IntegerField(
- default=None, index=True
- ) # When auditing global events, use server ID 0
- log_msg = TextField(default="")
-
- class Meta:
- table_name = "audit_log"
+auth_logger = logging.getLogger("audit_log")
# **********************************************************************************
@@ -79,7 +62,7 @@ class HostStats(BaseModel):
# **********************************************************************************
class Webhooks(BaseModel):
id = AutoField()
- server_id = IntegerField(null=True)
+ server_id = ForeignKeyField(Servers, backref="webhook_server", null=True)
name = CharField(default="Custom Webhook", max_length=64)
url = CharField(default="")
webhook_type = CharField(default="Custom")
@@ -105,6 +88,7 @@ class Schedules(BaseModel):
interval_type = CharField()
start_time = CharField(null=True)
command = CharField(null=True)
+ action_id = CharField(null=True)
name = CharField()
one_time = BooleanField(default=False)
cron_string = CharField(default="")
@@ -120,13 +104,19 @@ class Schedules(BaseModel):
# Backups Class
# **********************************************************************************
class Backups(BaseModel):
+ backup_id = CharField(primary_key=True, default=Helpers.create_uuid)
+ backup_name = CharField(default="New Backup")
+ backup_location = CharField(default="")
excluded_dirs = CharField(null=True)
- max_backups = IntegerField()
+ max_backups = IntegerField(default=0)
server_id = ForeignKeyField(Servers, backref="backups_server")
compress = BooleanField(default=False)
shutdown = BooleanField(default=False)
before = CharField(default="")
after = CharField(default="")
+ default = BooleanField(default=False)
+ status = CharField(default='{"status": "Standby", "message": ""}')
+ enabled = BooleanField(default=True)
class Meta:
table_name = "backups"
@@ -149,10 +139,6 @@ class HelpersManagement:
# **********************************************************************************
# Audit_Log Methods
# **********************************************************************************
- @staticmethod
- def get_activity_log():
- query = AuditLog.select()
- return DatabaseShortcuts.return_db_rows(query)
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
logger.debug(f"Adding to audit log User:{user_id} - Message: {log_msg} ")
@@ -166,50 +152,28 @@ class HelpersManagement:
WebSocketManager().broadcast_user(user, "notification", audit_msg)
except Exception as e:
logger.error(f"Error broadcasting to user {user} - {e}")
-
- AuditLog.insert(
- {
- AuditLog.user_name: user_data["username"],
- AuditLog.user_id: user_id,
- AuditLog.server_id: server_id,
- AuditLog.log_msg: audit_msg,
- AuditLog.source_ip: source_ip,
- }
- ).execute()
- # deletes records when there's more than 300
- ordered = AuditLog.select().order_by(+AuditLog.created)
- for item in ordered:
- if not self.helper.get_setting("max_audit_entries"):
- max_entries = 300
- else:
- max_entries = self.helper.get_setting("max_audit_entries")
- if AuditLog.select().count() > max_entries:
- AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
- else:
- return
+ auth_logger.info(
+ str(log_msg),
+ extra={
+ "user_name": user_data["username"],
+ "user_id": user_id,
+ "server_id": server_id,
+ "source_ip": source_ip,
+ },
+ )
def add_to_audit_log_raw(self, user_name, user_id, server_id, log_msg, source_ip):
- AuditLog.insert(
- {
- AuditLog.user_name: user_name,
- AuditLog.user_id: user_id,
- AuditLog.server_id: server_id,
- AuditLog.log_msg: log_msg,
- AuditLog.source_ip: source_ip,
- }
- ).execute()
- # deletes records when there's more than 300
- ordered = AuditLog.select().order_by(+AuditLog.created)
- for item in ordered:
- # configurable through app/config/config.json
- if not self.helper.get_setting("max_audit_entries"):
- max_entries = 300
- else:
- max_entries = self.helper.get_setting("max_audit_entries")
- if AuditLog.select().count() > max_entries:
- AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
- else:
- return
+ if isinstance(server_id, Servers) and server_id is not None:
+ server_id = server_id.server_id
+ auth_logger.info(
+ str(log_msg),
+ extra={
+ "user_name": user_name,
+ "user_id": user_id,
+ "server_id": server_id,
+ "source_ip": source_ip,
+ },
+ )
@staticmethod
def create_crafty_row():
@@ -307,6 +271,7 @@ class HelpersManagement:
cron_string="* * * * *",
parent=None,
delay=0,
+ action_id=None,
):
sch_id = Schedules.insert(
{
@@ -317,6 +282,7 @@ class HelpersManagement:
Schedules.interval_type: interval_type,
Schedules.start_time: start_time,
Schedules.command: command,
+ Schedules.action_id: action_id,
Schedules.name: name,
Schedules.one_time: one_time,
Schedules.cron_string: cron_string,
@@ -337,7 +303,7 @@ class HelpersManagement:
@staticmethod
def delete_scheduled_task_by_server(server_id):
- Schedules.delete().where(Schedules.server_id == int(server_id)).execute()
+ Schedules.delete().where(Schedules.server_id == server_id).execute()
@staticmethod
def get_scheduled_task(schedule_id):
@@ -379,133 +345,83 @@ class HelpersManagement:
# Backups Methods
# **********************************************************************************
@staticmethod
- def get_backup_config(server_id):
- try:
- row = (
- Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
- )
- conf = {
- "backup_path": row.server_id.backup_path,
- "excluded_dirs": row.excluded_dirs,
- "max_backups": row.max_backups,
- "server_id": row.server_id_id,
- "compress": row.compress,
- "shutdown": row.shutdown,
- "before": row.before,
- "after": row.after,
- }
- except IndexError:
- conf = {
- "backup_path": None,
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- return conf
+ def get_backup_config(backup_id):
+ return model_to_dict(Backups.get(Backups.backup_id == backup_id))
@staticmethod
- def remove_backup_config(server_id):
+ def get_backups_by_server(server_id, model=False):
+ if not model:
+ data = {}
+ for backup in (
+ Backups.select().where(Backups.server_id == server_id).execute()
+ ):
+ data[str(backup.backup_id)] = {
+ "backup_id": backup.backup_id,
+ "backup_name": backup.backup_name,
+ "backup_location": backup.backup_location,
+ "excluded_dirs": backup.excluded_dirs,
+ "max_backups": backup.max_backups,
+ "server_id": backup.server_id_id,
+ "compress": backup.compress,
+ "shutdown": backup.shutdown,
+ "before": backup.before,
+ "after": backup.after,
+ "default": backup.default,
+ "enabled": backup.enabled,
+ }
+ else:
+ data = Backups.select().where(Backups.server_id == server_id).execute()
+ return data
+
+ @staticmethod
+ def get_default_server_backup(server_id: str) -> dict:
+ # Look up the backup row flagged as this server's default.
+ bu_query = Backups.select().where(
+ Backups.server_id == server_id,
+ Backups.default == True, # pylint: disable=singleton-comparison
+ )
+ # Use the first matching row; a server should have at most one
+ # backup flagged as default.
+ backup_model = bu_query.first()
+
+ if backup_model:
+ return model_to_dict(backup_model)
+ raise IndexError
+
+ @staticmethod
+ def remove_all_server_backups(server_id):
Backups.delete().where(Backups.server_id == server_id).execute()
- def set_backup_config(
- self,
- server_id: int,
- backup_path: str = None,
- max_backups: int = None,
- excluded_dirs: list = None,
- compress: bool = False,
- shutdown: bool = False,
- before: str = "",
- after: str = "",
- ):
- logger.debug(f"Updating server {server_id} backup config with {locals()}")
- if Backups.select().where(Backups.server_id == server_id).exists():
- new_row = False
- conf = {}
- else:
- conf = {
- "excluded_dirs": None,
- "max_backups": 0,
- "server_id": server_id,
- "compress": False,
- "shutdown": False,
- "before": "",
- "after": "",
- }
- new_row = True
- if max_backups is not None:
- conf["max_backups"] = max_backups
- if excluded_dirs is not None:
- dirs_to_exclude = ",".join(excluded_dirs)
+ @staticmethod
+ def remove_backup_config(backup_id):
+ Backups.delete().where(Backups.backup_id == backup_id).execute()
+
+ def add_backup_config(self, conf) -> str:
+ if "excluded_dirs" in conf:
+ dirs_to_exclude = ",".join(conf["excluded_dirs"])
conf["excluded_dirs"] = dirs_to_exclude
- conf["compress"] = compress
- conf["shutdown"] = shutdown
- conf["before"] = before
- conf["after"] = after
- if not new_row:
- with self.database.atomic():
- if backup_path is not None:
- server_rows = (
- Servers.update(backup_path=backup_path)
- .where(Servers.server_id == server_id)
- .execute()
- )
- else:
- server_rows = 0
- backup_rows = (
- Backups.update(conf).where(Backups.server_id == server_id).execute()
- )
- logger.debug(
- f"Updating existing backup record. "
- f"{server_rows}+{backup_rows} rows affected"
- )
- else:
- with self.database.atomic():
- conf["server_id"] = server_id
- if backup_path is not None:
- Servers.update(backup_path=backup_path).where(
- Servers.server_id == server_id
- )
- Backups.create(**conf)
- logger.debug("Creating new backup record.")
+ if len(self.get_backups_by_server(conf["server_id"], True)) <= 0:
+ conf["default"] = True
+ backup = Backups.create(**conf)
+ logger.debug("Creating new backup record.")
+ return backup.backup_id
@staticmethod
- def get_excluded_backup_dirs(server_id: int):
- excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
+ def update_backup_config(backup_id, data):
+ if "excluded_dirs" in data:
+ dirs_to_exclude = ",".join(data["excluded_dirs"])
+ data["excluded_dirs"] = dirs_to_exclude
+ Backups.update(**data).where(Backups.backup_id == backup_id).execute()
+
+ @staticmethod
+ def get_excluded_backup_dirs(backup_id: int):
+ excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"]
if excluded_dirs is not None and excluded_dirs != "":
dir_list = excluded_dirs.split(",")
else:
dir_list = []
return dir_list
- def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_add not in dir_list:
- dir_list.append(dir_to_add)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not adding {dir_to_add} to excluded directories - "
- f"already in the excluded directory list for server ID {server_id}"
- )
-
- def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
- dir_list = self.get_excluded_backup_dirs(server_id)
- if dir_to_del in dir_list:
- dir_list.remove(dir_to_del)
- excluded_dirs = ",".join(dir_list)
- self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
- else:
- logger.debug(
- f"Not removing {dir_to_del} from excluded directories - "
- f"not in the excluded directory list for server ID {server_id}"
- )
-
# **********************************************************************************
# Webhooks Class
diff --git a/app/classes/models/server_permissions.py b/app/classes/models/server_permissions.py
index 56f9d8ac..12301e30 100644
--- a/app/classes/models/server_permissions.py
+++ b/app/classes/models/server_permissions.py
@@ -264,7 +264,7 @@ class PermissionsServers:
@staticmethod
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
user = HelperUsers.get_user(key.user_id)
- if user["superuser"] and key.superuser:
+ if user["superuser"] and key.full_access:
return PermissionsServers.get_permissions_list()
roles_list = HelperUsers.get_user_roles_id(user["user_id"])
role_server = (
diff --git a/app/classes/models/server_stats.py b/app/classes/models/server_stats.py
index 8473ed12..64258c8c 100644
--- a/app/classes/models/server_stats.py
+++ b/app/classes/models/server_stats.py
@@ -71,7 +71,7 @@ class HelperServerStats:
database = None
def __init__(self, server_id):
- self.server_id = int(server_id)
+ self.server_id = server_id
self.init_database(self.server_id)
def init_database(self, server_id):
diff --git a/app/classes/models/servers.py b/app/classes/models/servers.py
index 71419dde..d9016877 100644
--- a/app/classes/models/servers.py
+++ b/app/classes/models/servers.py
@@ -3,7 +3,6 @@ import datetime
import typing as t
from peewee import (
CharField,
- AutoField,
DateTimeField,
BooleanField,
IntegerField,
@@ -13,6 +12,9 @@ from playhouse.shortcuts import model_to_dict
from app.classes.shared.main_models import DatabaseShortcuts
from app.classes.models.base_model import BaseModel
+# from app.classes.models.users import Users
+from app.classes.shared.helpers import Helpers
+
logger = logging.getLogger(__name__)
@@ -20,12 +22,10 @@ logger = logging.getLogger(__name__)
# Servers Model
# **********************************************************************************
class Servers(BaseModel):
- server_id = AutoField()
+ server_id = CharField(primary_key=True, default=Helpers.create_uuid)
created = DateTimeField(default=datetime.datetime.now)
- server_uuid = CharField(default="", index=True)
server_name = CharField(default="Server", index=True)
path = CharField(default="")
- backup_path = CharField(default="")
executable = CharField(default="")
log_path = CharField(default="")
execution_command = CharField(default="")
@@ -40,6 +40,7 @@ class Servers(BaseModel):
type = CharField(default="minecraft-java")
show_status = BooleanField(default=1)
created_by = IntegerField(default=-100)
+ # created_by = ForeignKeyField(Users, backref="creator_server", null=True)
shutdown_timeout = IntegerField(default=60)
ignored_exits = CharField(default="0")
app_id = IntegerField(null=True)
@@ -61,10 +62,9 @@ class HelperServers:
# **********************************************************************************
@staticmethod
def create_server(
+ server_id: str,
name: str,
- server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -81,7 +81,6 @@ class HelperServers:
name: The name of the server
server_uuid: This is the UUID of the server
server_dir: The directory where the server is located
- backup_path: The path to the backup folder
server_command: The command to start the server
server_file: The name of the server file
server_log_file: The path to the server log file
@@ -97,26 +96,24 @@ class HelperServers:
Raises:
PeeweeException: If the server already exists
"""
- return Servers.insert(
- {
- Servers.server_name: name,
- Servers.server_uuid: server_uuid,
- Servers.path: server_dir,
- Servers.executable: server_file,
- Servers.execution_command: server_command,
- Servers.auto_start: False,
- Servers.auto_start_delay: 10,
- Servers.crash_detection: False,
- Servers.log_path: server_log_file,
- Servers.server_port: server_port,
- Servers.server_ip: server_host,
- Servers.stop_command: server_stop,
- Servers.backup_path: backup_path,
- Servers.type: server_type,
- Servers.created_by: created_by,
- Servers.app_id: app_id,
- }
- ).execute()
+ return Servers.create(
+ server_id=server_id,
+ server_uuid=server_id,
+ server_name=name,
+ path=server_dir,
+ executable=server_file,
+ execution_command=server_command,
+ auto_start=False,
+ auto_start_delay=10,
+ crash_detection=False,
+ log_path=server_log_file,
+ server_port=server_port,
+ server_ip=server_host,
+ stop_command=server_stop,
+ type=server_type,
+ created_by=created_by,
+ app_id=app_id,
+ ).server_id
@staticmethod
def get_server_obj(server_id):
diff --git a/app/classes/models/users.py b/app/classes/models/users.py
index e44d06fb..6f6a6bde 100644
--- a/app/classes/models/users.py
+++ b/app/classes/models/users.py
@@ -38,7 +38,7 @@ class Users(BaseModel):
superuser = BooleanField(default=False)
lang = CharField(default="en_EN")
support_logs = CharField(default="")
- valid_tokens_from = DateTimeField(default=datetime.datetime.now)
+ valid_tokens_from = DateTimeField(default=Helpers.get_utc_now)
server_order = CharField(default="")
preparing = BooleanField(default=False)
hints = BooleanField(default=True)
@@ -71,7 +71,7 @@ class ApiKeys(BaseModel):
user_id = ForeignKeyField(Users, backref="api_token", index=True)
server_permissions = CharField(default="00000000")
crafty_permissions = CharField(default="000")
- superuser = BooleanField(default=False)
+ full_access = BooleanField(default=False)
class Meta:
table_name = "api_keys"
@@ -119,7 +119,6 @@ class HelperUsers:
@staticmethod
def get_user_total():
count = Users.select().where(Users.username != "system").count()
- print(count)
return count
@staticmethod
@@ -408,7 +407,7 @@ class HelperUsers:
def add_user_api_key(
name: str,
user_id: str,
- superuser: bool = False,
+ full_access: bool = False,
server_permissions_mask: t.Optional[str] = None,
crafty_permissions_mask: t.Optional[str] = None,
):
@@ -426,7 +425,7 @@ class HelperUsers:
if crafty_permissions_mask is not None
else {}
),
- ApiKeys.superuser: superuser,
+ ApiKeys.full_access: full_access,
}
).execute()
diff --git a/app/classes/shared/authentication.py b/app/classes/shared/authentication.py
index fad8b730..94db5532 100644
--- a/app/classes/shared/authentication.py
+++ b/app/classes/shared/authentication.py
@@ -1,5 +1,6 @@
import logging
import time
+from datetime import datetime
from typing import Optional, Dict, Any, Tuple
import jwt
from jwt import PyJWTError
@@ -62,7 +63,17 @@ class Authentication:
user = HelperUsers.get_user(user_id)
# TODO: Have a cache or something so we don't constantly
# have to query the database
- if int(user.get("valid_tokens_from").timestamp()) < iat:
+ valid_tokens_from_str = user.get("valid_tokens_from")
+ # It's possible this will be a string or a dt coming from the DB
+ # We need to account for that
+ try:
+ valid_tokens_from_dt = datetime.strptime(
+ valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z"
+ )
+ except TypeError:
+ valid_tokens_from_dt = valid_tokens_from_str
+ # Convert the string to a datetime object
+ if int(valid_tokens_from_dt.timestamp()) < iat:
# Success!
return key, data, user
return None
diff --git a/app/classes/shared/command.py b/app/classes/shared/command.py
index 155fe083..4b7abbc3 100644
--- a/app/classes/shared/command.py
+++ b/app/classes/shared/command.py
@@ -18,7 +18,12 @@ logger = logging.getLogger(__name__)
class MainPrompt(cmd.Cmd):
def __init__(
- self, helper, tasks_manager, migration_manager, main_controller, import3
+ self,
+ helper,
+ tasks_manager,
+ migration_manager,
+ main_controller,
+ import3,
):
super().__init__()
self.helper: Helpers = helper
@@ -77,11 +82,11 @@ class MainPrompt(cmd.Cmd):
# get new password from user
new_pass = getpass.getpass(prompt=f"NEW password for: {username} > ")
# check to make sure it fits our requirements.
- if len(new_pass) > 512:
- Console.warning("Passwords must be greater than 6char long and under 512")
- return False
- if len(new_pass) < 6:
- Console.warning("Passwords must be greater than 6char long and under 512")
+ if len(new_pass) < self.helper.minimum_password_length:
+ Console.warning(
+ "Passwords must be greater than"
+ f" {self.helper.minimum_password_length} char long"
+ )
return False
# grab repeated password input
new_pass_conf = getpass.getpass(prompt="Re-enter your password: > ")
diff --git a/app/classes/shared/file_helpers.py b/app/classes/shared/file_helpers.py
index 90d8e65c..4fcd0c21 100644
--- a/app/classes/shared/file_helpers.py
+++ b/app/classes/shared/file_helpers.py
@@ -4,7 +4,10 @@ import logging
import pathlib
import tempfile
import zipfile
-from zipfile import ZipFile, ZIP_DEFLATED
+import hashlib
+from typing import BinaryIO
+import mimetypes
+from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
import urllib.request
import ssl
import time
@@ -22,6 +25,7 @@ class FileHelpers:
def __init__(self, helper):
self.helper: Helpers = helper
+ self.mime_types = mimetypes.MimeTypes()
@staticmethod
def ssl_get_file(
@@ -142,6 +146,32 @@ class FileHelpers:
logger.error(f"Path specified is not a file or does not exist. {path}")
return e
+ def check_mime_types(self, file_path):
+ m_type, _value = self.mime_types.guess_type(file_path)
+ return m_type
+
+ @staticmethod
+ def calculate_file_hash(file_path: str) -> str:
+ """
+ Takes one parameter of file path.
+ It will generate a SHA256 hash for the path and return it.
+ """
+ sha256_hash = hashlib.sha256()
+ with open(file_path, "rb") as f:
+ for byte_block in iter(lambda: f.read(4096), b""):
+ sha256_hash.update(byte_block)
+ return sha256_hash.hexdigest()
+
+ @staticmethod
+ def calculate_buffer_hash(buffer: BinaryIO) -> str:
+ """
+ Takes one argument of a stream buffer. Will return a
+ sha256 hash of the buffer
+ """
+ sha256_hash = hashlib.sha256()
+ sha256_hash.update(buffer)
+ return sha256_hash.hexdigest()
+
@staticmethod
def copy_dir(src_path, dest_path, dirs_exist_ok=False):
# pylint: disable=unexpected-keyword-arg
@@ -153,8 +183,7 @@ class FileHelpers:
@staticmethod
def move_dir(src_path, dest_path):
- FileHelpers.copy_dir(src_path, dest_path)
- FileHelpers.del_dirs(src_path)
+ shutil.move(src_path, dest_path)
@staticmethod
def move_dir_exist(src_path, dest_path):
@@ -163,8 +192,7 @@ class FileHelpers:
@staticmethod
def move_file(src_path, dest_path):
- FileHelpers.copy_file(src_path, dest_path)
- FileHelpers.del_file(src_path)
+ shutil.move(src_path, dest_path)
@staticmethod
def make_archive(path_to_destination, path_to_zip, comment=""):
@@ -229,74 +257,15 @@ class FileHelpers:
return True
- def make_compressed_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
- ):
- # create a ZipFile object
- path_to_destination += ".zip"
- ex_replace = [p.replace("\\", "/") for p in excluded_dirs]
- total_bytes = 0
- dir_bytes = Helpers.get_dir_size(path_to_zip)
- results = {
- "percent": 0,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
- with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
- zip_file.comment = bytes(
- comment, "utf-8"
- ) # comments over 65535 bytes will be truncated
- for root, dirs, files in os.walk(path_to_zip, topdown=True):
- for l_dir in dirs:
- if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace:
- dirs.remove(l_dir)
- ziproot = path_to_zip
- for file in files:
- if (
- str(os.path.join(root, file)).replace("\\", "/")
- not in ex_replace
- and file != "crafty.sqlite"
- ):
- try:
- logger.info(f"backing up: {os.path.join(root, file)}")
- if os.name == "nt":
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, ""), file),
- )
- else:
- zip_file.write(
- os.path.join(root, file),
- os.path.join(root.replace(ziproot, "/"), file),
- )
-
- except Exception as e:
- logger.warning(
- f"Error backing up: {os.path.join(root, file)}!"
- f" - Error was: {e}"
- )
- total_bytes += os.path.getsize(os.path.join(root, file))
- percent = round((total_bytes / dir_bytes) * 100, 2)
- results = {
- "percent": percent,
- "total_files": self.helper.human_readable_file_size(dir_bytes),
- }
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(server_id)},
- "backup_status",
- results,
- )
-
- return True
-
def make_backup(
- self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
+ self,
+ path_to_destination,
+ path_to_zip,
+ excluded_dirs,
+ server_id,
+ backup_id,
+ comment="",
+ compressed=None,
):
# create a ZipFile object
path_to_destination += ".zip"
@@ -313,7 +282,15 @@ class FileHelpers:
"backup_status",
results,
)
- with ZipFile(path_to_destination, "w") as zip_file:
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
+ # Set the compression mode based on the `compressed` parameter
+ compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED
+ with ZipFile(path_to_destination, "w", compression_mode) as zip_file:
zip_file.comment = bytes(
comment, "utf-8"
) # comments over 65535 bytes will be truncated
@@ -364,6 +341,7 @@ class FileHelpers:
results = {
"percent": percent,
"total_files": self.helper.human_readable_file_size(dir_bytes),
+ "backup_id": backup_id,
}
# send status results to page.
WebSocketManager().broadcast_page_params(
@@ -372,6 +350,12 @@ class FileHelpers:
"backup_status",
results,
)
+ WebSocketManager().broadcast_page_params(
+ "/panel/edit_backup",
+ {"id": str(server_id)},
+ "backup_status",
+ results,
+ )
return True
@staticmethod
diff --git a/app/classes/shared/helpers.py b/app/classes/shared/helpers.py
index f7cf663c..3780d2df 100644
--- a/app/classes/shared/helpers.py
+++ b/app/classes/shared/helpers.py
@@ -19,7 +19,7 @@ import shutil
import shlex
import subprocess
import itertools
-from datetime import datetime
+from datetime import datetime, timezone
from socket import gethostname
from contextlib import redirect_stderr, suppress
import libgravatar
@@ -72,9 +72,10 @@ class Helpers:
self.db_path = os.path.join(
self.root_dir, "app", "config", "db", "crafty.sqlite"
)
- self.serverjar_cache = os.path.join(self.config_dir, "serverjars.json")
+ self.big_bucket_cache = os.path.join(self.config_dir, "bigbucket.json")
self.steamapps_cache = os.path.join(self.config_dir, "steamapps.json")
self.credits_cache = os.path.join(self.config_dir, "credits.json")
+
self.passhasher = PasswordHasher()
self.exiting = False
@@ -82,6 +83,7 @@ class Helpers:
self.update_available = False
self.ignored_names = ["crafty_managed.txt", "db_stats"]
self.crafty_starting = False
+ self.minimum_password_length = 8
@staticmethod
def auto_installer_fix(ex):
@@ -118,7 +120,7 @@ class Helpers:
Get latest bedrock executable url \n\n
returns url if successful, False if not
"""
- url = "https://minecraft.net/en-us/download/server/bedrock/"
+ url = "https://www.minecraft.net/en-us/download/server/bedrock/"
headers = {
"Accept-Encoding": "identity",
"Accept-Language": "en",
@@ -496,7 +498,6 @@ class Helpers:
# Config.json was removed from the repo to make it easier for users
# To make non-breaking changes to the file.
return {
- "http_port": 8000,
"https_port": 8443,
"language": "en_EN",
"cookie_expire": 30,
@@ -509,7 +510,6 @@ class Helpers:
"max_log_lines": 700,
"max_audit_entries": 300,
"disabled_language_files": [],
- "stream_size_GB": 1,
"keywords": ["help", "chunk"],
"allow_nsfw_profile_pictures": False,
"enable_user_self_delete": False,
@@ -517,6 +517,7 @@ class Helpers:
"monitored_mounts": mounts,
"dir_size_poll_freq_minutes": 5,
"crafty_logs_delete_after_days": 0,
+ "big_bucket_repo": "https://jars.arcadiatech.org",
}
def get_all_settings(self):
@@ -640,6 +641,10 @@ class Helpers:
version = f"{major}.{minor}.{sub}"
return str(version)
+ @staticmethod
+ def get_utc_now() -> datetime:
+ return datetime.fromtimestamp(time.time(), tz=timezone.utc)
+
def encode_pass(self, password):
return self.passhasher.hash(password)
@@ -1006,6 +1011,11 @@ class Helpers:
except PermissionError as e:
logger.critical(f"Check generated exception due to permssion error: {e}")
return False
+ except FileNotFoundError as e:
+ logger.critical(
+ f"Check generated exception due to file does not exist error: {e}"
+ )
+ return False
def create_self_signed_cert(self, cert_dir=None):
if cert_dir is None:
diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py
index 4344b18d..8120d270 100644
--- a/app/classes/shared/main_controller.py
+++ b/app/classes/shared/main_controller.py
@@ -1,4 +1,5 @@
import os
+import sys
import pathlib
from pathlib import Path
from datetime import datetime
@@ -32,7 +33,7 @@ from app.classes.shared.console import Console
from app.classes.shared.helpers import Helpers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.import_helper import ImportHelpers
-from app.classes.minecraft.serverjars import ServerJars
+from app.classes.minecraft.bigbucket import BigBucket
from app.classes.shared.websocket_manager import WebSocketManager
from app.classes.steamcmd.serverapps import SteamApps
@@ -45,8 +46,10 @@ class Controller:
self.helper: Helpers = helper
self.file_helper: FileHelpers = file_helper
self.import_helper: ImportHelpers = import_helper
- self.server_jars: ServerJars = ServerJars(helper)
+
+ self.big_bucket: BigBucket = BigBucket(helper)
self.steam_apps: SteamApps = SteamApps(helper)
+
self.users_helper: HelperUsers = HelperUsers(database, self.helper)
self.roles_helper: HelperRoles = HelperRoles(database)
self.servers_helper: HelperServers = HelperServers(database)
@@ -242,7 +245,7 @@ class Controller:
try:
os.mkdir(final_path)
except FileExistsError:
- final_path += "_" + server["server_uuid"]
+ final_path += "_" + server["server_id"]
os.mkdir(final_path)
try:
FileHelpers.copy_file(
@@ -254,6 +257,19 @@ class Controller:
# Copy crafty logs to archive dir
full_log_name = os.path.join(crafty_path, "logs")
FileHelpers.copy_dir(os.path.join(self.project_root, "logs"), full_log_name)
+ thread_dump = ""
+ for thread in threading.enumerate():
+ if sys.version_info >= (3, 8):
+ thread_dump += (
+ f"Name: {thread.name}\tIdentifier:"
+ f" {thread.ident}\tTID/PID: {thread.native_id}\n"
+ )
+ else:
+ print(f"Name: {thread.name}\tIdentifier: {thread.ident}")
+ with open(
+ os.path.join(temp_dir, "crafty_thread_dump.txt"), "a", encoding="utf-8"
+ ) as f:
+ f.write(thread_dump)
self.support_scheduler.add_job(
self.log_status,
"interval",
@@ -439,7 +455,7 @@ class Controller:
if root_create_data["create_type"] == "download_jar":
if Helpers.is_os_windows():
# Let's check for and setup for install server commands
- if create_data["type"] == "forge":
+ if create_data["type"] == "forge-installer":
server_command = (
f"java -Xms{Helpers.float_to_string(min_mem)}M "
f"-Xmx{Helpers.float_to_string(max_mem)}M "
@@ -452,7 +468,7 @@ class Controller:
f'-jar "{server_file}" nogui'
)
else:
- if create_data["type"] == "forge":
+ if create_data["type"] == "forge-installer":
server_command = (
f"java -Xms{Helpers.float_to_string(min_mem)}M "
f"-Xmx{Helpers.float_to_string(max_mem)}M "
@@ -565,7 +581,6 @@ class Controller:
name=data["name"],
server_uuid=server_fs_uuid,
server_dir=new_server_path,
- backup_path=backup_path,
server_command=server_command,
server_file=server_file,
server_log_file=log_location,
@@ -575,26 +590,23 @@ class Controller:
server_host=monitoring_host,
server_type=monitoring_type,
)
- self.management.set_backup_config(
+ self.management.add_default_backup_config(
new_server_id,
backup_path,
)
if data["create_type"] == "minecraft_java":
if root_create_data["create_type"] == "download_jar":
# modded update urls from server jars will only update the installer
- if (
- create_data["category"] != "modded"
- and create_data["type"] not in ServerJars.get_paper_jars()
- ):
+ if create_data["type"] != "forge-installer":
server_obj = self.servers.get_server_obj(new_server_id)
- url = (
- "https://serverjars.com/api/fetchJar/"
- f"{create_data['category']}"
- f"/{create_data['type']}/{create_data['version']}"
+ url = self.big_bucket.get_fetch_url(
+ create_data["category"],
+ create_data["type"],
+ create_data["version"],
)
server_obj.executable_update_url = url
self.servers.update_server(server_obj)
- self.server_jars.download_jar(
+ self.big_bucket.download_jar(
create_data["category"],
create_data["type"],
create_data["version"],
@@ -654,11 +666,11 @@ class Controller:
# and add the user to it if he's not a superuser
if len(captured_roles) == 0:
if not exec_user["superuser"]:
- new_server_uuid = self.servers.get_server_data_by_id(new_server_id).get(
- "server_uuid"
+ new_server_id = self.servers.get_server_data_by_id(new_server_id).get(
+ "server_id"
)
role_id = self.roles.add_role(
- f"Creator of Server with uuid={new_server_uuid}",
+ f"Creator of Server with id={new_server_id}",
exec_user["user_id"],
)
self.server_perms.add_role_server(new_server_id, role_id, "11111111")
@@ -669,7 +681,7 @@ class Controller:
role_id = role
self.server_perms.add_role_server(new_server_id, role_id, "11111111")
- return new_server_id, server_fs_uuid
+ return new_server_id
@staticmethod
def verify_jar_server(server_path: str, server_jar: str):
@@ -733,7 +745,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_jar,
server_log_file,
@@ -787,7 +798,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -832,7 +842,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -880,7 +889,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
- backup_path,
server_command,
server_exe,
server_log_file,
@@ -904,16 +912,13 @@ class Controller:
# **********************************************************************************
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
- server_data = self.servers.get_server_data_by_id(old_server_id)
server_obj = self.servers.get_server_obj(new_server_id)
- old_bu_path = server_data["backup_path"]
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
- backup_path = old_bu_path
+ backup_path = os.path.join(self.helper.backup_path, old_server_id)
backup_path = Path(backup_path)
backup_path_components = list(backup_path.parts)
backup_path_components[-1] = new_uuid
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
- server_obj.backup_path = new_bu_path
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
try:
os.rmdir(default_backup_dir)
@@ -927,7 +932,6 @@ class Controller:
name: str,
server_uuid: str,
server_dir: str,
- backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -943,7 +947,6 @@ class Controller:
name,
server_uuid,
server_dir,
- backup_path,
server_command,
server_file,
server_log_file,
@@ -1009,16 +1012,16 @@ class Controller:
f"Unable to delete server files for server with ID: "
f"{server_id} with error logged: {e}"
)
- if Helpers.check_path_exists(
- self.servers.get_server_data_by_id(server_id)["backup_path"]
- ):
- FileHelpers.del_dirs(
- Helpers.get_os_understandable_path(
- self.servers.get_server_data_by_id(server_id)[
- "backup_path"
- ]
+ backup_configs = HelpersManagement.get_backups_by_server(
+ server_id, True
+ )
+ for config in backup_configs:
+ if Helpers.check_path_exists(config.backup_location):
+ FileHelpers.del_dirs(
+ Helpers.get_os_understandable_path(
+ config.backup_location
+ )
)
- )
# Cleanup scheduled tasks
try:
@@ -1119,7 +1122,7 @@ class Controller:
for server in servers:
server_path = server.get("path")
new_local_server_path = os.path.join(
- new_server_path, server.get("server_uuid")
+ new_server_path, server.get("server_id")
)
if os.path.isdir(server_path):
WebSocketManager().broadcast_page(
@@ -1155,7 +1158,7 @@ class Controller:
server_obj.path = new_local_server_path
failed = False
for s in self.servers.failed_servers:
- if int(s["server_id"]) == int(server.get("server_id")):
+ if s["server_id"] == server.get("server_id"):
failed = True
if not failed:
self.servers.update_server(server_obj)
diff --git a/app/classes/shared/main_models.py b/app/classes/shared/main_models.py
index c166b7fb..77633a4c 100644
--- a/app/classes/shared/main_models.py
+++ b/app/classes/shared/main_models.py
@@ -18,13 +18,22 @@ class DatabaseBuilder:
logger.info("Fresh Install Detected - Creating Default Settings")
Console.info("Fresh Install Detected - Creating Default Settings")
default_data = self.helper.find_default_password()
- if password not in default_data:
+ if "password" not in default_data:
Console.help(
"No default password found. Using password created "
"by Crafty. Find it in app/config/default-creds.txt"
)
username = default_data.get("username", "admin")
- password = default_data.get("password", password)
+ if self.helper.minimum_password_length > len(
+ default_data.get("password", password)
+ ):
+ Console.critical(
+ "Default password too short"
+ " using Crafty's created default."
+ " Find it in app/config/default-creds.txt"
+ )
+ else:
+ password = default_data.get("password", password)
self.users_helper.add_user(
username=username,
diff --git a/app/classes/shared/migration.py b/app/classes/shared/migration.py
index c31542a2..287bb4f3 100644
--- a/app/classes/shared/migration.py
+++ b/app/classes/shared/migration.py
@@ -200,6 +200,21 @@ class Migrator(object):
)
return model
+ @get_model
+ def alter_column_type(
+ self,
+ model: peewee.Model,
+ column_name: str,
+ field: peewee.Field,
+ ) -> peewee.Model:
+ """
+ Alter field data type in database.
+ """
+ self.operations.append(
+ self.migrator.alter_column_type(model._meta.table_name, column_name, field)
+ )
+ return model
+
@get_model
def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model:
"""
@@ -354,9 +369,12 @@ class MigrationManager(object):
@cached_property
def migrator(self) -> Migrator:
"""
- Create migrator and setup it with fake migrations.
+ Create migrator
"""
migrator = Migrator(self.database)
+ # Running fake migrations to retrieve the schemas of the
+ # previously created tables into the table_dict element.
+ # It's useful to run the new migrations
for name in self.done:
self.up_one(name, migrator, True)
return migrator
diff --git a/app/classes/shared/server.py b/app/classes/shared/server.py
index 3af51558..880215c5 100644
--- a/app/classes/shared/server.py
+++ b/app/classes/shared/server.py
@@ -209,9 +209,6 @@ class ServerInstance:
self.server_scheduler.start()
self.dir_scheduler.start()
self.start_dir_calc_task()
- self.backup_thread = threading.Thread(
- target=self.a_backup_server, daemon=True, name=f"backup_{self.name}"
- )
self.is_backingup = False
# Reset crash and update at initialization
self.stats_helper.server_crash_reset()
@@ -765,12 +762,17 @@ class ServerInstance:
try:
# Getting the forge version from the executable command
version = re.findall(
- r"forge-([0-9\.]+)((?:)|(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
+ r"forge-installer-([0-9\.]+)((?:)|"
+ r"(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
server_obj.execution_command,
)
version_param = version[0][0].split(".")
version_major = int(version_param[0])
version_minor = int(version_param[1])
+ if len(version_param) > 2:
+ version_sub = int(version_param[2])
+ else:
+ version_sub = 0
# Checking which version we are with
if version_major <= 1 and version_minor < 17:
@@ -804,8 +806,8 @@ class ServerInstance:
server_obj.execution_command = execution_command
Console.debug(SUCCESSMSG)
- elif version_major <= 1 and version_minor < 20:
- # NEW VERSION >= 1.17 and <= 1.20
+ elif version_major <= 1 and version_minor <= 20 and version_sub < 3:
+ # NEW VERSION >= 1.17 and <= 1.20.2
# (no jar file in server dir, only run.bat and run.sh)
run_file_path = ""
@@ -852,7 +854,7 @@ class ServerInstance:
server_obj.execution_command = execution_command
Console.debug(SUCCESSMSG)
else:
- # NEW VERSION >= 1.20
+ # NEW VERSION >= 1.20.3
# (executable jar is back in server dir)
# Retrieving the executable jar filename
@@ -1010,8 +1012,7 @@ class ServerInstance:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
def restart_threaded_server(self, user_id):
- bu_conf = HelpersManagement.get_backup_config(self.server_id)
- if self.is_backingup and bu_conf["shutdown"]:
+ if self.is_backingup:
logger.info(
"Restart command detected. Supressing - server has"
" backup shutdown enabled and server is currently backing up."
@@ -1181,13 +1182,16 @@ class ServerInstance:
f.write("eula=true")
self.run_threaded_server(user_id)
- @callback
- def backup_server(self):
- if self.settings["backup_path"] == "":
- logger.critical("Backup path is None. Canceling Backup!")
- return
+ def server_backup_threader(self, backup_id, update=False):
+ # Check to see if we're already backing up
+ if self.check_backup_by_id(backup_id):
+ return False
+
backup_thread = threading.Thread(
- target=self.a_backup_server, daemon=True, name=f"backup_{self.name}"
+ target=self.backup_server,
+ daemon=True,
+ name=f"backup_{backup_id}",
+ args=[backup_id, update],
)
logger.info(
f"Starting Backup Thread for server {self.settings['server_name']}."
@@ -1198,26 +1202,20 @@ class ServerInstance:
"Backup Thread - Local server path not defined. "
"Setting local server path variable."
)
- # checks if the backup thread is currently alive for this server
- if not self.is_backingup:
- try:
- backup_thread.start()
- self.is_backingup = True
- except Exception as ex:
- logger.error(f"Failed to start backup: {ex}")
- return False
- else:
- logger.error(
- f"Backup is already being processed for server "
- f"{self.settings['server_name']}. Canceling backup request"
- )
+
+ try:
+ backup_thread.start()
+ except Exception as ex:
+ logger.error(f"Failed to start backup: {ex}")
return False
logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
- def a_backup_server(self):
+ @callback
+ def backup_server(self, backup_id, update):
was_server_running = None
logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
server_users = PermissionsServers.get_server_user_list(self.server_id)
+ # Alert the start of the backup to the authorized users.
for user in server_users:
WebSocketManager().broadcast_user(
user,
@@ -1227,30 +1225,40 @@ class ServerInstance:
).format(self.name),
)
time.sleep(3)
- conf = HelpersManagement.get_backup_config(self.server_id)
+
+ # Get the backup config
+ conf = HelpersManagement.get_backup_config(backup_id)
+ # Check for a configured backup location before building the
+ # destination path (os.path.join never returns an empty string,
+ # so the guard must run on the raw config value).
+ if not conf["backup_location"]:
+ Console.critical("No backup path found. Canceling")
+ return None
+ backup_location = os.path.join(conf["backup_location"], conf["backup_id"])
if conf["before"]:
- if self.check_running():
- logger.debug(
- "Found running server and send command option. Sending command"
- )
- self.send_command(conf["before"])
+ logger.debug(
+ "Found running server and send command option. Sending command"
+ )
+ self.send_command(conf["before"])
+ # Pause to let command run
+ time.sleep(5)
if conf["shutdown"]:
- if conf["before"]:
- # pause to let people read message.
- time.sleep(5)
logger.info(
"Found shutdown preference. Delaying"
+ "backup start. Shutting down server."
)
- if self.check_running():
- self.stop_server()
- was_server_running = True
+ if not update:
+ was_server_running = False
+ if self.check_running():
+ self.stop_server()
+ was_server_running = True
+
+ self.helper.ensure_dir_exists(backup_location)
- self.helper.ensure_dir_exists(self.settings["backup_path"])
try:
backup_filename = (
- f"{self.settings['backup_path']}/"
+ f"{backup_location}/"
f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long
)
logger.info(
@@ -1258,42 +1266,36 @@ class ServerInstance:
f" (ID#{self.server_id}, path={self.server_path}) "
f"at '{backup_filename}'"
)
- excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id)
+ excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id)
server_dir = Helpers.get_os_understandable_path(self.settings["path"])
- if conf["compress"]:
- logger.debug(
- "Found compress backup to be true. Calling compressed archive"
- )
- self.file_helper.make_compressed_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
- else:
- logger.debug(
- "Found compress backup to be false. Calling NON-compressed archive"
- )
- self.file_helper.make_backup(
- Helpers.get_os_understandable_path(backup_filename),
- server_dir,
- excluded_dirs,
- self.server_id,
- )
+
+ self.file_helper.make_backup(
+ Helpers.get_os_understandable_path(backup_filename),
+ server_dir,
+ excluded_dirs,
+ self.server_id,
+ backup_id,
+ conf["backup_name"],
+ conf["compress"],
+ )
while (
- len(self.list_backups()) > conf["max_backups"]
+ len(self.list_backups(conf)) > conf["max_backups"]
and conf["max_backups"] > 0
):
- backup_list = self.list_backups()
+ backup_list = self.list_backups(conf)
oldfile = backup_list[0]
- oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
+ oldfile_path = f"{backup_location}/{oldfile['path']}"
logger.info(f"Removing old backup '{oldfile['path']}'")
os.remove(Helpers.get_os_understandable_path(oldfile_path))
- self.is_backingup = False
logger.info(f"Backup of server: {self.name} completed")
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1318,7 +1320,6 @@ class ServerInstance:
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
time.sleep(3)
- self.last_backup_failed = False
if conf["after"]:
if self.check_running():
logger.debug(
@@ -1326,12 +1327,21 @@ class ServerInstance:
)
self.send_command(conf["after"])
# pause to let people read message.
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Standby", "message": ""})},
+ )
time.sleep(5)
- except:
+ except Exception as e:
logger.exception(
f"Failed to create backup of server {self.name} (ID {self.server_id})"
)
- results = {"percent": 100, "total_files": 0, "current_file": 0}
+ results = {
+ "percent": 100,
+ "total_files": 0,
+ "current_file": 0,
+ "backup_id": backup_id,
+ }
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@@ -1339,56 +1349,51 @@ class ServerInstance:
"backup_status",
results,
)
- self.is_backingup = False
if was_server_running:
logger.info(
"Backup complete. User had shutdown preference. Starting server."
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
- self.last_backup_failed = True
-
- def backup_status(self, source_path, dest_path):
- results = Helpers.calc_percent(source_path, dest_path)
- self.backup_stats = results
- if len(WebSocketManager().clients) > 0:
- WebSocketManager().broadcast_page_params(
- "/panel/server_detail",
- {"id": str(self.server_id)},
- "backup_status",
- results,
+ HelpersManagement.update_backup_config(
+ backup_id,
+ {"status": json.dumps({"status": "Failed", "message": f"{e}"})},
)
+ self.set_backup_status()
def last_backup_status(self):
return self.last_backup_failed
- def send_backup_status(self):
- try:
- return self.backup_stats
- except:
- return {"percent": 0, "total_files": 0}
+ def set_backup_status(self):
+ backups = HelpersManagement.get_backups_by_server(self.server_id, True)
+ alert = False
+ for backup in backups:
+ if json.loads(backup.status)["status"] == "Failed":
+ alert = True
+ self.last_backup_failed = alert
- def list_backups(self):
- if not self.settings["backup_path"]:
+ def list_backups(self, backup_config: dict) -> list:
+ if not backup_config:
logger.info(
f"Error putting backup file list for server with ID: {self.server_id}"
)
return []
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
if not Helpers.check_path_exists(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
):
return []
files = Helpers.get_human_readable_files_sizes(
Helpers.list_dir_by_date(
- Helpers.get_os_understandable_path(self.settings["backup_path"])
+ Helpers.get_os_understandable_path(backup_location)
)
)
return [
{
"path": os.path.relpath(
f["path"],
- start=Helpers.get_os_understandable_path(
- self.settings["backup_path"]
- ),
+ start=Helpers.get_os_understandable_path(backup_location),
),
"size": f["size"],
}
@@ -1400,7 +1405,7 @@ class ServerInstance:
def server_upgrade(self):
self.stats_helper.set_update(True)
update_thread = threading.Thread(
- target=self.a_server_upgrade, daemon=True, name=f"exe_update_{self.name}"
+ target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}"
)
update_thread.start()
@@ -1441,14 +1446,32 @@ class ServerInstance:
def check_update(self):
return self.stats_helper.get_server_stats()["updating"]
- def a_server_upgrade(self):
+ def threaded_jar_update(self):
server_users = PermissionsServers.get_server_user_list(self.server_id)
+ # check to make sure a backup config actually exists before starting the update
+ if len(self.management_helper.get_backups_by_server(self.server_id, True)) <= 0:
+ for user in server_users:
+ WebSocketManager().broadcast_user(
+ user,
+ "notification",
+ "Backup config does not exist for "
+ + self.name
+ + ". canceling update.",
+ )
+ logger.error(f"Back config does not exist for {self.name}. Update Failed.")
+ self.stats_helper.set_update(False)
+ return
was_started = "-1"
###############################
# Backup Server ###############
###############################
- self.backup_server()
+
+ # Get default backup configuration
+ backup_config = HelpersManagement.get_default_server_backup(self.server_id)
+ # start threaded backup
+ self.server_backup_threader(backup_config["backup_id"], True)
+
# checks if server is running. Calls shutdown if it is running.
if self.check_running():
was_started = True
@@ -1477,54 +1500,30 @@ class ServerInstance:
"string": message,
},
)
- backup_dir = os.path.join(
- Helpers.get_os_understandable_path(self.settings["path"]),
- "crafty_executable_backups",
- )
- # checks if backup directory already exists
- if os.path.isdir(backup_dir):
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
- else:
- logger.info(
- f"Executable backup directory not found for Server: {self.name}."
- f" Creating one."
- )
- os.mkdir(backup_dir)
- backup_executable = os.path.join(backup_dir, self.settings["executable"])
-
- if len(os.listdir(backup_dir)) > 0:
- # removes old backup
- logger.info(f"Old backups found for server: {self.name}. Removing...")
- for item in os.listdir(backup_dir):
- os.remove(os.path.join(backup_dir, item))
- logger.info(f"Old backups removed for server: {self.name}.")
- else:
- logger.info(f"No old backups found for server: {self.name}")
-
current_executable = os.path.join(
Helpers.get_os_understandable_path(self.settings["path"]),
self.settings["executable"],
)
-
- try:
- # copies to backup dir
- FileHelpers.copy_file(current_executable, backup_executable)
- except FileNotFoundError:
- logger.error("Could not create backup of jarfile. File not found.")
-
+ backing_up = True
# wait for backup
- while self.is_backingup:
- time.sleep(10)
+ while backing_up:
+ # Check to see if we're already backing up
+ backing_up = self.check_backup_by_id(backup_config["backup_id"])
+ time.sleep(2)
# check if backup was successful
- if self.last_backup_failed:
+ backup_status = json.loads(
+ HelpersManagement.get_backup_config(backup_config["backup_id"])["status"]
+ )["status"]
+ if backup_status == "Failed":
for user in server_users:
WebSocketManager().broadcast_user(
user,
"notification",
"Backup failed for " + self.name + ". canceling update.",
)
- return False
+ self.stats_helper.set_update(False)
+ return
################################
# Executable Download ##########
@@ -1638,12 +1637,6 @@ class ServerInstance:
WebSocketManager().broadcast_user_page(
user, "/panel/dashboard", "send_start_reload", {}
)
- WebSocketManager().broadcast_user(
- user,
- "notification",
- "Executable update finished for " + self.name,
- )
-
self.management_helper.add_to_audit_log_raw(
"Alert",
"-1",
@@ -1766,6 +1759,14 @@ class ServerInstance:
except:
Console.critical("Can't broadcast server status to websocket")
+ def check_backup_by_id(self, backup_id: str) -> bool:
+ # Check to see if we're already backing up
+ for thread in threading.enumerate():
+ if thread.getName() == f"backup_{backup_id}":
+ Console.debug(f"Backup with id {backup_id} already running!")
+ return True
+ return False
+
def get_servers_stats(self):
server_stats = {}
diff --git a/app/classes/shared/tasks.py b/app/classes/shared/tasks.py
index 1a527d55..936909e4 100644
--- a/app/classes/shared/tasks.py
+++ b/app/classes/shared/tasks.py
@@ -140,7 +140,7 @@ class TasksManager:
)
elif command == "backup_server":
- svr.backup_server()
+ svr.server_backup_threader(cmd["action_id"])
elif command == "update_executable":
svr.server_upgrade()
@@ -240,6 +240,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -268,6 +269,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -284,6 +286,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -303,6 +306,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -337,6 +341,7 @@ class TasksManager:
job_data["cron_string"],
job_data["parent"],
job_data["delay"],
+ job_data["action_id"],
)
# Checks to make sure some doofus didn't actually make the newly
@@ -367,6 +372,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -393,6 +399,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -409,6 +416,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -428,6 +436,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -520,6 +529,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -543,6 +553,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -559,6 +570,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -578,6 +590,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
+ "action_id": job_data["action_id"],
}
],
)
@@ -653,6 +666,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
+ "action_id": schedule.action_id,
}
],
)
@@ -685,16 +699,16 @@ class TasksManager:
id="stats",
)
- def serverjar_cache_refresher(self):
- logger.info("Refreshing serverjars.com cache on start")
- self.controller.server_jars.refresh_cache()
+ def big_bucket_cache_refresher(self):
+ logger.info("Refreshing big bucket cache on start")
+ self.controller.big_bucket.refresh_cache()
- logger.info("Scheduling Serverjars.com cache refresh service every 12 hours")
+ logger.info("Scheduling big bucket cache refresh service every 12 hours")
self.scheduler.add_job(
- self.controller.server_jars.refresh_cache,
+ self.controller.big_bucket.refresh_cache,
"interval",
hours=12,
- id="serverjars",
+ id="big_bucket",
)
def steamapps_cache_refresher(self):
@@ -797,6 +811,18 @@ class TasksManager:
self.helper.ensure_dir_exists(
os.path.join(self.controller.project_root, "import", "upload")
)
+ self.helper.ensure_dir_exists(
+ os.path.join(self.controller.project_root, "temp")
+ )
+ for file in os.listdir(os.path.join(self.controller.project_root, "temp")):
+ if self.helper.is_file_older_than_x_days(
+ os.path.join(self.controller.project_root, "temp", file)
+ ):
+ try:
+ os.remove(os.path.join(file))
+ except FileNotFoundError:
+ logger.debug("Could not clear out file from temp directory")
+
for file in os.listdir(
os.path.join(self.controller.project_root, "import", "upload")
):
@@ -805,7 +831,7 @@ class TasksManager:
):
try:
os.remove(os.path.join(file))
- except:
+ except FileNotFoundError:
logger.debug("Could not clear out file from import directory")
def log_watcher(self):
diff --git a/app/classes/shared/translation.py b/app/classes/shared/translation.py
index 0e441808..538856a8 100644
--- a/app/classes/shared/translation.py
+++ b/app/classes/shared/translation.py
@@ -20,7 +20,7 @@ class Translation:
def get_language_file(self, language: str):
return os.path.join(self.translations_path, str(language) + ".json")
- def translate(self, page, word, language):
+ def translate(self, page, word, language, error=True):
fallback_language = "en_EN"
translated_word = self.translate_inner(page, word, language)
@@ -37,7 +37,9 @@ class Translation:
if hasattr(translated_word, "__iter__"):
# Multiline strings
return "\n".join(translated_word)
- return "Error while getting translation"
+ if error:
+ return "Error while getting translation"
+ return word
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
language_file = self.get_language_file(language)
diff --git a/app/classes/web/base_handler.py b/app/classes/web/base_handler.py
index ced6cb97..2d9261ea 100644
--- a/app/classes/web/base_handler.py
+++ b/app/classes/web/base_handler.py
@@ -6,6 +6,7 @@ import nh3
import tornado.web
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
+from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.users import ApiKeys
from app.classes.shared.helpers import Helpers
from app.classes.shared.file_helpers import FileHelpers
@@ -182,6 +183,7 @@ class BaseHandler(tornado.web.RequestHandler):
t.List[str],
bool,
t.Dict[str, t.Any],
+ str,
]
]:
try:
@@ -190,9 +192,12 @@ class BaseHandler(tornado.web.RequestHandler):
)
superuser = user["superuser"]
+ server_permissions_api_mask = ""
if api_key is not None:
- superuser = superuser and api_key.superuser
-
+ superuser = superuser and api_key.full_access
+ server_permissions_api_mask = api_key.server_permissions
+ if api_key.full_access:
+ server_permissions_api_mask = "1" * len(EnumPermissionsServer)
exec_user_role = set()
if superuser:
authorized_servers = self.controller.servers.get_all_defined_servers()
@@ -214,6 +219,7 @@ class BaseHandler(tornado.web.RequestHandler):
user["user_id"]
)
)
+
logger.debug(user["roles"])
for r in user["roles"]:
role = self.controller.roles.get_role(r)
@@ -234,6 +240,7 @@ class BaseHandler(tornado.web.RequestHandler):
exec_user_role,
superuser,
user,
+ server_permissions_api_mask,
)
logging.debug("Auth unsuccessful")
auth_log.error(
diff --git a/app/classes/web/http_handler.py b/app/classes/web/http_handler.py
deleted file mode 100644
index 32676d59..00000000
--- a/app/classes/web/http_handler.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import logging
-import requests
-
-from app.classes.web.base_handler import BaseHandler
-
-logger = logging.getLogger(__name__)
-
-
-class HTTPHandler(BaseHandler):
- def get(self):
- url = str(self.request.host)
- port = 443
- url_list = url.split(":")
- if url_list[0] != "":
- url = "https://" + url_list[0]
- else:
- url = "https://" + url
- db_port = self.helper.get_setting("https_port")
- try:
- resp = requests.head(url + ":" + str(port), timeout=(0.5, 5))
- resp.raise_for_status()
- except Exception:
- port = db_port
- self.redirect(url + ":" + str(port))
-
-
-class HTTPHandlerPage(BaseHandler):
- def get(self):
- url = str(self.request.host)
- port = 443
- url_list = url.split(":")
- if url_list[0] != "":
- url = "https://" + url_list[0]
- else:
- url = "https://" + url
- db_port = self.helper.get_setting("https_port")
- try:
- resp = requests.head(url + ":" + str(port), timeout=(0.5, 5))
- resp.raise_for_status()
- except Exception:
- port = db_port
- self.redirect(url + ":" + str(port))
diff --git a/app/classes/web/http_handler_page.py b/app/classes/web/http_handler_page.py
deleted file mode 100644
index 77161577..00000000
--- a/app/classes/web/http_handler_page.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import logging
-import requests
-from app.classes.web.base_handler import BaseHandler
-
-logger = logging.getLogger(__name__)
-
-
-class HTTPHandlerPage(BaseHandler):
- def get(self):
- url = self.request.full_url
- port = 443
- if url[len(url) - 1] == "/":
- url = url.strip(url[len(url) - 1])
- url_list = url.split("/")
- if url_list[0] != "":
- primary_url = url_list[0] + ":" + str(port) + "/"
- backup_url = (
- url_list[0] + ":" + str(self.helper.get_setting("https_port")) + "/"
- )
- for i in range(len(url_list) - 1):
- primary_url += url_list[i + 1]
- backup_url += url_list[i + 1]
- else:
- primary_url = url + str(port)
- backup_url = url + str(self.helper.get_setting("https_port"))
-
- try:
- resp = requests.head(primary_url, timeout=(0.5, 5))
- resp.raise_for_status()
- url = primary_url
- except Exception:
- url = backup_url
- self.redirect("https://" + url + ":" + str(port))
diff --git a/app/classes/web/panel_handler.py b/app/classes/web/panel_handler.py
index e8c93c68..8df48431 100644
--- a/app/classes/web/panel_handler.py
+++ b/app/classes/web/panel_handler.py
@@ -41,6 +41,8 @@ SUBPAGE_PERMS = {
"webhooks": EnumPermissionsServer.CONFIG,
}
+SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules"
+
class PanelHandler(BaseHandler):
def get_user_roles(self) -> t.Dict[str, list]:
@@ -168,13 +170,13 @@ class PanelHandler(BaseHandler):
# Commented out because there is no server access control for API keys,
# they just inherit from the host user
# if api_key is not None:
- # superuser = superuser and api_key.superuser
+ # superuser = superuser and api_key.full_access
if server_id is None:
self.redirect("/panel/error?error=Invalid Server ID")
return None
for server in self.controller.servers.failed_servers:
- if int(server_id) == server["server_id"]:
+ if server_id == server["server_id"]:
self.failed_server = True
return server_id
# Does this server exist?
@@ -242,7 +244,7 @@ class PanelHandler(BaseHandler):
api_key, _token_data, exec_user = self.current_user
superuser = exec_user["superuser"]
if api_key is not None:
- superuser = superuser and api_key.superuser
+ superuser = superuser and api_key.full_access
if superuser: # TODO: Figure out a better solution
defined_servers = self.controller.servers.list_defined_servers()
@@ -351,7 +353,7 @@ class PanelHandler(BaseHandler):
"created": api_key.created,
"server_permissions": api_key.server_permissions,
"crafty_permissions": api_key.crafty_permissions,
- "superuser": api_key.superuser,
+ "full_access": api_key.full_access,
}
if api_key is not None
else None
@@ -556,7 +558,7 @@ class PanelHandler(BaseHandler):
"server_id": {
"server_id": server_id,
"server_name": server_temp_obj["server_name"],
- "server_uuid": server_temp_obj["server_uuid"],
+ "server_uuid": server_temp_obj["server_id"],
"path": server_temp_obj["path"],
"log_path": server_temp_obj["log_path"],
"executable": server_temp_obj["executable"],
@@ -574,6 +576,7 @@ class PanelHandler(BaseHandler):
"crash_detection": server_temp_obj["crash_detection"],
"show_status": server_temp_obj["show_status"],
"ignored_exits": server_temp_obj["ignored_exits"],
+ "count_players": server_temp_obj["count_players"],
},
"running": False,
"crashed": False,
@@ -676,36 +679,18 @@ class PanelHandler(BaseHandler):
page_data["java_versions"] = page_java
if subpage == "backup":
server_info = self.controller.servers.get_server_data_by_id(server_id)
- page_data["backup_config"] = (
- self.controller.management.get_backup_config(server_id)
- )
- exclusions = []
- page_data["exclusions"] = (
- self.controller.management.get_excluded_backup_dirs(server_id)
+
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
)
page_data["backing_up"] = (
self.controller.servers.get_server_instance_by_id(
server_id
).is_backingup
)
- page_data["backup_stats"] = (
- self.controller.servers.get_server_instance_by_id(
- server_id
- ).send_backup_status()
- )
# makes it so relative path is the only thing shown
- for file in page_data["exclusions"]:
- if Helpers.is_os_windows():
- exclusions.append(file.replace(server_info["path"] + "\\", ""))
- else:
- exclusions.append(file.replace(server_info["path"] + "/", ""))
- page_data["exclusions"] = exclusions
+
self.controller.servers.refresh_server_settings(server_id)
- try:
- page_data["backup_list"] = server.list_backups()
- except:
- page_data["backup_list"] = []
- page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"])
if subpage == "metrics":
try:
@@ -779,20 +764,23 @@ class PanelHandler(BaseHandler):
elif page == "download_backup":
file = self.get_argument("file", "")
+ backup_id = self.get_argument("backup_id", "")
server_id = self.check_server_id()
if server_id is None:
return
-
+ backup_config = self.controller.management.get_backup_config(backup_id)
server_info = self.controller.servers.get_server_data_by_id(server_id)
+ backup_location = os.path.join(backup_config["backup_location"], backup_id)
backup_file = os.path.abspath(
os.path.join(
- Helpers.get_os_understandable_path(server_info["backup_path"]), file
+ Helpers.get_os_understandable_path(backup_location),
+ file,
)
)
if not self.helper.is_subdir(
backup_file,
- Helpers.get_os_understandable_path(server_info["backup_path"]),
+ Helpers.get_os_understandable_path(backup_location),
) or not os.path.isfile(backup_file):
self.redirect("/panel/error?error=Invalid path detected")
return
@@ -891,6 +879,8 @@ class PanelHandler(BaseHandler):
os.path.join(self.helper.root_dir, "app", "translations")
)
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@@ -1129,6 +1119,9 @@ class PanelHandler(BaseHandler):
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
@@ -1149,6 +1142,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["delay"] = 0
page_data["schedule"]["time"] = ""
page_data["schedule"]["interval"] = 1
+ page_data["schedule"]["action_id"] = ""
# we don't need to check difficulty here.
# We'll just default to basic for new schedules
page_data["schedule"]["difficulty"] = "basic"
@@ -1157,7 +1151,7 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
@@ -1194,6 +1188,9 @@ class PanelHandler(BaseHandler):
exec_user["user_id"], server_id
)
)
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, True
+ )
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
@@ -1208,6 +1205,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["server_id"] = server_id
page_data["schedule"]["schedule_id"] = schedule.schedule_id
page_data["schedule"]["action"] = schedule.action
+ page_data["schedule"]["action_id"] = schedule.action_id
if schedule.name:
page_data["schedule"]["name"] = schedule.name
else:
@@ -1236,9 +1234,11 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["interval_type"] = schedule.interval_type
if schedule.interval_type == "reaction":
difficulty = "reaction"
- page_data["parent"] = self.controller.management.get_scheduled_task(
- schedule.parent
- )
+ page_data["parent"] = None
+ if schedule.parent:
+ page_data["parent"] = self.controller.management.get_scheduled_task(
+ schedule.parent
+ )
elif schedule.cron_string == "":
difficulty = "basic"
page_data["parent"] = None
@@ -1249,11 +1249,141 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
- self.redirect("/panel/error?error=Unauthorized access To Schedules")
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
+ elif page == "edit_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = self.controller.management.get_backup_config(
+ backup_id
+ )
+ page_data["backups"] = self.controller.management.get_backups_by_server(
+ server_id, model=True
+ )
+ exclusions = []
+ page_data["backing_up"] = self.controller.servers.get_server_instance_by_id(
+ server_id
+ ).is_backingup
+ self.controller.servers.refresh_server_settings(server_id)
+ try:
+ page_data["backup_list"] = server.list_backups(
+ page_data["backup_config"]
+ )
+ except:
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = (
+ self.controller.management.get_excluded_backup_dirs(backup_id)
+ )
+ # Make exclusion paths relative for page
+ for file in page_data["exclusions"]:
+ if Helpers.is_os_windows():
+ exclusions.append(file.replace(server_info["path"] + "\\", ""))
+ else:
+ exclusions.append(file.replace(server_info["path"] + "/", ""))
+ page_data["exclusions"] = exclusions
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
+ return
+ template = "panel/server_backup_edit.html"
+
+ elif page == "add_backup":
+ server_id = self.get_argument("id", None)
+ backup_id = self.get_argument("backup_id", None)
+ page_data["active_link"] = "backups"
+ page_data["permissions"] = {
+ "Commands": EnumPermissionsServer.COMMANDS,
+ "Terminal": EnumPermissionsServer.TERMINAL,
+ "Logs": EnumPermissionsServer.LOGS,
+ "Schedule": EnumPermissionsServer.SCHEDULE,
+ "Backup": EnumPermissionsServer.BACKUP,
+ "Files": EnumPermissionsServer.FILES,
+ "Config": EnumPermissionsServer.CONFIG,
+ "Players": EnumPermissionsServer.PLAYERS,
+ }
+ if not self.failed_server:
+ server_obj = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ page_data["backup_failed"] = server_obj.last_backup_status()
+ page_data["user_permissions"] = (
+ self.controller.server_perms.get_user_id_permissions_list(
+ exec_user["user_id"], server_id
+ )
+ )
+ server_info = self.controller.servers.get_server_data_by_id(server_id)
+ page_data["backup_config"] = {
+ "excluded_dirs": [],
+ "max_backups": 0,
+ "server_id": server_id,
+ "backup_location": os.path.join(self.helper.backup_path, server_id),
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ }
+ page_data["backing_up"] = False
+ self.controller.servers.refresh_server_settings(server_id)
+
+ page_data["backup_list"] = []
+ page_data["backup_path"] = Helpers.wtol_path(
+ page_data["backup_config"]["backup_location"]
+ )
+ page_data["server_data"] = self.controller.servers.get_server_data_by_id(
+ server_id
+ )
+ page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
+ server_id
+ )
+ page_data["server_stats"]["server_type"] = (
+ self.controller.servers.get_server_type_by_id(server_id)
+ )
+ page_data["exclusions"] = []
+
+ if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
+ if not superuser:
+ self.redirect(SCHEDULE_AUTH_ERROR_URL)
+ return
+ template = "panel/server_backup_edit.html"
+
elif page == "edit_user":
user_id = self.get_argument("id", None)
role_servers = self.controller.servers.get_authorized_servers(user_id)
@@ -1304,6 +1434,8 @@ class PanelHandler(BaseHandler):
for file in sorted(
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
):
+ if file == "humanized_index.json":
+ continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@@ -1355,6 +1487,9 @@ class PanelHandler(BaseHandler):
page_data["crafty_permissions_all"] = (
self.controller.crafty_perms.list_defined_crafty_permissions()
)
+ page_data["user_crafty_permissions"] = (
+ self.controller.crafty_perms.get_crafty_permissions_list(user_id)
+ )
if user_id is None:
self.redirect("/panel/error?error=Invalid User ID")
@@ -1402,7 +1537,7 @@ class PanelHandler(BaseHandler):
self.controller.management.add_to_audit_log(
exec_user["user_id"],
f"Removed user {target_user['username']} (UID:{user_id})",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
self.redirect("/panel/panel_config")
@@ -1502,8 +1637,6 @@ class PanelHandler(BaseHandler):
template = "panel/panel_edit_role.html"
elif page == "activity_logs":
- page_data["audit_logs"] = self.controller.management.get_activity_log()
-
template = "panel/activity_logs.html"
elif page == "download_file":
diff --git a/app/classes/web/public_handler.py b/app/classes/web/public_handler.py
index 762d3fb1..a3d89d25 100644
--- a/app/classes/web/public_handler.py
+++ b/app/classes/web/public_handler.py
@@ -1,5 +1,8 @@
import logging
+import json
import nh3
+from jsonschema import validate
+from jsonschema.exceptions import ValidationError
from app.classes.shared.helpers import Helpers
from app.classes.models.users import HelperUsers
@@ -45,7 +48,10 @@ class PublicHandler(BaseHandler):
}
if self.request.query:
- page_data["query"] = self.request.query
+ request_query = self.request.query_arguments.get("next")
+ if not request_query:
+ self.redirect("/login")
+ page_data["query"] = request_query[0].decode()
# sensible defaults
template = "public/404.html"
@@ -75,11 +81,7 @@ class PublicHandler(BaseHandler):
# if we have no page, let's go to login
else:
- if self.request.query:
- self.redirect("/login?" + self.request.query)
- else:
- self.redirect("/login")
- return
+ return self.redirect("/login")
self.render(
template,
@@ -89,33 +91,61 @@ class PublicHandler(BaseHandler):
)
def post(self, page=None):
- # pylint: disable=no-member
- error = nh3.clean(self.get_argument("error", "Invalid Login!"))
- error_msg = nh3.clean(self.get_argument("error_msg", ""))
- # pylint: enable=no-member
+ login_schema = {
+ "type": "object",
+ "properties": {
+ "username": {
+ "type": "string",
+ },
+ "password": {"type": "string"},
+ },
+ "required": ["username", "password"],
+ "additionalProperties": False,
+ }
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ logger.error(
+ "Invalid JSON schema for API"
+ f" login attempt from {self.get_remote_ip()}"
+ )
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+
+ try:
+ validate(data, login_schema)
+ except ValidationError as e:
+ logger.error(
+ "Invalid JSON schema for API"
+ f" login attempt from {self.get_remote_ip()}"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "VWggb2ghIFN0aW5reS 🪠",
+ "error_data": str(e),
+ },
+ )
page_data = {
"version": self.helper.get_version_string(),
- "error": error,
"lang": self.helper.get_setting("language"),
"lang_page": self.helper.get_lang_page(self.helper.get_setting("language")),
"query": "",
}
if self.request.query:
- page_data["query"] = self.request.query
+ page_data["query"] = self.request.query_arguments.get("next")[0].decode()
if page == "login":
+ data = json.loads(self.request.body)
+
auth_log.info(
f"User attempting to authenticate from {self.get_remote_ip()}"
)
- next_page = "/login"
- if self.request.query:
- next_page = "/login?" + self.request.query
-
- # pylint: disable=no-member
- entered_username = nh3.clean(self.get_argument("username"))
- entered_password = self.get_argument("password")
- # pylint: enable=no-member
+ entered_username = nh3.clean(data["username"]) # pylint: disable=no-member
+ entered_password = data["password"]
try:
user_id = HelperUsers.get_user_id_by_name(entered_username.lower())
@@ -127,16 +157,18 @@ class PublicHandler(BaseHandler):
f" Authentication failed from remote IP {self.get_remote_ip()}"
" Users does not exist."
)
- error_msg = "Incorrect username or password. Please try again."
+ self.finish_json(
+ 403,
+ {
+ "status": "error",
+ "error": self.helper.translation.translate(
+ "login", "incorrect", self.helper.get_setting("language")
+ ),
+ },
+ )
# self.clear_cookie("user")
# self.clear_cookie("user_data")
- self.clear_cookie("token")
- if self.request.query:
- self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
- else:
- self.redirect(f"/login?error_msg={error_msg}")
- return
-
+ return self.clear_cookie("token")
# if we don't have a user
if not user_data:
auth_log.error(
@@ -145,15 +177,18 @@ class PublicHandler(BaseHandler):
" User does not exist."
)
self.controller.log_attempt(self.get_remote_ip(), entered_username)
- error_msg = "Incorrect username or password. Please try again."
+ self.finish_json(
+ 403,
+ {
+ "status": "error",
+ "error": self.helper.translation.translate(
+ "login", "incorrect", self.helper.get_setting("language")
+ ),
+ },
+ )
# self.clear_cookie("user")
# self.clear_cookie("user_data")
- self.clear_cookie("token")
- if self.request.query:
- self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
- else:
- self.redirect(f"/login?error_msg={error_msg}")
- return
+ return self.clear_cookie("token")
# if they are disabled
if not user_data.enabled:
@@ -163,19 +198,18 @@ class PublicHandler(BaseHandler):
" User account disabled"
)
self.controller.log_attempt(self.get_remote_ip(), entered_username)
- error_msg = (
- "User account disabled. Please contact "
- "your system administrator for more info."
+ self.finish_json(
+ 403,
+ {
+ "status": "error",
+ "error": self.helper.translation.translate(
+ "login", "disabled", self.helper.get_setting("language")
+ ),
+ },
)
# self.clear_cookie("user")
# self.clear_cookie("user_data")
- self.clear_cookie("token")
- if self.request.query:
- self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
- else:
- self.redirect(f"/login?error_msg={error_msg}")
- return
-
+ return self.clear_cookie("token")
login_result = self.helper.verify_pass(entered_password, user_data.password)
# Valid Login
@@ -197,35 +231,37 @@ class PublicHandler(BaseHandler):
)
# log this login
self.controller.management.add_to_audit_log(
- user_data.user_id, "Logged in", 0, self.get_remote_ip()
+ user_data.user_id, "Logged in", None, self.get_remote_ip()
)
- if self.request.query_arguments.get("next"):
- next_page = self.request.query_arguments.get("next")[0].decode()
- else:
- next_page = "/panel/dashboard"
+ return self.finish_json(
+ 200, {"status": "ok", "data": {"message": "login successful!"}}
+ )
- self.redirect(next_page)
- else:
- auth_log.error(
- f"User attempted to log into {entered_username}."
- f" Authentication failed from remote IP {self.get_remote_ip()}"
+ # We'll continue on and handle unsuccessful logins
+ auth_log.error(
+ f"User attempted to log into {entered_username}."
+ f" Authentication failed from remote IP {self.get_remote_ip()}"
+ )
+ self.controller.log_attempt(self.get_remote_ip(), entered_username)
+ # self.clear_cookie("user")
+ # self.clear_cookie("user_data")
+ self.clear_cookie("token")
+ error_msg = self.helper.translation.translate(
+ "login", "incorrect", self.helper.get_setting("language")
+ )
+ if entered_password == "app/config/default-creds.txt":
+ error_msg += ". "
+ error_msg += self.helper.translation.translate(
+ "login", "defaultPath", self.helper.get_setting("language")
)
- self.controller.log_attempt(self.get_remote_ip(), entered_username)
- # self.clear_cookie("user")
- # self.clear_cookie("user_data")
- self.clear_cookie("token")
- error_msg = "Incorrect username or password. Please try again."
- # log this failed login attempt
- self.controller.management.add_to_audit_log(
- user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
- )
- if self.request.query:
- self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
- else:
- self.redirect(f"/login?error_msg={error_msg}")
+ # log this failed login attempt
+ self.controller.management.add_to_audit_log(
+ user_data.user_id, "Tried to log in", None, self.get_remote_ip()
+ )
+ return self.finish_json(
+ 403,
+ {"status": "error", "error": error_msg},
+ )
else:
- if self.request.query:
- self.redirect("/login?" + self.request.query)
- else:
- self.redirect("/login")
+ self.redirect("/login?")
diff --git a/app/classes/web/routes/api/api_handlers.py b/app/classes/web/routes/api/api_handlers.py
index d0cb3143..a2f07135 100644
--- a/app/classes/web/routes/api/api_handlers.py
+++ b/app/classes/web/routes/api/api_handlers.py
@@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import (
)
from app.classes.web.routes.api.servers.server.backups.backup.index import (
ApiServersServerBackupsBackupIndexHandler,
+ ApiServersServerBackupsBackupFilesIndexHandler,
)
from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
)
+from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@@ -221,92 +223,113 @@ def api_handlers(handler_args):
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/?",
+ r"/api/v2/servers/([a-z0-9-]+)/?",
ApiServersServerIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/backups/?",
+ r"/api/v2/servers/([a-z0-9-]+)/backups/?",
ApiServersServerBackupsIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/backups/backup/?",
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?",
ApiServersServerBackupsBackupIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/files/?",
- ApiServersServerFilesIndexHandler,
+ r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?",
+ ApiServersServerBackupsBackupFilesIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/files/create/?",
+ r"/api/v2/servers/([a-z0-9-]+)/files/create/?",
ApiServersServerFilesCreateHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/files/zip/?",
+ r"/api/v2/servers/([a-z0-9-]+)/files/zip/?",
ApiServersServerFilesZipHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/tasks/?",
+ r"/api/v2/crafty/admin/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/import/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
+ ApiFilesUploadHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?",
+ ApiServersServerFilesIndexHandler,
+ handler_args,
+ ),
+ (
+ r"/api/v2/servers/([a-z0-9-]+)/tasks/?",
ApiServersServerTasksIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/tasks/([0-9]+)/?",
+ r"/api/v2/servers/([a-z0-9-]+)/tasks/([0-9]+)/?",
ApiServersServerTasksTaskIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/tasks/([0-9]+)/children/?",
+ r"/api/v2/servers/([a-z0-9-]+)/tasks/([0-9]+)/children/?",
ApiServersServerTasksTaskChildrenHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/stats/?",
+ r"/api/v2/servers/([a-z0-9-]+)/stats/?",
ApiServersServerStatsHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/history/?",
+ r"/api/v2/servers/([a-z0-9-]+)/history/?",
ApiServersServerHistoryHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/webhook/([0-9]+)/?",
+ r"/api/v2/servers/([a-z0-9-]+)/webhook/([0-9]+)/?",
ApiServersServerWebhooksManagementIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/webhook/?",
+ r"/api/v2/servers/([a-z0-9-]+)/webhook/?",
ApiServersServerWebhooksIndexHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/action/([a-z_]+)/?",
+            # optional third argument when we need an action ID
+ r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?",
ApiServersServerActionHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/logs/?",
+ r"/api/v2/servers/([a-z0-9-]+)/logs/?",
ApiServersServerLogsHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/users/?",
+ r"/api/v2/servers/([a-z0-9-]+)/users/?",
ApiServersServerUsersHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/public/?",
+ r"/api/v2/servers/([a-z0-9-]+)/public/?",
ApiServersServerPublicHandler,
handler_args,
),
(
- r"/api/v2/servers/([0-9]+)/stdin/?",
+ r"/api/v2/servers/([a-z0-9-]+)/stdin/?",
ApiServersServerStdinHandler,
handler_args,
),
diff --git a/app/classes/web/routes/api/auth/invalidate_tokens.py b/app/classes/web/routes/api/auth/invalidate_tokens.py
index f15bf60d..9e38670a 100644
--- a/app/classes/web/routes/api/auth/invalidate_tokens.py
+++ b/app/classes/web/routes/api/auth/invalidate_tokens.py
@@ -1,6 +1,6 @@
-import datetime
import logging
from app.classes.web.base_api_handler import BaseApiHandler
+from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler):
logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
self.controller.users.raw_update_user(
- auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
+ auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()}
)
self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/auth/login.py b/app/classes/web/routes/api/auth/login.py
index b91b295d..7e8131f3 100644
--- a/app/classes/web/routes/api/auth/login.py
+++ b/app/classes/web/routes/api/auth/login.py
@@ -17,7 +17,7 @@ login_schema = {
"minLength": 4,
"pattern": "^[a-z0-9_]+$",
},
- "password": {"type": "string", "maxLength": 20, "minLength": 4},
+ "password": {"type": "string", "minLength": 4},
},
"required": ["username", "password"],
"additionalProperties": False,
@@ -101,7 +101,7 @@ class ApiAuthLoginHandler(BaseApiHandler):
# log this login
self.controller.management.add_to_audit_log(
- user_data.user_id, "logged in via the API", 0, self.get_remote_ip()
+ user_data.user_id, "logged in via the API", None, self.get_remote_ip()
)
self.finish_json(
@@ -119,7 +119,7 @@ class ApiAuthLoginHandler(BaseApiHandler):
else:
# log this failed login attempt
self.controller.management.add_to_audit_log(
- user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
+ user_data.user_id, "Tried to log in", None, self.get_remote_ip()
)
self.finish_json(
401,
diff --git a/app/classes/web/routes/api/crafty/announcements/index.py b/app/classes/web/routes/api/crafty/announcements/index.py
index 75f00f16..d66c4473 100644
--- a/app/classes/web/routes/api/crafty/announcements/index.py
+++ b/app/classes/web/routes/api/crafty/announcements/index.py
@@ -26,6 +26,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
+ _,
) = auth_data
data = self.helper.get_announcements()
@@ -72,6 +73,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
+ _,
) = auth_data
try:
data = json.loads(self.request.body)
diff --git a/app/classes/web/routes/api/crafty/clogs/index.py b/app/classes/web/routes/api/crafty/clogs/index.py
index 97a24a34..35f48a7f 100644
--- a/app/classes/web/routes/api/crafty/clogs/index.py
+++ b/app/classes/web/routes/api/crafty/clogs/index.py
@@ -1,3 +1,5 @@
+import os
+import json
from app.classes.web.base_api_handler import BaseApiHandler
@@ -12,6 +14,7 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
if not superuser:
@@ -22,9 +25,17 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
raise NotImplementedError
if log_type == "audit":
+ with open(
+ os.path.join(self.controller.project_root, "logs", "audit.log"),
+ "r",
+ encoding="utf-8",
+ ) as f:
+ log_lines = [json.loads(line) for line in f]
+ rev_log_lines = log_lines[::-1]
+
return self.finish_json(
200,
- {"status": "ok", "data": self.controller.management.get_activity_log()},
+ {"status": "ok", "data": rev_log_lines},
)
if log_type == "session":
diff --git a/app/classes/web/routes/api/crafty/config/index.py b/app/classes/web/routes/api/crafty/config/index.py
index c901732c..d625d339 100644
--- a/app/classes/web/routes/api/crafty/config/index.py
+++ b/app/classes/web/routes/api/crafty/config/index.py
@@ -9,7 +9,6 @@ from app.classes.web.base_api_handler import BaseApiHandler
config_json_schema = {
"type": "object",
"properties": {
- "http_port": {"type": "integer"},
"https_port": {"type": "integer"},
"language": {
"type": "string",
@@ -32,6 +31,7 @@ config_json_schema = {
"monitored_mounts": {"type": "array"},
"dir_size_poll_freq_minutes": {"type": "integer"},
"crafty_logs_delete_after_days": {"type": "integer"},
+ "big_bucket_repo": {"type": "string"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -68,6 +68,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
# GET /api/v2/roles?ids=true
@@ -94,20 +95,14 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- (
- _,
- _,
- _,
- superuser,
- user,
- ) = auth_data
+ (_, _, _, superuser, user, _) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
data = orjson.loads(self.request.body)
- except orjson.decoder.JSONDecodeError as e:
+ except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -129,7 +124,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
"edited config.json",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
@@ -150,6 +145,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
# GET /api/v2/roles?ids=true
@@ -182,13 +178,14 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
user,
+ _,
) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
data = orjson.loads(self.request.body)
- except orjson.decoder.JSONDecodeError as e:
+ except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -226,7 +223,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"customized login photo: {data['photo']}/{data['opacity']}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
self.controller.management.set_login_opacity(int(data["opacity"]))
diff --git a/app/classes/web/routes/api/crafty/config/server_dir.py b/app/classes/web/routes/api/crafty/config/server_dir.py
index 91c4cc89..bc88cba9 100644
--- a/app/classes/web/routes/api/crafty/config/server_dir.py
+++ b/app/classes/web/routes/api/crafty/config/server_dir.py
@@ -24,6 +24,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
# GET /api/v2/roles?ids=true
@@ -56,6 +57,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
_,
_,
+ _,
) = auth_data
if not auth_data:
@@ -68,7 +70,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
try:
data = orjson.loads(self.request.body)
- except orjson.decoder.JSONDecodeError as e:
+ except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -109,7 +111,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"updated master servers dir to {new_dir}/servers",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/crafty/exe_cache.py b/app/classes/web/routes/api/crafty/exe_cache.py
index f27b8646..5952a048 100644
--- a/app/classes/web/routes/api/crafty/exe_cache.py
+++ b/app/classes/web/routes/api/crafty/exe_cache.py
@@ -12,17 +12,18 @@ class ApiCraftyJarCacheIndexHandler(BaseApiHandler):
_,
_,
_,
+ _,
) = auth_data
if not auth_data[4]["superuser"]:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.controller.server_jars.manual_refresh_cache()
+ self.controller.big_bucket.manual_refresh_cache()
self.finish_json(
200,
{
"status": "ok",
- "data": self.controller.server_jars.get_serverjar_data(),
+ "data": self.controller.big_bucket.get_bucket_data(),
},
)
diff --git a/app/classes/web/routes/api/crafty/upload/index.py b/app/classes/web/routes/api/crafty/upload/index.py
new file mode 100644
index 00000000..b37ef796
--- /dev/null
+++ b/app/classes/web/routes/api/crafty/upload/index.py
@@ -0,0 +1,308 @@
+import os
+import logging
+import shutil
+from app.classes.models.server_permissions import EnumPermissionsServer
+from app.classes.shared.helpers import Helpers
+from app.classes.web.base_api_handler import BaseApiHandler
+
+logger = logging.getLogger(__name__)
+IMAGE_MIME_TYPES = [
+ "image/bmp",
+ "image/cis-cod",
+ "image/gif",
+ "image/ief",
+ "image/jpeg",
+ "image/pipeg",
+ "image/svg+xml",
+ "image/tiff",
+ "image/x-cmu-raster",
+ "image/x-cmx",
+ "image/x-icon",
+ "image/x-portable-anymap",
+ "image/x-portable-bitmap",
+ "image/x-portable-graymap",
+ "image/x-portable-pixmap",
+ "image/x-rgb",
+ "image/x-xbitmap",
+ "image/x-xpixmap",
+ "image/x-xwindowdump",
+ "image/png",
+ "image/webp",
+]
+
+ARCHIVE_MIME_TYPES = ["application/zip"]
+
+
+class ApiFilesUploadHandler(BaseApiHandler):
+ async def post(self, server_id=None):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ upload_type = self.request.headers.get("type")
+ accepted_types = []
+
+ if server_id:
+ # Check to make sure user is authorized for the server
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ # Make sure user has file access for the server
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
+ # if the user doesn't have Files permission, return an error
+ return self.finish_json(
+ 400, {"status": "error", "error": "NOT_AUTHORIZED"}
+ )
+
+ u_type = "server_upload"
+ # Make sure user is a super user if they're changing panel settings
+ elif auth_data[4]["superuser"] and upload_type == "background":
+ u_type = "admin_config"
+ self.upload_dir = os.path.join(
+ self.controller.project_root,
+ "app/frontend/static/assets/images/auth/custom",
+ )
+ accepted_types = IMAGE_MIME_TYPES
+ elif upload_type == "import":
+ # Check that user can make servers
+ if (
+ not self.controller.crafty_perms.can_create_server(
+ auth_data[4]["user_id"]
+ )
+ and not auth_data[4]["superuser"]
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Set directory to upload import dir
+ self.upload_dir = os.path.join(
+ self.controller.project_root, "import", "upload"
+ )
+ u_type = "server_import"
+ accepted_types = ARCHIVE_MIME_TYPES
+ else:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "data": {"message": ""},
+ },
+ )
+ # Get the headers from the request
+ self.chunk_hash = self.request.headers.get("chunkHash", 0)
+ self.file_id = self.request.headers.get("fileId")
+ self.chunked = self.request.headers.get("chunked", False)
+ self.filename = self.request.headers.get("fileName", None)
+ try:
+ file_size = int(self.request.headers.get("fileSize", None))
+ total_chunks = int(self.request.headers.get("totalChunks", 0))
+ except TypeError:
+ return self.finish_json(
+ 400, {"status": "error", "error": "TYPE ERROR", "data": {}}
+ )
+ self.chunk_index = self.request.headers.get("chunkId")
+ if u_type == "server_upload":
+ self.upload_dir = self.request.headers.get("location", None)
+ self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
+
+ if u_type == "server_upload":
+            # If this is an upload from a server, the path will be
+            # whatever is requested
+ full_path = os.path.join(self.upload_dir, self.filename)
+
+ # Check to make sure the requested path is inside the server's directory
+ if not self.helper.is_subdir(
+ full_path,
+ Helpers.get_os_understandable_path(
+ self.controller.servers.get_server_data_by_id(server_id)["path"]
+ ),
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT AUTHORIZED",
+ "data": {"message": "Traversal detected"},
+ },
+ )
+ # Check to make sure the file type we're being sent is what we're expecting
+ if (
+ self.file_helper.check_mime_types(self.filename) not in accepted_types
+ and u_type != "server_upload"
+ ):
+ return self.finish_json(
+ 422,
+ {
+ "status": "error",
+ "error": "INVALID FILE TYPE",
+ "data": {
+ "message": f"Invalid File Type only accepts {accepted_types}"
+ },
+ },
+ )
+ _total, _used, free = shutil.disk_usage(self.upload_dir)
+
+ # Check to see if we have enough space
+ if free <= file_size:
+ return self.finish_json(
+ 507,
+ {
+ "status": "error",
+ "error": "NO STORAGE SPACE",
+ "data": {"message": "Out Of Space!"},
+ },
+ )
+
+        # If this has no chunk index we know it's the initial request
+ if self.chunked and not self.chunk_index:
+ return self.finish_json(
+ 200, {"status": "ok", "data": {"file-id": self.file_id}}
+ )
+ # Create the upload and temp directories if they don't exist
+ os.makedirs(self.upload_dir, exist_ok=True)
+
+ # Check for chunked header. We will handle this request differently
+ # if it doesn't exist
+ if not self.chunked:
+ # Write the file directly to the upload dir
+ with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
+ chunk = self.request.body
+ if chunk:
+ file.write(chunk)
+ # We'll check the file hash against the sent hash once the file is
+ # written. We cannot check this buffer.
+ calculated_hash = self.file_helper.calculate_file_hash(
+ os.path.join(self.upload_dir, self.filename)
+ )
+ logger.info(
+ f"File upload completed. Filename: {self.filename} Type: {u_type}"
+ )
+ return self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ # Since this is a chunked upload we'll create the temp dir for parts.
+ os.makedirs(self.temp_dir, exist_ok=True)
+
+ # Read headers and query parameters
+ content_length = int(self.request.headers.get("Content-Length"))
+ if content_length <= 0:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID CONTENT LENGTH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID CONTENT LENGTH",
+ "data": {"message": "Invalid content length"},
+ },
+ )
+
+ # At this point filename, chunk index and total chunks are required
+ # in the request
+ if not self.filename or self.chunk_index is None:
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: CHUNK INDEX NOT FOUND"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INDEX ERROR",
+ "data": {
+ "message": "Filename, chunk_index,"
+ " and total_chunks are required"
+ },
+ },
+ )
+
+ # Calculate the hash of the buffer and compare it against the expected hash
+ calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body)
+ if str(self.chunk_hash) != str(calculated_hash):
+ logger.error(
+ f"File upload failed. Filename: {self.filename}"
+ f"Type: {u_type} Error: INVALID HASH"
+ )
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_HASH",
+ "data": {
+ "message": "Hash recieved does not match reported sent hash.",
+ "chunk_id": self.chunk_index,
+ },
+ },
+ )
+
+ # File paths
+ file_path = os.path.join(self.upload_dir, self.filename)
+ chunk_path = os.path.join(
+ self.temp_dir, f"{self.filename}.part{self.chunk_index}"
+ )
+
+ # Save the chunk
+ with open(chunk_path, "wb") as f:
+ f.write(self.request.body)
+
+ # Check if all chunks are received
+ received_chunks = [
+ f
+ for f in os.listdir(self.temp_dir)
+ if f.startswith(f"{self.filename}.part")
+ ]
+ # When we've reached the total chunks we'll
+ # Compare the hash and write the file
+ if len(received_chunks) == total_chunks:
+ with open(file_path, "wb") as outfile:
+ for i in range(total_chunks):
+ chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
+ with open(chunk_file, "rb") as infile:
+ outfile.write(infile.read())
+ os.remove(chunk_file)
+ logger.info(
+ f"File upload completed. Filename: {self.filename}"
+ f" Path: {file_path} Type: {u_type}"
+ )
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Uploaded file {self.filename}",
+ server_id,
+ self.request.remote_ip,
+ )
+ self.finish_json(
+ 200,
+ {
+ "status": "completed",
+ "data": {"message": "File uploaded successfully"},
+ },
+ )
+ else:
+ self.finish_json(
+ 200,
+ {
+ "status": "partial",
+ "data": {"message": f"Chunk {self.chunk_index} received"},
+ },
+ )
diff --git a/app/classes/web/routes/api/roles/index.py b/app/classes/web/routes/api/roles/index.py
index dce6f453..45a00bf0 100644
--- a/app/classes/web/routes/api/roles/index.py
+++ b/app/classes/web/routes/api/roles/index.py
@@ -2,6 +2,7 @@ import typing as t
from jsonschema import ValidationError, validate
import orjson
from playhouse.shortcuts import model_to_dict
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
create_role_schema = {
@@ -10,6 +11,7 @@ create_role_schema = {
"name": {
"type": "string",
"minLength": 1,
+ "pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@@ -17,12 +19,12 @@ create_role_schema = {
"type": "object",
"properties": {
"server_id": {
- "type": "integer",
+ "type": "string",
"minimum": 1,
},
"permissions": {
"type": "string",
- "pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
+ "pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@@ -47,7 +49,7 @@ basic_create_role_schema = {
"type": "object",
"properties": {
"server_id": {
- "type": "integer",
+ "type": "string",
"minimum": 1,
},
"permissions": {
@@ -71,16 +73,20 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
+ _,
) = auth_data
# GET /api/v2/roles?ids=true
get_only_ids = self.get_query_argument("ids", None) == "true"
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@@ -103,13 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
+ _,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -136,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler):
role_name = data["name"]
manager = data.get("manager", None)
+ if not superuser and not manager:
+ manager = auth_data[4]["user_id"]
if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0:
manager = None
@@ -161,7 +173,7 @@ class ApiRolesIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"created role {role_name} (RID:{role_id})",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/roles/role/index.py b/app/classes/web/routes/api/roles/role/index.py
index 0dd7d6c8..1eab6183 100644
--- a/app/classes/web/routes/api/roles/role/index.py
+++ b/app/classes/web/routes/api/roles/role/index.py
@@ -1,6 +1,7 @@
from jsonschema import ValidationError, validate
import orjson
-from peewee import DoesNotExist
+from peewee import DoesNotExist, IntegrityError
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
modify_role_schema = {
@@ -9,6 +10,7 @@ modify_role_schema = {
"name": {
"type": "string",
"minLength": 1,
+ "pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@@ -16,12 +18,12 @@ modify_role_schema = {
"type": "object",
"properties": {
"server_id": {
- "type": "integer",
+ "type": "string",
"minimum": 1,
},
"permissions": {
"type": "string",
- "pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
+ "pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@@ -46,7 +48,7 @@ basic_modify_role_schema = {
"type": "object",
"properties": {
"server_id": {
- "type": "integer",
+ "type": "string",
"minimum": 1,
},
"permissions": {
@@ -70,13 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
_,
+ _,
) = auth_data
- if not superuser:
+ if (
+ not superuser
+ and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -97,9 +103,13 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
_,
superuser,
user,
+ _,
) = auth_data
-
- if not superuser:
+ role = self.controller.roles.get_role(role_id)
+ if (
+ str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.roles.remove_role(role_id)
@@ -112,7 +122,7 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"deleted role with ID {role_id}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
@@ -122,18 +132,30 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
- _,
+ exec_user_permissions_crafty,
_,
superuser,
user,
+ _,
) = auth_data
- if not superuser:
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+ role = self.controller.roles.get_role(role_id)
+ if not superuser and (
+ user["user_id"] != role["manager"]
+ or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+ ):
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": "Not Authorized",
+ },
+ )
try:
data = orjson.loads(self.request.body)
- except orjson.decoder.JSONDecodeError as e:
+ except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -168,11 +190,14 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
)
except DoesNotExist:
return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
-
+ except IntegrityError:
+ return self.finish_json(
+ 404, {"status": "error", "error": "ROLE_NAME_EXISTS"}
+ )
self.controller.management.add_to_audit_log(
user["user_id"],
f"modified role with ID {role_id}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/roles/role/servers.py b/app/classes/web/routes/api/roles/role/servers.py
index 0a0eff6f..8f41f6c6 100644
--- a/app/classes/web/routes/api/roles/role/servers.py
+++ b/app/classes/web/routes/api/roles/role/servers.py
@@ -13,6 +13,7 @@ class ApiRolesRoleServersHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
# GET /api/v2/roles/role/servers?ids=true
diff --git a/app/classes/web/routes/api/roles/role/users.py b/app/classes/web/routes/api/roles/role/users.py
index ac2227ac..48444ead 100644
--- a/app/classes/web/routes/api/roles/role/users.py
+++ b/app/classes/web/routes/api/roles/role/users.py
@@ -12,6 +12,7 @@ class ApiRolesRoleUsersHandler(BaseApiHandler):
_,
superuser,
_,
+ _,
) = auth_data
if not superuser:
diff --git a/app/classes/web/routes/api/servers/index.py b/app/classes/web/routes/api/servers/index.py
index 65d64d7e..e92bb637 100644
--- a/app/classes/web/routes/api/servers/index.py
+++ b/app/classes/web/routes/api/servers/index.py
@@ -23,6 +23,7 @@ new_server_schema = {
"type": "string",
"examples": ["My Server"],
"minLength": 2,
+ "pattern": "^[^/\\\\]*$",
},
"roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]},
"stop_command": {
@@ -139,7 +140,7 @@ new_server_schema = {
"category": {
"title": "Jar Category",
"type": "string",
- "examples": ["modded", "vanilla"],
+ "examples": ["Mc_java_servers", "Mc_java_proxies"],
},
"properties": {
"type": {
@@ -743,6 +744,7 @@ class ApiServersIndexHandler(BaseApiHandler):
_,
_superuser,
user,
+ _,
) = auth_data
if EnumPermissionsCrafty.SERVER_CREATION not in exec_user_crafty_permissions:
@@ -782,9 +784,7 @@ class ApiServersIndexHandler(BaseApiHandler):
405, {"status": "error", "error": "DATA CONSTRAINT FAILED"}
)
return
- new_server_id, new_server_uuid = self.controller.create_api_server(
- data, user["user_id"]
- )
+ new_server_id = self.controller.create_api_server(data, user["user_id"])
self.controller.servers.stats.record_stats()
@@ -793,7 +793,7 @@ class ApiServersIndexHandler(BaseApiHandler):
(
f"created server {data['name']}"
f" (ID: {new_server_id})"
- f" (UUID: {new_server_uuid})"
+ f" (UUID: {new_server_id})"
),
server_id=new_server_id,
source_ip=self.get_remote_ip(),
@@ -805,7 +805,7 @@ class ApiServersIndexHandler(BaseApiHandler):
"status": "ok",
"data": {
"new_server_id": str(new_server_id),
- "new_server_uuid": new_server_uuid,
+ "new_server_uuid": new_server_id,
},
},
)
diff --git a/app/classes/web/routes/api/servers/server/action.py b/app/classes/web/routes/api/servers/server/action.py
index a30ab410..d8e58b2f 100644
--- a/app/classes/web/routes/api/servers/server/action.py
+++ b/app/classes/web/routes/api/servers/server/action.py
@@ -1,9 +1,9 @@
import logging
import os
+import json
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.servers import Servers
from app.classes.shared.file_helpers import FileHelpers
-from app.classes.shared.helpers import Helpers
from app.classes.web.base_api_handler import BaseApiHandler
@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
class ApiServersServerActionHandler(BaseApiHandler):
- def post(self, server_id: str, action: str):
+ def post(self, server_id: str, action: str, action_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -19,13 +19,14 @@ class ApiServersServerActionHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.COMMANDS
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.COMMANDS not in server_permissions:
# if the user doesn't have Commands permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -34,6 +35,17 @@ class ApiServersServerActionHandler(BaseApiHandler):
self.controller.crafty_perms.can_create_server(auth_data[4]["user_id"])
or auth_data[4]["superuser"]
):
+ srv_object = self.controller.servers.get_server_instance_by_id(
+ server_id
+ )
+ if srv_object.check_running():
+ return self.finish_json(
+ 409,
+ {
+ "status": "error",
+ "error": "Server Running!",
+ },
+ )
self._clone_server(server_id, auth_data[4]["user_id"])
return self.finish_json(200, {"status": "ok"})
return self.finish_json(
@@ -43,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler):
return self._agree_eula(server_id, auth_data[4]["user_id"])
self.controller.management.send_command(
- auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
+ auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id
)
self.finish_json(
@@ -68,10 +80,44 @@ class ApiServersServerActionHandler(BaseApiHandler):
name_counter += 1
new_server_name = server_data.get("server_name") + f" (Copy {name_counter})"
- new_server_uuid = Helpers.create_uuid()
- while os.path.exists(os.path.join(self.helper.servers_dir, new_server_uuid)):
- new_server_uuid = Helpers.create_uuid()
- new_server_path = os.path.join(self.helper.servers_dir, new_server_uuid)
+ new_server_id = self.helper.create_uuid()
+ new_server_path = os.path.join(self.helper.servers_dir, new_server_id)
+ new_backup_path = os.path.join(self.helper.backup_path, new_server_id)
+ backup_data = {
+ "backup_name": f"{new_server_name} Backup",
+ "backup_location": new_backup_path,
+ "excluded_dirs": "",
+ "max_backups": 0,
+ "server_id": new_server_id,
+ "compress": False,
+ "shutdown": False,
+ "before": "",
+ "after": "",
+ "default": True,
+ "status": json.dumps({"status": "Standby", "message": ""}),
+ "enabled": True,
+ }
+ new_server_command = str(server_data.get("execution_command")).replace(
+ server_id, new_server_id
+ )
+ new_server_log_path = server_data.get("log_path").replace(
+ server_id, new_server_id
+ )
+
+ self.controller.register_server(
+ new_server_name,
+ new_server_id,
+ new_server_path,
+ new_server_command,
+ server_data.get("executable"),
+ new_server_log_path,
+ server_data.get("stop_command"),
+ server_data.get("server_port"),
+ user_id,
+ server_data.get("type"),
+ )
+
+ self.controller.management.add_backup_config(backup_data)
self.controller.management.add_to_audit_log(
user_id,
@@ -83,25 +129,6 @@ class ApiServersServerActionHandler(BaseApiHandler):
# copy the old server
FileHelpers.copy_dir(server_data.get("path"), new_server_path)
- # TODO get old server DB data to individual variables
- new_server_command = str(server_data.get("execution_command"))
- new_server_log_file = str(
- self.helper.get_os_understandable_path(server_data.get("log_path"))
- )
-
- new_server_id = self.controller.servers.create_server(
- new_server_name,
- new_server_uuid,
- new_server_path,
- "",
- new_server_command,
- server_data.get("executable"),
- new_server_log_file,
- server_data.get("stop_command"),
- server_data.get("type"),
- user_id,
- server_data.get("server_port"),
- )
for role in self.controller.server_perms.get_server_roles(server_id):
mask = self.controller.server_perms.get_permissions_mask(
role.role_id, server_id
diff --git a/app/classes/web/routes/api/servers/server/backups/backup/index.py b/app/classes/web/routes/api/servers/server/backups/backup/index.py
index 9a4ecc30..5d8fd2b5 100644
--- a/app/classes/web/routes/api/servers/server/backups/backup/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/backup/index.py
@@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
-backup_schema = {
+BACKUP_SCHEMA = {
"type": "object",
"properties": {
"filename": {"type": "string", "minLength": 5},
@@ -19,36 +19,157 @@ backup_schema = {
"additionalProperties": False,
"minProperties": 1,
}
+BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+
+BASIC_BACKUP_PATCH_SCHEMA = {
+ "type": "object",
+ "properties": {
+ "backup_name": {"type": "string", "minLength": 3},
+ "max_backups": {"type": "integer"},
+ "compress": {"type": "boolean"},
+ "shutdown": {"type": "boolean"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
+ },
+ "additionalProperties": False,
+ "minProperties": 1,
+}
+ID_MISMATCH = "Server ID and backup's server ID do not match"
+GENERAL_AUTH_ERROR = "Authorization Error"
class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
- def get(self, server_id: str):
+ def get(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
if not auth_data:
return
- if (
- EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
)
- ):
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.finish_json(200, backup_conf)
- def delete(self, server_id: str):
+ def delete(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
- backup_conf = self.controller.management.get_backup_config(server_id)
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
if not auth_data:
return
- if (
- EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Edited server {server_id}: removed backup config"
+ f" {backup_conf['backup_name']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+ if backup_conf["default"]:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "NOT_ALLOWED",
+ "error_data": "Cannot delete default backup",
+ },
+ )
+ self.controller.management.delete_backup_config(backup_id)
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def post(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+            # if the user doesn't have Backup permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
try:
data = json.loads(self.request.body)
@@ -57,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
- validate(data, backup_schema)
+ validate(data, BACKUP_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
@@ -68,9 +189,246 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
},
)
+ svr_obj = self.controller.servers.get_server_obj(server_id)
+ server_data = self.controller.servers.get_server_data_by_id(server_id)
+ zip_name = data["filename"]
+ # import the server again based on zipfile
+ backup_config = self.controller.management.get_backup_config(backup_id)
+ backup_location = os.path.join(
+ backup_config["backup_location"], backup_config["backup_id"]
+ )
+ if Helpers.validate_traversal(backup_location, zip_name):
+ try:
+ temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name)
+ except (FileNotFoundError, NotADirectoryError) as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
+ )
+ if server_data["type"] == "minecraft-java":
+ new_server = self.controller.restore_java_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ "1",
+ "2",
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ elif server_data["type"] == "minecraft-bedrock":
+ new_server = self.controller.restore_bedrock_zip_server(
+ svr_obj.server_name,
+ temp_dir,
+ server_data["executable"],
+ server_data["server_port"],
+ server_data["created_by"],
+ )
+ new_server_id = new_server
+ new_server = self.controller.servers.get_server_data(new_server)
+ self.controller.rename_backup_dir(
+ server_id,
+ new_server_id,
+ new_server["server_id"],
+ )
+ # preserve current schedules
+ for schedule in self.controller.management.get_schedules_by_server(
+ server_id
+ ):
+ job_data = self.controller.management.get_scheduled_task(
+ schedule.schedule_id
+ )
+ job_data["server_id"] = new_server_id
+ del job_data["schedule_id"]
+ self.tasks_manager.update_job(schedule.schedule_id, job_data)
+ # preserve execution command
+ new_server_obj = self.controller.servers.get_server_obj(new_server_id)
+ new_server_obj.execution_command = server_data["execution_command"]
+ # reset executable path
+ if svr_obj.path in svr_obj.executable:
+ new_server_obj.executable = str(svr_obj.executable).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ # reset run command path
+ if svr_obj.path in svr_obj.execution_command:
+ new_server_obj.execution_command = str(
+ svr_obj.execution_command
+ ).replace(svr_obj.path, new_server_obj.path)
+ # reset log path
+ if svr_obj.path in svr_obj.log_path:
+ new_server_obj.log_path = str(svr_obj.log_path).replace(
+ svr_obj.path, new_server_obj.path
+ )
+ self.controller.servers.update_server(new_server_obj)
+
+ # preserve backup config
+ server_backups = self.controller.management.get_backups_by_server(server_id)
+ for backup in server_backups:
+ old_backup_id = server_backups[backup]["backup_id"]
+ del server_backups[backup]["backup_id"]
+ server_backups[backup]["server_id"] = new_server_id
+ if str(server_id) in (server_backups[backup]["backup_location"]):
+ server_backups[backup]["backup_location"] = str(
+ server_backups[backup]["backup_location"]
+ ).replace(str(server_id), str(new_server_id))
+ new_backup_id = self.controller.management.add_backup_config(
+ server_backups[backup]
+ )
+ os.listdir(server_backups[backup]["backup_location"])
+ FileHelpers.move_dir(
+ os.path.join(
+ server_backups[backup]["backup_location"], old_backup_id
+ ),
+ os.path.join(
+ server_backups[backup]["backup_location"], new_backup_id
+ ),
+ )
+ # remove old server's tasks
+ try:
+ self.tasks_manager.remove_all_server_tasks(server_id)
+ except JobLookupError as e:
+            logger.info(f"No active tasks found for server: {e}")
+ self.controller.remove_server(server_id, True)
+
+ self.controller.management.add_to_audit_log(
+ auth_data[4]["user_id"],
+ f"Restored server {server_id} backup {data['filename']}",
+ server_id,
+ self.get_remote_ip(),
+ )
+
+ return self.finish_json(200, {"status": "ok"})
+
+ def patch(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ if not auth_data:
+ return
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+
+ try:
+ if auth_data[4]["superuser"]:
+ validate(data, BACKUP_PATCH_SCHEMA)
+ else:
+ validate(data, BASIC_BACKUP_PATCH_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+ # if the user doesn't have access to the server, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+            # if the user doesn't have Backup permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+ self.controller.management.update_backup_config(backup_id, data)
+ return self.finish_json(200, {"status": "ok"})
+
+
+class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler):
+ def delete(self, server_id: str, backup_id: str):
+ auth_data = self.authenticate_user()
+ backup_conf = self.controller.management.get_backup_config(backup_id)
+ if backup_conf["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "ID_MISMATCH",
+ "error_data": ID_MISMATCH,
+ },
+ )
+ if not auth_data:
+ return
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
+            # if the user doesn't have Backup permission, return an error
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "NOT_AUTHORIZED",
+ "error_data": GENERAL_AUTH_ERROR,
+ },
+ )
+
+ try:
+ data = json.loads(self.request.body)
+ except json.decoder.JSONDecodeError as e:
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+ )
+ try:
+ validate(data, BACKUP_SCHEMA)
+ except ValidationError as e:
+ return self.finish_json(
+ 400,
+ {
+ "status": "error",
+ "error": "INVALID_JSON_SCHEMA",
+ "error_data": str(e),
+ },
+ )
+ self.helper.validate_traversal(
+ os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]),
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ ),
+ )
try:
FileHelpers.del_file(
- os.path.join(backup_conf["backup_path"], data["filename"])
+ os.path.join(
+ backup_conf["backup_location"],
+ backup_conf["backup_id"],
+ data["filename"],
+ )
)
except Exception as e:
return self.finish_json(
@@ -84,134 +442,3 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
)
return self.finish_json(200, {"status": "ok"})
-
- def post(self, server_id: str):
- auth_data = self.authenticate_user()
- if not auth_data:
- return
- if (
- EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
- auth_data[4]["user_id"], server_id
- )
- ):
- # if the user doesn't have Schedule permission, return an error
- return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- try:
- data = json.loads(self.request.body)
- except json.decoder.JSONDecodeError as e:
- return self.finish_json(
- 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
- )
- try:
- validate(data, backup_schema)
- except ValidationError as e:
- return self.finish_json(
- 400,
- {
- "status": "error",
- "error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
- },
- )
-
- try:
- svr_obj = self.controller.servers.get_server_obj(server_id)
- server_data = self.controller.servers.get_server_data_by_id(server_id)
- zip_name = data["filename"]
- # import the server again based on zipfile
- backup_path = svr_obj.backup_path
- if Helpers.validate_traversal(backup_path, zip_name):
- temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
- if server_data["type"] == "minecraft-java":
- new_server = self.controller.restore_java_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- "1",
- "2",
- server_data["server_port"],
- server_data["created_by"],
- )
- elif server_data["type"] == "minecraft-bedrock":
- new_server = self.controller.restore_bedrock_zip_server(
- svr_obj.server_name,
- temp_dir,
- server_data["executable"],
- server_data["server_port"],
- server_data["created_by"],
- )
- new_server_id = new_server
- new_server = self.controller.servers.get_server_data(new_server)
- self.controller.rename_backup_dir(
- server_id, new_server_id, new_server["server_uuid"]
- )
- # preserve current schedules
- for schedule in self.controller.management.get_schedules_by_server(
- server_id
- ):
- job_data = self.controller.management.get_scheduled_task(
- schedule.schedule_id
- )
- job_data["server_id"] = new_server_id
- del job_data["schedule_id"]
- self.tasks_manager.update_job(schedule.schedule_id, job_data)
- # preserve execution command
- new_server_obj = self.controller.servers.get_server_obj(new_server_id)
- new_server_obj.execution_command = server_data["execution_command"]
- # reset executable path
- if svr_obj.path in svr_obj.executable:
- new_server_obj.executable = str(svr_obj.executable).replace(
- svr_obj.path, new_server_obj.path
- )
- # reset run command path
- if svr_obj.path in svr_obj.execution_command:
- new_server_obj.execution_command = str(
- svr_obj.execution_command
- ).replace(svr_obj.path, new_server_obj.path)
- # reset log path
- if svr_obj.path in svr_obj.log_path:
- new_server_obj.log_path = str(svr_obj.log_path).replace(
- svr_obj.path, new_server_obj.path
- )
- self.controller.servers.update_server(new_server_obj)
-
- # preserve backup config
- backup_config = self.controller.management.get_backup_config(server_id)
- excluded_dirs = []
- server_obj = self.controller.servers.get_server_obj(server_id)
- loop_backup_path = self.helper.wtol_path(server_obj.path)
- for item in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- item_path = self.helper.wtol_path(item)
- bu_path = os.path.relpath(item_path, loop_backup_path)
- bu_path = os.path.join(new_server_obj.path, bu_path)
- excluded_dirs.append(bu_path)
- self.controller.management.set_backup_config(
- new_server_id,
- new_server_obj.backup_path,
- backup_config["max_backups"],
- excluded_dirs,
- backup_config["compress"],
- backup_config["shutdown"],
- )
- # remove old server's tasks
- try:
- self.tasks_manager.remove_all_server_tasks(server_id)
- except JobLookupError as e:
- logger.info("No active tasks found for server: {e}")
- self.controller.remove_server(server_id, True)
- except Exception as e:
- return self.finish_json(
- 400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
- )
- self.controller.management.add_to_audit_log(
- auth_data[4]["user_id"],
- f"Restored server {server_id} backup {data['filename']}",
- server_id,
- self.get_remote_ip(),
- )
-
- return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/backups/index.py b/app/classes/web/routes/api/servers/server/backups/index.py
index 9e47bcfc..a155f943 100644
--- a/app/classes/web/routes/api/servers/server/backups/index.py
+++ b/app/classes/web/routes/api/servers/server/backups/index.py
@@ -1,3 +1,4 @@
+import os
import logging
import json
from jsonschema import validate
@@ -10,13 +11,14 @@ logger = logging.getLogger(__name__)
backup_patch_schema = {
"type": "object",
"properties": {
- "backup_path": {"type": "string", "minLength": 1},
+ "backup_name": {"type": "string", "minLength": 3},
+ "backup_location": {"type": "string", "minLength": 1},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -25,12 +27,13 @@ backup_patch_schema = {
basic_backup_patch_schema = {
"type": "object",
"properties": {
+ "backup_name": {"type": "string", "minLength": 3},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
- "backup_before": {"type": "string"},
- "backup_after": {"type": "string"},
- "exclusions": {"type": "array"},
+ "before": {"type": "string"},
+ "after": {"type": "string"},
+ "excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -42,17 +45,21 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
- self.finish_json(200, self.controller.management.get_backup_config(server_id))
+ self.finish_json(
+ 200, self.controller.management.get_backups_by_server(server_id)
+ )
- def patch(self, server_id: str):
+ def post(self, server_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -78,46 +85,25 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- self.controller.management.set_backup_config(
- server_id,
- data.get(
- "backup_path",
- self.controller.management.get_backup_config(server_id)["backup_path"],
- ),
- data.get(
- "max_backups",
- self.controller.management.get_backup_config(server_id)["max_backups"],
- ),
- data.get("exclusions"),
- data.get(
- "compress",
- self.controller.management.get_backup_config(server_id)["compress"],
- ),
- data.get(
- "shutdown",
- self.controller.management.get_backup_config(server_id)["shutdown"],
- ),
- data.get(
- "backup_before",
- self.controller.management.get_backup_config(server_id)["before"],
- ),
- data.get(
- "backup_after",
- self.controller.management.get_backup_config(server_id)["after"],
- ),
- )
+ # Set the backup location automatically for non-super users. We should probably
+ # make the default location configurable for SU eventually
+ if not auth_data[4]["superuser"]:
+ data["backup_location"] = os.path.join(self.helper.backup_path, server_id)
+ data["server_id"] = server_id
+ if not data.get("excluded_dirs", None):
+ data["excluded_dirs"] = []
+ self.controller.management.add_backup_config(data)
return self.finish_json(200, {"status": "ok"})
diff --git a/app/classes/web/routes/api/servers/server/files.py b/app/classes/web/routes/api/servers/server/files.py
index 8e70d4fe..2699ae0c 100644
--- a/app/classes/web/routes/api/servers/server/files.py
+++ b/app/classes/web/routes/api/servers/server/files.py
@@ -72,7 +72,7 @@ file_delete_schema = {
class ApiServersServerFilesIndexHandler(BaseApiHandler):
- def post(self, server_id: str):
+ def post(self, server_id: str, backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -80,16 +80,16 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
+ auth_data[4]["user_id"], server_id
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
- auth_data[4]["user_id"], server_id
- )
- and EnumPermissionsServer.BACKUP
- not in self.controller.server_perms.get_user_id_permissions_list(
- auth_data[4]["user_id"], server_id
- )
+ EnumPermissionsServer.FILES not in server_permissions
+ and EnumPermissionsServer.BACKUP not in server_permissions
):
# if the user doesn't have Files or Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -149,21 +149,35 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
filename = html.escape(raw_filename)
rel = os.path.join(folder, raw_filename)
dpath = os.path.join(folder, filename)
- if str(dpath) in self.controller.management.get_excluded_backup_dirs(
- server_id
- ):
- if os.path.isdir(rel):
- return_json[filename] = {
- "path": dpath,
- "dir": True,
- "excluded": True,
- }
+ if backup_id:
+ if str(
+ dpath
+ ) in self.controller.management.get_excluded_backup_dirs(backup_id):
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": True,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": True,
+ }
else:
- return_json[filename] = {
- "path": dpath,
- "dir": False,
- "excluded": True,
- }
+ if os.path.isdir(rel):
+ return_json[filename] = {
+ "path": dpath,
+ "dir": True,
+ "excluded": False,
+ }
+ else:
+ return_json[filename] = {
+ "path": dpath,
+ "dir": False,
+ "excluded": False,
+ }
else:
if os.path.isdir(rel):
return_json[filename] = {
@@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
)
self.finish_json(200, {"status": "ok", "data": file_contents})
- def delete(self, server_id: str):
+ def delete(self, server_id: str, _backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -197,13 +211,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -246,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
return self.finish_json(200, {"status": "ok"})
return self.finish_json(500, {"status": "error", "error": str(proc)})
- def patch(self, server_id: str):
+ def patch(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -254,13 +269,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -299,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
file_object.write(file_contents)
return self.finish_json(200, {"status": "ok"})
- def put(self, server_id: str):
+ def put(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@@ -307,13 +323,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -373,13 +390,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -438,13 +456,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@@ -504,13 +523,14 @@ class ApiServersServerFilesZipHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.FILES
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
diff --git a/app/classes/web/routes/api/servers/server/index.py b/app/classes/web/routes/api/servers/server/index.py
index 06db3158..3562334c 100644
--- a/app/classes/web/routes/api/servers/server/index.py
+++ b/app/classes/web/routes/api/servers/server/index.py
@@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
server_patch_schema = {
"type": "object",
"properties": {
- "server_name": {"type": "string", "minLength": 1},
+ "server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"},
"backup_path": {"type": "string"},
"executable": {"type": "string"},
"log_path": {"type": "string", "minLength": 1},
@@ -102,13 +102,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -154,13 +155,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -176,7 +178,7 @@ class ApiServersServerIndexHandler(BaseApiHandler):
self.tasks_manager.remove_all_server_tasks(server_id)
failed = False
for item in self.controller.servers.failed_servers[:]:
- if item["server_id"] == int(server_id):
+ if item["server_id"] == server_id:
self.controller.servers.failed_servers.remove(item)
failed = True
diff --git a/app/classes/web/routes/api/servers/server/logs.py b/app/classes/web/routes/api/servers/server/logs.py
index 94a8a71b..eb6ede00 100644
--- a/app/classes/web/routes/api/servers/server/logs.py
+++ b/app/classes/web/routes/api/servers/server/logs.py
@@ -30,13 +30,14 @@ class ApiServersServerLogsHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.LOGS
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.LOGS not in server_permissions:
# if the user doesn't have Logs permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
diff --git a/app/classes/web/routes/api/servers/server/stdin.py b/app/classes/web/routes/api/servers/server/stdin.py
index ba8400b7..ca2cd7d9 100644
--- a/app/classes/web/routes/api/servers/server/stdin.py
+++ b/app/classes/web/routes/api/servers/server/stdin.py
@@ -16,13 +16,14 @@ class ApiServersServerStdinHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.COMMANDS
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.COMMANDS not in server_permissions:
# if the user doesn't have Commands permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
diff --git a/app/classes/web/routes/api/servers/server/tasks/index.py b/app/classes/web/routes/api/servers/server/tasks/index.py
index 8e98bbbe..ed8b9df9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/index.py
@@ -21,6 +21,9 @@ new_task_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@@ -78,13 +81,14 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.SCHEDULE
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
data["server_id"] = server_id
@@ -109,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
)
if "parent" not in data:
data["parent"] = None
+ if data.get("action_id"):
+ backup_config = self.controller.management.get_backup_config(
+ data["action_id"]
+ )
+ if backup_config["server_id"]["server_id"] != server_id:
+ return self.finish_json(
+ 405,
+ {
+ "status": "error",
+ "error": "Server ID Mismatch",
+ },
+ )
task_id = self.tasks_manager.schedule_job(data)
self.controller.management.add_to_audit_log(
diff --git a/app/classes/web/routes/api/servers/server/tasks/task/index.py b/app/classes/web/routes/api/servers/server/tasks/task/index.py
index 742312a6..05c8cee9 100644
--- a/app/classes/web/routes/api/servers/server/tasks/task/index.py
+++ b/app/classes/web/routes/api/servers/server/tasks/task/index.py
@@ -22,6 +22,9 @@ task_patch_schema = {
"action": {
"type": "string",
},
+ "action_id": {
+ "type": "string",
+ },
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@@ -54,12 +57,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.SCHEDULE
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(200, self.controller.management.get_scheduled_task(task_id))
@@ -68,12 +73,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.SCHEDULE
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -120,13 +127,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.SCHEDULE
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
diff --git a/app/classes/web/routes/api/servers/server/webhooks/index.py b/app/classes/web/routes/api/servers/server/webhooks/index.py
index 223171c8..2557c309 100644
--- a/app/classes/web/routes/api/servers/server/webhooks/index.py
+++ b/app/classes/web/routes/api/servers/server/webhooks/index.py
@@ -38,12 +38,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@@ -81,13 +83,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
data["server_id"] = server_id
diff --git a/app/classes/web/routes/api/servers/server/webhooks/webhook/index.py b/app/classes/web/routes/api/servers/server/webhooks/webhook/index.py
index 4b58011e..c94aa975 100644
--- a/app/classes/web/routes/api/servers/server/webhooks/webhook/index.py
+++ b/app/classes/web/routes/api/servers/server/webhooks/webhook/index.py
@@ -39,12 +39,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
@@ -66,12 +68,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -117,13 +121,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -159,13 +164,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
- if (
- EnumPermissionsServer.CONFIG
- not in self.controller.server_perms.get_user_id_permissions_list(
+ mask = self.controller.server_perms.get_lowest_api_perm_mask(
+ self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
- )
- ):
+ ),
+ auth_data[5],
+ )
+ server_permissions = self.controller.server_perms.get_permissions(mask)
+ if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
webhook = self.controller.management.get_webhook_by_id(webhook_id)
diff --git a/app/classes/web/routes/api/users/index.py b/app/classes/web/routes/api/users/index.py
index f7341d38..32ebd283 100644
--- a/app/classes/web/routes/api/users/index.py
+++ b/app/classes/web/routes/api/users/index.py
@@ -2,6 +2,7 @@ import logging
import json
from jsonschema import validate
from jsonschema.exceptions import ValidationError
+from app.classes.shared.translation import Translation
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.roles import Roles, HelperRoles
from app.classes.models.users import PUBLIC_USER_ATTRS
@@ -21,6 +22,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
_,
_,
user,
+ _,
) = auth_data
# GET /api/v2/users?ids=true
@@ -53,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
)
def post(self):
+ self.translator = Translation(self.helper)
new_user_schema = {
"type": "object",
"properties": {
@@ -70,6 +73,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
_,
superuser,
user,
+ _,
) = auth_data
if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
@@ -85,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler):
try:
validate(data, new_user_schema)
except ValidationError as e:
+ err = self.translator.translate(
+ "validators",
+ e.schema["error"],
+ self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]),
+ )
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
- "error_data": str(e),
+ "error_data": f"{str(err)}",
},
)
username = data["username"]
@@ -149,11 +158,16 @@ class ApiUsersIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_SUPERUSER_CREATE"}
)
- if len(roles) != 0 and not superuser:
- # HACK: This should check if the user has the roles or something
- return self.finish_json(
- 400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
- )
+ for role in roles:
+ role = self.controller.roles.get_role(role)
+ if (
+ str(role.get("manager", "no manager found"))
+ != str(auth_data[4]["user_id"])
+ and not superuser
+ ):
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
+ )
# TODO: do this in the most efficient way
user_id = self.controller.users.add_user(
@@ -177,7 +191,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"added user {username} (UID:{user_id}) with roles {roles}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/users/user/api.py b/app/classes/web/routes/api/users/user/api.py
index 1c7635f2..4baac898 100644
--- a/app/classes/web/routes/api/users/user/api.py
+++ b/app/classes/web/routes/api/users/user/api.py
@@ -43,7 +43,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
auth_data[4]["user_id"],
f"Generated a new API token for the key {key.name} "
f"from user with UID: {key.user_id}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
data_key = self.controller.authentication.generate(
@@ -75,7 +75,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
"name": key.name,
"server_permissions": key.server_permissions,
"crafty_permissions": key.crafty_permissions,
- "superuser": key.superuser,
+ "full_access": key.full_access,
}
)
self.finish_json(
@@ -99,7 +99,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
"type": "string",
"pattern": "^[01]{3}$", # 8 bits, see EnumPermissionsCrafty
},
- "superuser": {"type": "boolean"},
+ "full_access": {"type": "boolean"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -113,6 +113,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
_,
_superuser,
user,
+ _,
) = auth_data
try:
@@ -163,7 +164,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
key_id = self.controller.users.add_user_api_key(
data["name"],
user_id,
- data["superuser"],
+ data["full_access"],
data["server_permissions_mask"],
data["crafty_permissions_mask"],
)
@@ -173,7 +174,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
f"Added API key {data['name']} with crafty permissions "
f"{data['crafty_permissions_mask']}"
f" and {data['server_permissions_mask']} for user with UID: {user_id}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
self.finish_json(200, {"status": "ok", "data": {"id": key_id}})
@@ -188,6 +189,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
_,
_,
_user,
+ _,
) = auth_data
if key_id:
key = self.controller.users.get_user_api_key(key_id)
@@ -215,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
)
if (
- target_key.user_id != auth_data[4]["user_id"]
+ str(target_key.user_id) != str(auth_data[4]["user_id"])
and not auth_data[4]["superuser"]
):
return self.finish_json(
@@ -233,7 +235,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
auth_data[4]["user_id"],
f"Removed API key {target_key} "
f"(ID: {key_id}) from user {auth_data[4]['user_id']}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/users/user/index.py b/app/classes/web/routes/api/users/user/index.py
index 1b7f6f91..b05e4ac3 100644
--- a/app/classes/web/routes/api/users/user/index.py
+++ b/app/classes/web/routes/api/users/user/index.py
@@ -24,6 +24,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
_,
user,
+ _,
) = auth_data
if user_id in ["@me", user["user_id"]]:
@@ -72,6 +73,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
_,
user,
+ _,
) = auth_data
if (user_id in ["@me", user["user_id"]]) and self.helper.get_setting(
@@ -94,7 +96,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"deleted the user {user_id}",
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
@@ -121,6 +123,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
superuser,
user,
+ _,
) = auth_data
try:
@@ -129,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
-
try:
validate(data, user_patch_schema)
except ValidationError as e:
@@ -141,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
-
if user_id == "@me":
user_id = user["user_id"]
-
if (
EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
and str(user["user_id"]) != str(user_id)
@@ -212,6 +212,25 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
)
+ user_modify = self.controller.users.get_user_roles_id(user_id)
+
+ for role in data["roles"]:
+ # Check if user is not a super user and that the exec user is the role
+ # manager or that the role already exists in the user's list
+ if not superuser and (
+ str(
+ self.controller.roles.get_role(role).get(
+ "manager", "no manager found"
+ )
+ )
+ != str(auth_data[4]["user_id"])
+ and role not in user_modify
+ ):
+ for item in user_modify:
+ print(type(role), type(item))
+ return self.finish_json(
+ 400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
+ )
user_obj = HelperUsers.get_user_model(user_id)
if "password" in data and str(user["user_id"]) != str(user_id):
@@ -283,7 +302,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
f"edited user {user_obj.username} (UID: {user_id})"
f"with roles {user_obj.roles}"
),
- server_id=0,
+ server_id=None,
source_ip=self.get_remote_ip(),
)
diff --git a/app/classes/web/routes/api/users/user/permissions.py b/app/classes/web/routes/api/users/user/permissions.py
index b6c8703a..d0f496f2 100644
--- a/app/classes/web/routes/api/users/user/permissions.py
+++ b/app/classes/web/routes/api/users/user/permissions.py
@@ -27,6 +27,7 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
_,
_,
user,
+ _,
) = auth_data
if user_id in ["@me", user["user_id"]]:
@@ -52,6 +53,8 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
},
)
+ counter_data = PermissionsCrafty.get_created_quantity_list(user_id)
+
self.finish_json(
200,
{
@@ -59,9 +62,9 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
"data": {
"permissions": res_data.permissions,
"counters": {
- SERVER_CREATION: res_data.created_server,
- USER_CONFIG: res_data.created_user,
- ROLES_CONFIG: res_data.created_role,
+ SERVER_CREATION: counter_data["SERVER_CREATION"],
+ USER_CONFIG: counter_data["USER_CONFIG"],
+ ROLES_CONFIG: counter_data["ROLES_CONFIG"],
},
"limits": {
SERVER_CREATION: res_data.limit_server_creation,
diff --git a/app/classes/web/routes/api/users/user/public.py b/app/classes/web/routes/api/users/user/public.py
index b67ab61e..e016babc 100644
--- a/app/classes/web/routes/api/users/user/public.py
+++ b/app/classes/web/routes/api/users/user/public.py
@@ -17,6 +17,7 @@ class ApiUsersUserPublicHandler(BaseApiHandler):
_,
_,
user,
+ _,
) = auth_data
if user_id == "@me":
diff --git a/app/classes/web/routes/metrics/metrics_handlers.py b/app/classes/web/routes/metrics/metrics_handlers.py
index fa43a909..7fb1e31d 100644
--- a/app/classes/web/routes/metrics/metrics_handlers.py
+++ b/app/classes/web/routes/metrics/metrics_handlers.py
@@ -17,7 +17,7 @@ def metrics_handlers(handler_args):
handler_args,
),
(
- r"/metrics/servers/([0-9]+)/?",
+ r"/metrics/servers/([a-z0-9-]+)/?",
ApiOpenMetricsServersHandler,
handler_args,
),
diff --git a/app/classes/web/server_handler.py b/app/classes/web/server_handler.py
index f3dc6faf..ed2b7e65 100644
--- a/app/classes/web/server_handler.py
+++ b/app/classes/web/server_handler.py
@@ -30,7 +30,7 @@ class ServerHandler(BaseHandler):
) = self.current_user
superuser = exec_user["superuser"]
if api_key is not None:
- superuser = superuser and api_key.superuser
+ superuser = superuser and api_key.full_access
if superuser:
defined_servers = self.controller.servers.list_defined_servers()
@@ -126,7 +126,7 @@ class ServerHandler(BaseHandler):
"created": api_key.created,
"server_permissions": api_key.server_permissions,
"crafty_permissions": api_key.crafty_permissions,
- "superuser": api_key.superuser,
+ "full_access": api_key.full_access,
}
if api_key is not None
else None
@@ -148,12 +148,12 @@ class ServerHandler(BaseHandler):
return
page_data["server_api"] = False
if page_data["online"]:
- page_data["server_api"] = self.helper.check_address_status(
- "https://serverjars.com/api/fetchTypes"
+ page_data["server_api"] = (
+ self.controller.big_bucket._check_bucket_alive()
)
- page_data["server_types"] = self.controller.server_jars.get_serverjar_data()
+ page_data["server_types"] = self.controller.big_bucket.get_bucket_data()
page_data["js_server_types"] = json.dumps(
- self.controller.server_jars.get_serverjar_data()
+ self.controller.big_bucket.get_bucket_data()
)
if page_data["server_types"] is None:
page_data["server_types"] = []
diff --git a/app/classes/web/tornado_handler.py b/app/classes/web/tornado_handler.py
index fbcf970f..0b8140e3 100644
--- a/app/classes/web/tornado_handler.py
+++ b/app/classes/web/tornado_handler.py
@@ -24,8 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers
from app.classes.web.server_handler import ServerHandler
from app.classes.web.websocket_handler import WebSocketHandler
from app.classes.web.static_handler import CustomStaticHandler
-from app.classes.web.upload_handler import UploadHandler
-from app.classes.web.http_handler import HTTPHandler, HTTPHandlerPage
from app.classes.web.status_handler import StatusHandler
@@ -44,7 +42,6 @@ class Webserver:
file_helper: FileHelpers,
):
self.ioloop = None
- self.http_server = None
self.https_server = None
self.helper = helper
self.controller = controller
@@ -100,7 +97,6 @@ class Webserver:
# let's verify we have an SSL cert
self.helper.create_self_signed_cert()
- http_port = self.helper.get_setting("http_port")
https_port = self.helper.get_setting("https_port")
debug_errors = self.helper.get_setting("show_errors")
@@ -112,9 +108,6 @@ class Webserver:
cookie_secret = self.helper.random_string_generator(32)
HelpersManagement.set_cookie_secret(cookie_secret)
- if not http_port and http_port != 0:
- http_port = 8000
-
if not https_port:
https_port = 8443
@@ -127,7 +120,7 @@ class Webserver:
),
}
- logger.info(f"Starting Web Server on ports http:{http_port} https:{https_port}")
+ logger.info(f"Starting Web Server on ports https:{https_port}")
asyncio.set_event_loop(asyncio.new_event_loop())
@@ -148,7 +141,6 @@ class Webserver:
(r"/panel/(.*)", PanelHandler, handler_args),
(r"/server/(.*)", ServerHandler, handler_args),
(r"/ws", WebSocketHandler, handler_args),
- (r"/upload", UploadHandler, handler_args),
(r"/status", StatusHandler, handler_args),
# API Routes V2
*api_handlers(handler_args),
@@ -173,30 +165,6 @@ class Webserver:
static_handler_class=CustomStaticHandler,
serve_traceback=debug_errors,
)
- http_handers = [
- (r"/", HTTPHandler, handler_args),
- (r"/(.+)", HTTPHandlerPage, handler_args),
- ]
- http_app = tornado.web.Application(
- http_handers,
- template_path=os.path.join(self.helper.webroot, "templates"),
- static_path=os.path.join(self.helper.webroot, "static"),
- debug=debug_errors,
- cookie_secret=cookie_secret,
- xsrf_cookies=True,
- autoreload=False,
- log_function=self.log_function,
- default_handler_class=HTTPHandler,
- login_url="/login",
- serve_traceback=debug_errors,
- )
-
- if http_port != 0:
- self.http_server = tornado.httpserver.HTTPServer(http_app)
- self.http_server.listen(http_port)
- else:
- logger.info("http port disabled by config")
-
self.https_server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
self.https_server.listen(https_port)
@@ -218,7 +186,6 @@ class Webserver:
logger.info("Shutting Down Web Server")
Console.info("Shutting Down Web Server")
self.ioloop.stop()
- self.http_server.stop()
self.https_server.stop()
logger.info("Web Server Stopped")
Console.info("Web Server Stopped")
diff --git a/app/classes/web/upload_handler.py b/app/classes/web/upload_handler.py
deleted file mode 100644
index 0667dd12..00000000
--- a/app/classes/web/upload_handler.py
+++ /dev/null
@@ -1,331 +0,0 @@
-import logging
-import os
-import time
-import urllib.parse
-import tornado.web
-import tornado.options
-import tornado.httpserver
-from app.classes.models.crafty_permissions import EnumPermissionsCrafty
-
-from app.classes.models.server_permissions import EnumPermissionsServer
-from app.classes.shared.console import Console
-from app.classes.shared.helpers import Helpers
-from app.classes.shared.main_controller import Controller
-from app.classes.web.base_handler import BaseHandler
-from app.classes.shared.websocket_manager import WebSocketManager
-
-logger = logging.getLogger(__name__)
-
-
-@tornado.web.stream_request_body
-class UploadHandler(BaseHandler):
- # noinspection PyAttributeOutsideInit
- def initialize(
- self,
- helper: Helpers = None,
- controller: Controller = None,
- tasks_manager=None,
- translator=None,
- file_helper=None,
- ):
- self.helper = helper
- self.controller = controller
- self.tasks_manager = tasks_manager
- self.translator = translator
- self.file_helper = file_helper
-
- def prepare(self):
- # Class & Function Defination
- api_key, _token_data, exec_user = self.current_user
- self.upload_type = str(self.request.headers.get("X-Content-Upload-Type"))
-
- if self.upload_type == "server_import":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.superuser
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_api_key_permissions_list(api_key)
- )
- else:
- exec_user_server_permissions = (
- self.controller.crafty_perms.get_crafty_permissions_list(
- exec_user["user_id"]
- )
- )
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if (
- EnumPermissionsCrafty.SERVER_CREATION
- not in exec_user_server_permissions
- and not exec_user["superuser"]
- ):
- logger.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a server" " without permissions!"
- )
- self.do_upload = False
-
- path = os.path.join(self.controller.project_root, "import", "upload")
- self.helper.ensure_dir_exists(path)
- # Delete existing files
- if len(os.listdir(path)) > 0:
- for item in os.listdir():
- try:
- os.remove(os.path.join(path, item))
- except:
- logger.debug("Could not delete file on user server upload")
-
- self.helper.ensure_dir_exists(path)
- filename = urllib.parse.unquote(
- self.request.headers.get("X-FileName", None)
- )
- if not str(filename).endswith(".zip"):
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- elif self.upload_type == "background":
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.superuser
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if not superuser:
- return self.finish_json(
- 401,
- {
- "status": "error",
- "error": "UNAUTHORIZED ACCESS",
- "info": self.helper.translation.translate(
- "error",
- "superError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if not self.request.headers.get("X-Content-Type", None).startswith(
- "image/"
- ):
- return self.finish_json(
- 415,
- {
- "status": "error",
- "error": "TYPE ERROR",
- "info": self.helper.translation.translate(
- "error",
- "fileError",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- path = os.path.join(
- self.controller.project_root,
- "app/frontend/static/assets/images/auth/custom",
- )
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
- else:
- server_id = self.get_argument("server_id", None)
- superuser = exec_user["superuser"]
- if api_key is not None:
- superuser = superuser and api_key.superuser
- user_id = exec_user["user_id"]
- stream_size_value = self.helper.get_setting("stream_size_GB")
-
- max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
- self.content_len = int(self.request.headers.get("Content-Length"))
- if self.content_len > max_streamed_size:
- logger.error(
- f"User with ID {user_id} attempted to upload a file that"
- f" exceeded the max body size."
- )
-
- return self.finish_json(
- 413,
- {
- "status": "error",
- "error": "TOO LARGE",
- "info": self.helper.translation.translate(
- "error",
- "fileTooLarge",
- self.controller.users.get_user_lang_by_id(user_id),
- ),
- },
- )
- self.do_upload = True
-
- if superuser:
- exec_user_server_permissions = (
- self.controller.server_perms.list_defined_permissions()
- )
- elif api_key is not None:
- exec_user_server_permissions = (
- self.controller.server_perms.get_api_key_permissions_list(
- api_key, server_id
- )
- )
- else:
- exec_user_server_permissions = (
- self.controller.server_perms.get_user_id_permissions_list(
- exec_user["user_id"], server_id
- )
- )
-
- server_id = self.request.headers.get("X-ServerId", None)
- if server_id is None:
- logger.warning("Server ID not found in upload handler call")
- Console.warning("Server ID not found in upload handler call")
- self.do_upload = False
-
- if user_id is None:
- logger.warning("User ID not found in upload handler call")
- Console.warning("User ID not found in upload handler call")
- self.do_upload = False
-
- if EnumPermissionsServer.FILES not in exec_user_server_permissions:
- logger.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to "
- f"{server_id} without permissions!"
- )
- self.do_upload = False
-
- path = self.request.headers.get("X-Path", None)
- filename = self.request.headers.get("X-FileName", None)
- full_path = os.path.join(path, filename)
-
- if not self.helper.is_subdir(
- full_path,
- Helpers.get_os_understandable_path(
- self.controller.servers.get_server_data_by_id(server_id)["path"]
- ),
- ):
- logger.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- Console.warning(
- f"User {user_id} tried to upload a file to {server_id} "
- f"but the path is not inside of the server!"
- )
- self.do_upload = False
-
- if self.do_upload:
- try:
- self.f = open(full_path, "wb")
- except Exception as e:
- logger.error(f"Upload failed with error: {e}")
- self.do_upload = False
- # If max_body_size is not set, you cannot upload files > 100MB
- self.request.connection.set_max_body_size(max_streamed_size)
-
- def post(self):
- logger.info("Upload completed")
- if self.upload_type == "server_files":
- files_left = int(self.request.headers.get("X-Files-Left", None))
- else:
- files_left = 0
-
- if self.do_upload:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "success")
- self.finish("success") # Nope, I'm sending "success"
- self.f.close()
- else:
- time.sleep(5)
- if files_left == 0:
- WebSocketManager().broadcast("close_upload_box", "error")
- self.finish("error")
-
- def data_received(self, chunk):
- if self.do_upload:
- self.f.write(chunk)
diff --git a/app/classes/web/websocket_handler.py b/app/classes/web/websocket_handler.py
index cde97584..3e426797 100644
--- a/app/classes/web/websocket_handler.py
+++ b/app/classes/web/websocket_handler.py
@@ -55,7 +55,7 @@ class WebSocketHandler(tornado.websocket.WebSocketHandler):
self.controller.management.add_to_audit_log_raw(
"unknown",
0,
- 0,
+ None,
"Someone tried to connect via WebSocket without proper authentication",
self.get_remote_ip(),
)
diff --git a/app/config/logging.json b/app/config/logging.json
index fd1173eb..d0a20cdf 100644
--- a/app/config/logging.json
+++ b/app/config/logging.json
@@ -14,6 +14,9 @@
"auth": {
"format": "%(asctime)s - [AUTH] - %(levelname)s - %(message)s"
},
+ "audit": {
+ "()": "app.classes.logging.log_formatter.JsonFormatter"
+ },
"cmd_queue": {
"format": "%(asctime)s - [CMD_QUEUE] - %(levelname)s - %(message)s"
}
@@ -70,6 +73,14 @@
"maxBytes": 10485760,
"backupCount": 20,
"encoding": "utf8"
+ },
+ "audit_log_handler": {
+ "class": "logging.handlers.RotatingFileHandler",
+ "formatter": "audit",
+ "filename": "logs/audit.log",
+ "maxBytes": 10485760,
+ "backupCount": 20,
+ "encoding": "utf8"
}
},
"loggers": {
@@ -108,6 +119,12 @@
"cmd_queue_file_handler"
],
"propagate": false
+ },
+ "audit_log": {
+ "level": "INFO",
+ "handlers": [
+ "audit_log_handler"
+ ]
}
}
}
\ No newline at end of file
diff --git a/app/config/version.json b/app/config/version.json
index 3c001e77..4f136968 100644
--- a/app/config/version.json
+++ b/app/config/version.json
@@ -1,5 +1,5 @@
{
"major": 4,
- "minor": 2,
- "sub": 4
+ "minor": 4,
+ "sub": 2
}
diff --git a/app/frontend/static/assets/css/crafty.css b/app/frontend/static/assets/css/crafty.css
index 43dd2e6a..b765bca6 100644
--- a/app/frontend/static/assets/css/crafty.css
+++ b/app/frontend/static/assets/css/crafty.css
@@ -12,6 +12,16 @@ nav.sidebar {
position: fixed;
}
+td {
+ -ms-overflow-style: none;
+ /* IE and Edge */
+ scrollbar-width: none;
+ /* Firefox */
+}
+
+td::-webkit-scrollbar {
+ display: none;
+}
@media (min-width: 992px) {
nav.sidebar {
@@ -267,4 +277,7 @@ div.warnings div.wssError a:hover {
font-family: 'Sarabun', 'roboto', sans-serif;
}
-/**************************************************************/
\ No newline at end of file
+/**************************************************************/
+.hidden-input {
+ margin-left: -40px;
+}
\ No newline at end of file
diff --git a/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css b/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css
new file mode 100644
index 00000000..079eeb1f
--- /dev/null
+++ b/app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css
@@ -0,0 +1,537 @@
+/*!
+ * Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
+ *
+ * Copyright 2012-2020 SnapAppointments, LLC
+ * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
+ */
+@-webkit-keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+@-o-keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+@keyframes bs-notify-fadeOut {
+ 0% {
+ opacity: .9
+ }
+
+ 100% {
+ opacity: 0
+ }
+}
+
+.bootstrap-select>select.bs-select-hidden,
+select.bs-select-hidden,
+select.selectpicker {
+ display: none !important
+}
+
+.bootstrap-select {
+ width: 220px;
+ vertical-align: middle
+}
+
+.bootstrap-select>.dropdown-toggle {
+ position: relative;
+ width: 100%;
+ text-align: right;
+ white-space: nowrap;
+ display: -webkit-inline-box;
+ display: -webkit-inline-flex;
+ display: -ms-inline-flexbox;
+ display: inline-flex;
+ -webkit-box-align: center;
+ -webkit-align-items: center;
+ -ms-flex-align: center;
+ align-items: center;
+ -webkit-box-pack: justify;
+ -webkit-justify-content: space-between;
+ -ms-flex-pack: justify;
+ justify-content: space-between
+}
+
+.bootstrap-select>.dropdown-toggle:after {
+ margin-top: -1px
+}
+
+.bootstrap-select>.dropdown-toggle.bs-placeholder,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder:hover {
+ color: #999
+}
+
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:hover,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:active,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:focus,
+.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:hover {
+ color: rgba(255, 255, 255, .5)
+}
+
+.bootstrap-select>select {
+ position: absolute !important;
+ bottom: 0;
+ left: 50%;
+ display: block !important;
+ width: .5px !important;
+ height: 100% !important;
+ padding: 0 !important;
+ opacity: 0 !important;
+ border: none;
+ z-index: 0 !important
+}
+
+.bootstrap-select>select.mobile-device {
+ top: 0;
+ left: 0;
+ display: block !important;
+ width: 100% !important;
+ z-index: 2 !important
+}
+
+.bootstrap-select.is-invalid .dropdown-toggle,
+.error .bootstrap-select .dropdown-toggle,
+.has-error .bootstrap-select .dropdown-toggle,
+.was-validated .bootstrap-select select:invalid+.dropdown-toggle {
+ border-color: #b94a48
+}
+
+.bootstrap-select.is-valid .dropdown-toggle,
+.was-validated .bootstrap-select select:valid+.dropdown-toggle {
+ border-color: #28a745
+}
+
+.bootstrap-select.fit-width {
+ width: auto !important
+}
+
+.bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn) {
+ width: 220px
+}
+
+.bootstrap-select .dropdown-toggle:focus,
+.bootstrap-select>select.mobile-device:focus+.dropdown-toggle {
+ outline: thin dotted #333 !important;
+ outline: 5px auto -webkit-focus-ring-color !important;
+ outline-offset: -2px
+}
+
+.bootstrap-select.form-control {
+ margin-bottom: 0;
+ padding: 0;
+ border: none;
+ height: auto
+}
+
+:not(.input-group)>.bootstrap-select.form-control:not([class*=col-]) {
+ width: 100%
+}
+
+.bootstrap-select.form-control.input-group-btn {
+ float: none;
+ z-index: auto
+}
+
+.form-inline .bootstrap-select,
+.form-inline .bootstrap-select.form-control:not([class*=col-]) {
+ width: auto
+}
+
+.bootstrap-select:not(.input-group-btn),
+.bootstrap-select[class*=col-] {
+ float: none;
+ display: inline-block;
+ margin-left: 0
+}
+
+.bootstrap-select.dropdown-menu-right,
+.bootstrap-select[class*=col-].dropdown-menu-right,
+.row .bootstrap-select[class*=col-].dropdown-menu-right {
+ float: right
+}
+
+.form-group .bootstrap-select,
+.form-horizontal .bootstrap-select,
+.form-inline .bootstrap-select {
+ margin-bottom: 0
+}
+
+.form-group-lg .bootstrap-select.form-control,
+.form-group-sm .bootstrap-select.form-control {
+ padding: 0
+}
+
+.form-group-lg .bootstrap-select.form-control .dropdown-toggle,
+.form-group-sm .bootstrap-select.form-control .dropdown-toggle {
+ height: 100%;
+ font-size: inherit;
+ line-height: inherit;
+ border-radius: inherit
+}
+
+.bootstrap-select.form-control-lg .dropdown-toggle,
+.bootstrap-select.form-control-sm .dropdown-toggle {
+ font-size: inherit;
+ line-height: inherit;
+ border-radius: inherit
+}
+
+.bootstrap-select.form-control-sm .dropdown-toggle {
+ padding: .25rem .5rem
+}
+
+.bootstrap-select.form-control-lg .dropdown-toggle {
+ padding: .5rem 1rem
+}
+
+.form-inline .bootstrap-select .form-control {
+ width: 100%
+}
+
+.bootstrap-select.disabled,
+.bootstrap-select>.disabled {
+ cursor: not-allowed
+}
+
+.bootstrap-select.disabled:focus,
+.bootstrap-select>.disabled:focus {
+ outline: 0 !important
+}
+
+.bootstrap-select.bs-container {
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 0 !important;
+ padding: 0 !important
+}
+
+.bootstrap-select.bs-container .dropdown-menu {
+ z-index: 1060
+}
+
+.bootstrap-select .dropdown-toggle .filter-option {
+ position: static;
+ top: 0;
+ left: 0;
+ float: left;
+ height: 100%;
+ width: 100%;
+ text-align: left;
+ overflow: hidden;
+ -webkit-box-flex: 0;
+ -webkit-flex: 0 1 auto;
+ -ms-flex: 0 1 auto;
+ flex: 0 1 auto
+}
+
+.bs3.bootstrap-select .dropdown-toggle .filter-option {
+ padding-right: inherit
+}
+
+.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option {
+ position: absolute;
+ padding-top: inherit;
+ padding-bottom: inherit;
+ padding-left: inherit;
+ float: none
+}
+
+.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option .filter-option-inner {
+ padding-right: inherit
+}
+
+.bootstrap-select .dropdown-toggle .filter-option-inner-inner {
+ overflow: hidden
+}
+
+.bootstrap-select .dropdown-toggle .filter-expand {
+ width: 0 !important;
+ float: left;
+ opacity: 0 !important;
+ overflow: hidden
+}
+
+.bootstrap-select .dropdown-toggle .caret {
+ position: absolute;
+ top: 50%;
+ right: 12px;
+ margin-top: -2px;
+ vertical-align: middle
+}
+
+.input-group .bootstrap-select.form-control .dropdown-toggle {
+ border-radius: inherit
+}
+
+.bootstrap-select[class*=col-] .dropdown-toggle {
+ width: 100%
+}
+
+.bootstrap-select .dropdown-menu {
+ min-width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bootstrap-select .dropdown-menu>.inner:focus {
+ outline: 0 !important
+}
+
+.bootstrap-select .dropdown-menu.inner {
+ position: static;
+ float: none;
+ border: 0;
+ padding: 0;
+ margin: 0;
+ border-radius: 0;
+ -webkit-box-shadow: none;
+ box-shadow: none
+}
+
+.bootstrap-select .dropdown-menu li {
+ position: relative
+}
+
+.bootstrap-select .dropdown-menu li.active small {
+ color: rgba(255, 255, 255, .5) !important
+}
+
+.bootstrap-select .dropdown-menu li.disabled a {
+ cursor: not-allowed
+}
+
+.bootstrap-select .dropdown-menu li a {
+ cursor: pointer;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none
+}
+
+.bootstrap-select .dropdown-menu li a.opt {
+ position: relative;
+ padding-left: 2.25em
+}
+
+.bootstrap-select .dropdown-menu li a span.check-mark {
+ display: none
+}
+
+.bootstrap-select .dropdown-menu li a span.text {
+ display: inline-block
+}
+
+.bootstrap-select .dropdown-menu li small {
+ padding-left: .5em
+}
+
+.bootstrap-select .dropdown-menu .notify {
+ position: absolute;
+ bottom: 5px;
+ width: 96%;
+ margin: 0 2%;
+ min-height: 26px;
+ padding: 3px 5px;
+ background: #f5f5f5;
+ border: 1px solid #e3e3e3;
+ -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
+ box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
+ pointer-events: none;
+ opacity: .9;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bootstrap-select .dropdown-menu .notify.fadeOut {
+ -webkit-animation: .3s linear 750ms forwards bs-notify-fadeOut;
+ -o-animation: .3s linear 750ms forwards bs-notify-fadeOut;
+ animation: .3s linear 750ms forwards bs-notify-fadeOut
+}
+
+.bootstrap-select .no-results {
+ padding: 3px;
+ background: #f5f5f5;
+ margin: 0 5px;
+ white-space: nowrap
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .filter-option {
+ position: static;
+ display: inline;
+ padding: 0
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner,
+.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner-inner {
+ display: inline
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .bs-caret:before {
+ content: '\00a0'
+}
+
+.bootstrap-select.fit-width .dropdown-toggle .caret {
+ position: static;
+ top: auto;
+ margin-top: -1px
+}
+
+.bootstrap-select.show-tick .dropdown-menu .selected span.check-mark {
+ position: absolute;
+ display: inline-block;
+ right: 15px;
+ top: 5px
+}
+
+.bootstrap-select.show-tick .dropdown-menu li a span.text {
+ margin-right: 34px
+}
+
+.bootstrap-select .bs-ok-default:after {
+ content: '';
+ display: block;
+ width: .5em;
+ height: 1em;
+ border-style: solid;
+ border-width: 0 .26em .26em 0;
+ -webkit-transform-style: preserve-3d;
+ transform-style: preserve-3d;
+ -webkit-transform: rotate(45deg);
+ -ms-transform: rotate(45deg);
+ -o-transform: rotate(45deg);
+ transform: rotate(45deg)
+}
+
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle {
+ z-index: 1061
+}
+
+.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:before {
+ content: '';
+ border-left: 7px solid transparent;
+ border-right: 7px solid transparent;
+ border-bottom: 7px solid rgba(204, 204, 204, .2);
+ position: absolute;
+ bottom: -4px;
+ left: 9px;
+ display: none
+}
+
+.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:after {
+ content: '';
+ border-left: 6px solid transparent;
+ border-right: 6px solid transparent;
+ border-bottom: 6px solid #fff;
+ position: absolute;
+ bottom: -4px;
+ left: 10px;
+ display: none
+}
+
+.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:before {
+ bottom: auto;
+ top: -4px;
+ border-top: 7px solid rgba(204, 204, 204, .2);
+ border-bottom: 0
+}
+
+.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:after {
+ bottom: auto;
+ top: -4px;
+ border-top: 6px solid #fff;
+ border-bottom: 0
+}
+
+.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:before {
+ right: 12px;
+ left: auto
+}
+
+.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:after {
+ right: 13px;
+ left: auto
+}
+
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:after,
+.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:before,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:after,
+.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:before {
+ display: block
+}
+
+.bs-actionsbox,
+.bs-donebutton,
+.bs-searchbox {
+ padding: 4px 8px
+}
+
+.bs-actionsbox {
+ width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bs-actionsbox .btn-group button {
+ width: 50%
+}
+
+.bs-donebutton {
+ float: left;
+ width: 100%;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box
+}
+
+.bs-donebutton .btn-group button {
+ width: 100%
+}
+
+.bs-searchbox+.bs-actionsbox {
+ padding: 0 8px 4px
+}
+
+.bs-searchbox .form-control {
+ margin-bottom: 0;
+ width: 100%;
+ float: none
+}
\ No newline at end of file
diff --git a/app/frontend/static/assets/images/serverjars/FULL-WHITE.svg b/app/frontend/static/assets/images/serverjars/FULL-WHITE.svg
deleted file mode 100644
index d5036723..00000000
--- a/app/frontend/static/assets/images/serverjars/FULL-WHITE.svg
+++ /dev/null
@@ -1,120 +0,0 @@
-
-
-
diff --git a/app/frontend/static/assets/images/serverjars/ICON.svg b/app/frontend/static/assets/images/serverjars/ICON.svg
deleted file mode 100644
index 2adc4cff..00000000
--- a/app/frontend/static/assets/images/serverjars/ICON.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js b/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js
new file mode 100644
index 00000000..85e9683d
--- /dev/null
+++ b/app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js
@@ -0,0 +1,9 @@
+/*!
+ * Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
+ *
+ * Copyright 2012-2020 SnapAppointments, LLC
+ * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
+ */
+
+!function (e, t) { void 0 === e && void 0 !== window && (e = window), "function" == typeof define && define.amd ? define(["jquery"], function (e) { return t(e) }) : "object" == typeof module && module.exports ? module.exports = t(require("jquery")) : t(e.jQuery) }(this, function (e) { !function (P) { "use strict"; var d = ["sanitize", "whiteList", "sanitizeFn"], r = ["background", "cite", "href", "itemtype", "longdesc", "poster", "src", "xlink:href"], e = { "*": ["class", "dir", "id", "lang", "role", "tabindex", "style", /^aria-[\w-]*$/i], a: ["target", "href", "title", "rel"], area: [], b: [], br: [], col: [], code: [], div: [], em: [], hr: [], h1: [], h2: [], h3: [], h4: [], h5: [], h6: [], i: [], img: ["src", "alt", "title", "width", "height"], li: [], ol: [], p: [], pre: [], s: [], small: [], span: [], sub: [], sup: [], strong: [], u: [], ul: [] }, l = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi, a = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i; function v(e, t) { var i = e.nodeName.toLowerCase(); if (-1 !== P.inArray(i, t)) return -1 === P.inArray(i, r) || Boolean(e.nodeValue.match(l) || e.nodeValue.match(a)); for (var s = P(t).filter(function (e, t) { return t instanceof RegExp }), n = 0, o = s.length; n < o; n++)if (i.match(s[n])) return !0; return !1 } function W(e, t, i) { if (i && "function" == typeof i) return i(e); for (var s = Object.keys(t), n = 0, o = e.length; n < o; n++)for (var r = e[n].querySelectorAll("*"), l = 0, a = r.length; l < a; l++) { var c = r[l], d = c.nodeName.toLowerCase(); if (-1 !== s.indexOf(d)) for (var h = [].slice.call(c.attributes), p = [].concat(t["*"] || [], t[d] || []), u = 0, f = h.length; u < f; u++) { var m = h[u]; v(m, p) || c.removeAttribute(m.nodeName) } else c.parentNode.removeChild(c) } } "classList" in document.createElement("_") || function (e) { if ("Element" in e) { var t = "classList", i = "prototype", s = 
e.Element[i], n = Object, o = function () { var i = P(this); return { add: function (e) { return e = Array.prototype.slice.call(arguments).join(" "), i.addClass(e) }, remove: function (e) { return e = Array.prototype.slice.call(arguments).join(" "), i.removeClass(e) }, toggle: function (e, t) { return i.toggleClass(e, t) }, contains: function (e) { return i.hasClass(e) } } }; if (n.defineProperty) { var r = { get: o, enumerable: !0, configurable: !0 }; try { n.defineProperty(s, t, r) } catch (e) { void 0 !== e.number && -2146823252 !== e.number || (r.enumerable = !1, n.defineProperty(s, t, r)) } } else n[i].__defineGetter__ && s.__defineGetter__(t, o) } }(window); var t, c, i = document.createElement("_"); if (i.classList.add("c1", "c2"), !i.classList.contains("c2")) { var s = DOMTokenList.prototype.add, n = DOMTokenList.prototype.remove; DOMTokenList.prototype.add = function () { Array.prototype.forEach.call(arguments, s.bind(this)) }, DOMTokenList.prototype.remove = function () { Array.prototype.forEach.call(arguments, n.bind(this)) } } if (i.classList.toggle("c3", !1), i.classList.contains("c3")) { var o = DOMTokenList.prototype.toggle; DOMTokenList.prototype.toggle = function (e, t) { return 1 in arguments && !this.contains(e) == !t ? t : o.call(this, e) } } function h(e) { if (null == this) throw new TypeError; var t = String(this); if (e && "[object RegExp]" == c.call(e)) throw new TypeError; var i = t.length, s = String(e), n = s.length, o = 1 < arguments.length ? arguments[1] : void 0, r = o ? 
Number(o) : 0; r != r && (r = 0); var l = Math.min(Math.max(r, 0), i); if (i < n + l) return !1; for (var a = -1; ++a < n;)if (t.charCodeAt(l + a) != s.charCodeAt(a)) return !1; return !0 } function O(e, t) { var i, s = e.selectedOptions, n = []; if (t) { for (var o = 0, r = s.length; o < r; o++)(i = s[o]).disabled || "OPTGROUP" === i.parentNode.tagName && i.parentNode.disabled || n.push(i); return n } return s } function z(e, t) { for (var i, s = [], n = t || e.selectedOptions, o = 0, r = n.length; o < r; o++)(i = n[o]).disabled || "OPTGROUP" === i.parentNode.tagName && i.parentNode.disabled || s.push(i.value); return e.multiple ? s : s.length ? s[0] : null } i = null, String.prototype.startsWith || (t = function () { try { var e = {}, t = Object.defineProperty, i = t(e, e, e) && t } catch (e) { } return i }(), c = {}.toString, t ? t(String.prototype, "startsWith", { value: h, configurable: !0, writable: !0 }) : String.prototype.startsWith = h), Object.keys || (Object.keys = function (e, t, i) { for (t in i = [], e) i.hasOwnProperty.call(e, t) && i.push(t); return i }), HTMLSelectElement && !HTMLSelectElement.prototype.hasOwnProperty("selectedOptions") && Object.defineProperty(HTMLSelectElement.prototype, "selectedOptions", { get: function () { return this.querySelectorAll(":checked") } }); var p = { useDefault: !1, _set: P.valHooks.select.set }; P.valHooks.select.set = function (e, t) { return t && !p.useDefault && P(e).data("selected", !0), p._set.apply(this, arguments) }; var T = null, u = function () { try { return new Event("change"), !0 } catch (e) { return !1 } }(); function k(e, t, i, s) { for (var n = ["display", "subtext", "tokens"], o = !1, r = 0; r < n.length; r++) { var l = n[r], a = e[l]; if (a && (a = a.toString(), "display" === l && (a = a.replace(/<[^>]+>/g, "")), s && (a = w(a)), a = a.toUpperCase(), o = "contains" === i ? 
0 <= a.indexOf(t) : a.startsWith(t))) break } return o } function N(e) { return parseInt(e, 10) || 0 } P.fn.triggerNative = function (e) { var t, i = this[0]; i.dispatchEvent ? (u ? t = new Event(e, { bubbles: !0 }) : (t = document.createEvent("Event")).initEvent(e, !0, !1), i.dispatchEvent(t)) : i.fireEvent ? ((t = document.createEventObject()).eventType = e, i.fireEvent("on" + e, t)) : this.trigger(e) }; var f = { "\xc0": "A", "\xc1": "A", "\xc2": "A", "\xc3": "A", "\xc4": "A", "\xc5": "A", "\xe0": "a", "\xe1": "a", "\xe2": "a", "\xe3": "a", "\xe4": "a", "\xe5": "a", "\xc7": "C", "\xe7": "c", "\xd0": "D", "\xf0": "d", "\xc8": "E", "\xc9": "E", "\xca": "E", "\xcb": "E", "\xe8": "e", "\xe9": "e", "\xea": "e", "\xeb": "e", "\xcc": "I", "\xcd": "I", "\xce": "I", "\xcf": "I", "\xec": "i", "\xed": "i", "\xee": "i", "\xef": "i", "\xd1": "N", "\xf1": "n", "\xd2": "O", "\xd3": "O", "\xd4": "O", "\xd5": "O", "\xd6": "O", "\xd8": "O", "\xf2": "o", "\xf3": "o", "\xf4": "o", "\xf5": "o", "\xf6": "o", "\xf8": "o", "\xd9": "U", "\xda": "U", "\xdb": "U", "\xdc": "U", "\xf9": "u", "\xfa": "u", "\xfb": "u", "\xfc": "u", "\xdd": "Y", "\xfd": "y", "\xff": "y", "\xc6": "Ae", "\xe6": "ae", "\xde": "Th", "\xfe": "th", "\xdf": "ss", "\u0100": "A", "\u0102": "A", "\u0104": "A", "\u0101": "a", "\u0103": "a", "\u0105": "a", "\u0106": "C", "\u0108": "C", "\u010a": "C", "\u010c": "C", "\u0107": "c", "\u0109": "c", "\u010b": "c", "\u010d": "c", "\u010e": "D", "\u0110": "D", "\u010f": "d", "\u0111": "d", "\u0112": "E", "\u0114": "E", "\u0116": "E", "\u0118": "E", "\u011a": "E", "\u0113": "e", "\u0115": "e", "\u0117": "e", "\u0119": "e", "\u011b": "e", "\u011c": "G", "\u011e": "G", "\u0120": "G", "\u0122": "G", "\u011d": "g", "\u011f": "g", "\u0121": "g", "\u0123": "g", "\u0124": "H", "\u0126": "H", "\u0125": "h", "\u0127": "h", "\u0128": "I", "\u012a": "I", "\u012c": "I", "\u012e": "I", "\u0130": "I", "\u0129": "i", "\u012b": "i", "\u012d": "i", "\u012f": "i", "\u0131": "i", "\u0134": "J", 
"\u0135": "j", "\u0136": "K", "\u0137": "k", "\u0138": "k", "\u0139": "L", "\u013b": "L", "\u013d": "L", "\u013f": "L", "\u0141": "L", "\u013a": "l", "\u013c": "l", "\u013e": "l", "\u0140": "l", "\u0142": "l", "\u0143": "N", "\u0145": "N", "\u0147": "N", "\u014a": "N", "\u0144": "n", "\u0146": "n", "\u0148": "n", "\u014b": "n", "\u014c": "O", "\u014e": "O", "\u0150": "O", "\u014d": "o", "\u014f": "o", "\u0151": "o", "\u0154": "R", "\u0156": "R", "\u0158": "R", "\u0155": "r", "\u0157": "r", "\u0159": "r", "\u015a": "S", "\u015c": "S", "\u015e": "S", "\u0160": "S", "\u015b": "s", "\u015d": "s", "\u015f": "s", "\u0161": "s", "\u0162": "T", "\u0164": "T", "\u0166": "T", "\u0163": "t", "\u0165": "t", "\u0167": "t", "\u0168": "U", "\u016a": "U", "\u016c": "U", "\u016e": "U", "\u0170": "U", "\u0172": "U", "\u0169": "u", "\u016b": "u", "\u016d": "u", "\u016f": "u", "\u0171": "u", "\u0173": "u", "\u0174": "W", "\u0175": "w", "\u0176": "Y", "\u0177": "y", "\u0178": "Y", "\u0179": "Z", "\u017b": "Z", "\u017d": "Z", "\u017a": "z", "\u017c": "z", "\u017e": "z", "\u0132": "IJ", "\u0133": "ij", "\u0152": "Oe", "\u0153": "oe", "\u0149": "'n", "\u017f": "s" }, m = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g, g = RegExp("[\\u0300-\\u036f\\ufe20-\\ufe2f\\u20d0-\\u20ff\\u1ab0-\\u1aff\\u1dc0-\\u1dff]", "g"); function b(e) { return f[e] } function w(e) { return (e = e.toString()) && e.replace(m, b).replace(g, "") } var I, x, y, $, S = (I = { "&": "&", "<": "<", ">": ">", '"': """, "'": "'", "`": "`" }, x = "(?:" + Object.keys(I).join("|") + ")", y = RegExp(x), $ = RegExp(x, "g"), function (e) { return e = null == e ? "" : "" + e, y.test(e) ? 
e.replace($, E) : e }); function E(e) { return I[e] } var C = { 32: " ", 48: "0", 49: "1", 50: "2", 51: "3", 52: "4", 53: "5", 54: "6", 55: "7", 56: "8", 57: "9", 59: ";", 65: "A", 66: "B", 67: "C", 68: "D", 69: "E", 70: "F", 71: "G", 72: "H", 73: "I", 74: "J", 75: "K", 76: "L", 77: "M", 78: "N", 79: "O", 80: "P", 81: "Q", 82: "R", 83: "S", 84: "T", 85: "U", 86: "V", 87: "W", 88: "X", 89: "Y", 90: "Z", 96: "0", 97: "1", 98: "2", 99: "3", 100: "4", 101: "5", 102: "6", 103: "7", 104: "8", 105: "9" }, A = 27, L = 13, D = 32, H = 9, B = 38, R = 40, M = { success: !1, major: "3" }; try { M.full = (P.fn.dropdown.Constructor.VERSION || "").split(" ")[0].split("."), M.major = M.full[0], M.success = !0 } catch (e) { } var U = 0, j = ".bs.select", V = { DISABLED: "disabled", DIVIDER: "divider", SHOW: "open", DROPUP: "dropup", MENU: "dropdown-menu", MENURIGHT: "dropdown-menu-right", MENULEFT: "dropdown-menu-left", BUTTONCLASS: "btn-default", POPOVERHEADER: "popover-title", ICONBASE: "glyphicon", TICKICON: "glyphicon-ok" }, F = { MENU: "." + V.MENU }, _ = { div: document.createElement("div"), span: document.createElement("span"), i: document.createElement("i"), subtext: document.createElement("small"), a: document.createElement("a"), li: document.createElement("li"), whitespace: document.createTextNode("\xa0"), fragment: document.createDocumentFragment() }; _.noResults = _.li.cloneNode(!1), _.noResults.className = "no-results", _.a.setAttribute("role", "option"), _.a.className = "dropdown-item", _.subtext.className = "text-muted", _.text = _.span.cloneNode(!1), _.text.className = "text", _.checkMark = _.span.cloneNode(!1); var G = new RegExp(B + "|" + R), q = new RegExp("^" + H + "$|" + A), K = { li: function (e, t, i) { var s = _.li.cloneNode(!1); return e && (1 === e.nodeType || 11 === e.nodeType ? 
s.appendChild(e) : s.innerHTML = e), void 0 !== t && "" !== t && (s.className = t), null != i && s.classList.add("optgroup-" + i), s }, a: function (e, t, i) { var s = _.a.cloneNode(!0); return e && (11 === e.nodeType ? s.appendChild(e) : s.insertAdjacentHTML("beforeend", e)), void 0 !== t && "" !== t && s.classList.add.apply(s.classList, t.split(/\s+/)), i && s.setAttribute("style", i), s }, text: function (e, t) { var i, s, n = _.text.cloneNode(!1); if (e.content) n.innerHTML = e.content; else { if (n.textContent = e.text, e.icon) { var o = _.whitespace.cloneNode(!1); (s = (!0 === t ? _.i : _.span).cloneNode(!1)).className = this.options.iconBase + " " + e.icon, _.fragment.appendChild(s), _.fragment.appendChild(o) } e.subtext && ((i = _.subtext.cloneNode(!1)).textContent = e.subtext, n.appendChild(i)) } if (!0 === t) for (; 0 < n.childNodes.length;)_.fragment.appendChild(n.childNodes[0]); else _.fragment.appendChild(n); return _.fragment }, label: function (e) { var t, i, s = _.text.cloneNode(!1); if (s.innerHTML = e.display, e.icon) { var n = _.whitespace.cloneNode(!1); (i = _.span.cloneNode(!1)).className = this.options.iconBase + " " + e.icon, _.fragment.appendChild(i), _.fragment.appendChild(n) } return e.subtext && ((t = _.subtext.cloneNode(!1)).textContent = e.subtext, s.appendChild(t)), _.fragment.appendChild(s), _.fragment } }; var Y = function (e, t) { var i = this; p.useDefault || (P.valHooks.select.set = p._set, p.useDefault = !0), this.$element = P(e), this.$newElement = null, this.$button = null, this.$menu = null, this.options = t, this.selectpicker = { main: {}, search: {}, current: {}, view: {}, isSearching: !1, keydown: { keyHistory: "", resetKeyHistory: { start: function () { return setTimeout(function () { i.selectpicker.keydown.keyHistory = "" }, 800) } } } }, this.sizeInfo = {}, null === this.options.title && (this.options.title = this.$element.attr("title")); var s = this.options.windowPadding; "number" == typeof s && 
(this.options.windowPadding = [s, s, s, s]), this.val = Y.prototype.val, this.render = Y.prototype.render, this.refresh = Y.prototype.refresh, this.setStyle = Y.prototype.setStyle, this.selectAll = Y.prototype.selectAll, this.deselectAll = Y.prototype.deselectAll, this.destroy = Y.prototype.destroy, this.remove = Y.prototype.remove, this.show = Y.prototype.show, this.hide = Y.prototype.hide, this.init() }; function Z(e) { var l, a = arguments, c = e; if ([].shift.apply(a), !M.success) { try { M.full = (P.fn.dropdown.Constructor.VERSION || "").split(" ")[0].split(".") } catch (e) { Y.BootstrapVersion ? M.full = Y.BootstrapVersion.split(" ")[0].split(".") : (M.full = [M.major, "0", "0"], console.warn("There was an issue retrieving Bootstrap's version. Ensure Bootstrap is being loaded before bootstrap-select and there is no namespace collision. If loading Bootstrap asynchronously, the version may need to be manually specified via $.fn.selectpicker.Constructor.BootstrapVersion.", e)) } M.major = M.full[0], M.success = !0 } if ("4" === M.major) { var t = []; Y.DEFAULTS.style === V.BUTTONCLASS && t.push({ name: "style", className: "BUTTONCLASS" }), Y.DEFAULTS.iconBase === V.ICONBASE && t.push({ name: "iconBase", className: "ICONBASE" }), Y.DEFAULTS.tickIcon === V.TICKICON && t.push({ name: "tickIcon", className: "TICKICON" }), V.DIVIDER = "dropdown-divider", V.SHOW = "show", V.BUTTONCLASS = "btn-light", V.POPOVERHEADER = "popover-header", V.ICONBASE = "", V.TICKICON = "bs-ok-default"; for (var i = 0; i < t.length; i++) { e = t[i]; Y.DEFAULTS[e.name] = V[e.className] } } var s = this.each(function () { var e = P(this); if (e.is("select")) { var t = e.data("selectpicker"), i = "object" == typeof c && c; if (t) { if (i) for (var s in i) Object.prototype.hasOwnProperty.call(i, s) && (t.options[s] = i[s]) } else { var n = e.data(); for (var o in n) Object.prototype.hasOwnProperty.call(n, o) && -1 !== P.inArray(o, d) && delete n[o]; var r = P.extend({}, Y.DEFAULTS, 
P.fn.selectpicker.defaults || {}, n, i); r.template = P.extend({}, Y.DEFAULTS.template, P.fn.selectpicker.defaults ? P.fn.selectpicker.defaults.template : {}, n.template, i.template), e.data("selectpicker", t = new Y(this, r)) } "string" == typeof c && (l = t[c] instanceof Function ? t[c].apply(t, a) : t.options[c]) } }); return void 0 !== l ? l : s } Y.VERSION = "1.13.18", Y.DEFAULTS = { noneSelectedText: "Nothing selected", noneResultsText: "No results matched {0}", countSelectedText: function (e, t) { return 1 == e ? "{0} item selected" : "{0} items selected" }, maxOptionsText: function (e, t) { return [1 == e ? "Limit reached ({n} item max)" : "Limit reached ({n} items max)", 1 == t ? "Group limit reached ({n} item max)" : "Group limit reached ({n} items max)"] }, selectAllText: "Select All", deselectAllText: "Deselect All", doneButton: !1, doneButtonText: "Close", multipleSeparator: ", ", styleBase: "btn", style: V.BUTTONCLASS, size: "auto", title: null, selectedTextFormat: "values", width: !1, container: !1, hideDisabled: !1, showSubtext: !1, showIcon: !0, showContent: !0, dropupAuto: !0, header: !1, liveSearch: !1, liveSearchPlaceholder: null, liveSearchNormalize: !1, liveSearchStyle: "contains", actionsBox: !1, iconBase: V.ICONBASE, tickIcon: V.TICKICON, showTick: !1, template: { caret: '' }, maxOptions: !1, mobile: !1, selectOnTab: !1, dropdownAlignRight: !1, windowPadding: 0, virtualScroll: 600, display: !1, sanitize: !0, sanitizeFn: null, whiteList: e }, Y.prototype = { constructor: Y, init: function () { var i = this, e = this.$element.attr("id"), t = this.$element[0], s = t.form; U++, this.selectId = "bs-select-" + U, t.classList.add("bs-select-hidden"), this.multiple = this.$element.prop("multiple"), this.autofocus = this.$element.prop("autofocus"), t.classList.contains("show-tick") && (this.options.showTick = !0), this.$newElement = this.createDropdown(), this.buildData(), this.$element.after(this.$newElement).prependTo(this.$newElement), s && null 
=== t.form && (s.id || (s.id = "form-" + this.selectId), t.setAttribute("form", s.id)), this.$button = this.$newElement.children("button"), this.$menu = this.$newElement.children(F.MENU), this.$menuInner = this.$menu.children(".inner"), this.$searchbox = this.$menu.find("input"), t.classList.remove("bs-select-hidden"), !0 === this.options.dropdownAlignRight && this.$menu[0].classList.add(V.MENURIGHT), void 0 !== e && this.$button.attr("data-id", e), this.checkDisabled(), this.clickListener(), this.options.liveSearch ? (this.liveSearchListener(), this.focusedParent = this.$searchbox[0]) : this.focusedParent = this.$menuInner[0], this.setStyle(), this.render(), this.setWidth(), this.options.container ? this.selectPosition() : this.$element.on("hide" + j, function () { if (i.isVirtual()) { var e = i.$menuInner[0], t = e.firstChild.cloneNode(!1); e.replaceChild(t, e.firstChild), e.scrollTop = 0 } }), this.$menu.data("this", this), this.$newElement.data("this", this), this.options.mobile && this.mobile(), this.$newElement.on({ "hide.bs.dropdown": function (e) { i.$element.trigger("hide" + j, e) }, "hidden.bs.dropdown": function (e) { i.$element.trigger("hidden" + j, e) }, "show.bs.dropdown": function (e) { i.$element.trigger("show" + j, e) }, "shown.bs.dropdown": function (e) { i.$element.trigger("shown" + j, e) } }), t.hasAttribute("required") && this.$element.on("invalid" + j, function () { i.$button[0].classList.add("bs-invalid"), i.$element.on("shown" + j + ".invalid", function () { i.$element.val(i.$element.val()).off("shown" + j + ".invalid") }).on("rendered" + j, function () { this.validity.valid && i.$button[0].classList.remove("bs-invalid"), i.$element.off("rendered" + j) }), i.$button.on("blur" + j, function () { i.$element.trigger("focus").trigger("blur"), i.$button.off("blur" + j) }) }), setTimeout(function () { i.buildList(), i.$element.trigger("loaded" + j) }) }, createDropdown: function () { var e = this.multiple || this.options.showTick ? 
" show-tick" : "", t = this.multiple ? ' aria-multiselectable="true"' : "", i = "", s = this.autofocus ? " autofocus" : ""; M.major < 4 && this.$element.parent().hasClass("input-group") && (i = " input-group-btn"); var n, o = "", r = "", l = "", a = ""; return this.options.header && (o = '
Backing up {{data['server_stats']['world_size']}}
- {% end %} - -' + @@ -858,54 +765,56 @@ }); let nFiles = files.files.length; - for (i = 0; i < nFiles; i++) { - if (!doUpload) { - doUpload = true; - hideUploadBox(); - break; - } + const uploadPromises = []; + for (let i = 0; i < nFiles; i++) { + const file = files.files[i]; const progressHtml = ` -