Mirror of https://gitlab.com/crafty-controller/crafty-4.git (synced 2024-08-30 18:23:09 +00:00)
Merge branch 'dev' into 'feature/steamcmd'

# Conflicts:
#   Dockerfile
#   app/classes/models/servers.py
#   app/classes/shared/helpers.py
#   app/classes/shared/main_controller.py
#   app/classes/shared/server.py
#   main.py

Commit 3dc1d90cf4
@@ -3,16 +3,16 @@
 - **Install Type:** Git Cloned(Manual) / Installer / WinPackage / Docker

 ## What Happened?
-*A brief description of what happened when you tried to perform an action*
+<!-- A brief description of what happened when you tried to perform an action -->

 ## Expected result
-*What should have happened when you performed the actions*
+<!-- What should have happened when you performed the actions -->

 ## Steps to reproduce
-*List the steps required to produce the error. These should be as few as possible*
+<!-- List the steps required to produce the error. These should be as few as possible -->

 ## Screenshots
-*Any relevant screenshots which show the issue* !-->*
+<!-- Any relevant screenshots which show the issue -->

 ## Priority/Severity
 - [ ] High (anything that impacts the normal user flow or blocks app usage)
@@ -1,13 +1,14 @@
 ## Summary
-*Outline the issue being faced, and why this needs to change*
+<!-- Outline the issue being faced, and why this needs to change -->

 ## Area of the system
-*This might only be one part, but may involve multiple sections, Login/Dashboad/Terminal/Config*
+<!-- This might only be one part, but may involve multiple sections, Login/Dashboad/Terminal/Config -->

 ## How does this currently work?
+<!-- A brief description of how the functionality currently operates -->

 ## What is the desired way of working?
-*After the change, what should the process/operation be?*
+<!-- After the change, what should the process/operation be? -->

 ## Priority/Severity
 - [ ] High (This will bring a huge increase in performance/productivity/usability)
@@ -1,8 +1,8 @@
 ## Problem Statement
-*What is the issue being faced and needs addressing?*
+<!-- What is the issue being faced and needs addressing? -->

 ## Who will benefit?
-*Will this fix a problem that only one user has, or will it benefit a lot of people*
+<!-- Will this fix a problem that only one user has, or will it benefit a lot of people -->

 ## Benefits and risks
 What benefits does this bring?
@@ -16,10 +16,10 @@

 ## Proposed solution
-*How would you like to see this issue resolved?*
+<!-- How would you like to see this issue resolved? -->

 ## Examples
-*Are there any examples of this which exist in other software?*
+<!-- Are there any examples of this which exist in other software? -->

 ## Priority/Severity
 - [ ] High (This will bring a huge increase in performance/productivity/usability)
@@ -5,7 +5,7 @@ yamllint:
   stage: lint
   image: registry.gitlab.com/pipeline-components/yamllint:latest
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$CODE_QUALITY_DISABLED"
      when: never
@@ -18,7 +18,7 @@ jsonlint:
   stage: lint
   image: registry.gitlab.com/pipeline-components/jsonlint:latest
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$CODE_QUALITY_DISABLED"
      when: never
@@ -33,7 +33,7 @@ black:
   stage: lint
   image: registry.gitlab.com/pipeline-components/black:latest
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$CODE_QUALITY_DISABLED"
      when: never
@@ -46,7 +46,7 @@ pylint:
   stage: lint
   image: registry.gitlab.com/pipeline-components/pylint:latest
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$CODE_QUALITY_DISABLED"
      when: never
@@ -69,7 +69,7 @@ sonarcloud-check:
     name: sonarsource/sonar-scanner-cli:latest
     entrypoint: [""]
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$SONAR_TOKEN == null"
      when: never
@@ -91,7 +91,7 @@ lang-check:
   stage: lint
   image: alpine:latest
   tags:
-    - docker
+    - saas-linux-medium-amd64
   rules:
     - if: "$CODE_QUALITY_DISABLED"
      when: never
@@ -1,22 +1,22 @@
 ## What does this MR do and why?

-___Describe in detail what your merge request does and why.___<br>
+<!-- Describe in detail what your merge request does and why. -->
-> *Please keep this description updated with any discussion that takes place so*<br>
+<!-- Please keep this description updated with any discussion that takes place so -->
-*that reviewers can understand your intent. Keeping the description updated is*<br>
+<!-- that reviewers can understand your intent. Keeping the description updated is -->
-*especially important if they didn't participate in the discussion.*<br>
+<!-- especially important if they didn't participate in the discussion. -->


 ## Screenshots or screen recordings

-___These are strongly recommended to assist reviewers and reduce the time to merge your change.___<br>
+<!-- These are strongly recommended to assist reviewers and reduce the time to merge your change. -->
-> *Please include any relevant screenshots or screen recordings that will assist*<br>
+<!-- Please include any relevant screenshots or screen recordings that will assist, -->
-*reviewers and future readers. If you need help visually verifying the change,*<br>
+<!-- reviewers and future readers. If you need help visually verifying the change, -->
-*please leave a comment and ping a GitLab reviewer, maintainer, or MR coach.*<br>
+<!-- please leave a comment and ping a GitLab reviewer, maintainer, or MR coach. -->


 ## How to set up and validate locally

-___Numbered steps to set up and validate the change are strongly suggested.___
+<!-- Numbered steps to set up and validate the change are strongly suggested. -->


 ## MR acceptance checklist
@@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}"

 # Iterate over each .json file in the directory
 for file in "${DIR}"/*.json; do
-  # Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name
-  if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then
+  # Check if file is a regular file and not en_EN.json, humanized index and does not contain "_incomplete" in its name
+  if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then

   # Get keys and subkeys from the current file
   current_keys=$(mktemp)
.gitlab/scripts/linux_perms_fix.sh (new file, 48 lines)
@@ -0,0 +1,48 @@
#!/bin/bash

# Prompt the user for the directory path
read -p "Enter the directory path to set permissions (/var/opt/minecraft/crafty): " directory_path

# Count the total number of directories
total_dirs=$(find "$directory_path" -type d 2>/dev/null | wc -l)

# Count the total number of files
total_files=$(find "$directory_path" -type f 2>/dev/null | wc -l)

# Initialize a counter for directories and files
dir_count=0
file_count=0

# Function to print progress
print_progress() {
    echo -ne "\rDirectories: $dir_count/$total_dirs Files: $file_count/$total_files"
}

# Check if the script is running within a Docker container
if [ -f "/.dockerenv" ]; then
    echo "Script is running within a Docker container. Exiting with error."
    exit 1 # Exit with an error code if running in Docker
else
    echo "Script is not running within a Docker container. Executing permissions changes..."

    # Run the commands to set permissions for directories
    echo "Changing permissions for directories:"
    for dir in $(find "$directory_path" -type d 2>/dev/null); do
        if [ -e "$dir" ]; then
            sudo chmod 700 "$dir" && ((dir_count++))
        fi
        print_progress
    done

    # Run the commands to set permissions for files
    echo -e "\nChanging permissions for files:"
    for file in $(find "$directory_path" -type f 2>/dev/null); do
        if [ -e "$file" ]; then
            sudo chmod 644 "$file" && ((file_count++))
        fi
        print_progress
    done
    echo "You will now need to execute a chmod +x on all bedrock executables"
fi

echo "" # Adding a new line after the loop for better readability
@@ -44,6 +44,7 @@ def main():
             if (
                 "_incomplete" not in file
                 and file != "en_EN.json"
+                and file != "humanized_index.json"
                 and file.endswith(".json")
             ):
                 file_path = os.path.join(root, file)
CHANGELOG.md (111 lines changed)
@@ -1,20 +1,125 @@
 # Changelog
-## --- [4.2.4] - 2023/TBD
+## --- [4.4.2] - 2024/TBD
 ### New features
 TBD
+### Bug fixes
+TBD
+### Tweaks
+TBD
+### Lang
+TBD
+<br><br>
+
+## --- [4.4.1] - 2024/08/06
+### Patch Fixes
+- Migrations | Fix orphan backup configurations crashing migration operation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+- Migrations | Fix missing default configuration if no server backup config exists during the migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+- Migrations | Fix extended runtime on move procedure during migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
+
+**-----------------------------------------------------------------------------**
+
+**Initial release was reverted for patching (See Merge Request: [!784](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/784))** *2024/07/28*
+
+**-----------------------------------------------------------------------------**
+### Refactor
+- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711))
+- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
+### Bug fixes
+- Fix zip imports so the root dir selection is functional ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764))
+- Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768))
+- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774))
+- Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771))
+- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775))
+- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765))
+- Remove unused and problematic "dropdown-menu" ident from [!722](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) CSS ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/782))
+### Tweaks
+- Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760))
+- Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766))
+- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767))
+- Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769))
+- Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770))
+- Add better feedback on when errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Workaround cpu_freq call catching on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776))
+- Change Role selector in server wizard to be a filter list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772))
+### Lang
+- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773))
+- Add remaining `he_IL`, `th_TH` translations from **4.4.0** Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
+- Fix `fr_FR` syntax issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/780) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/778))
+- Add ru_RU Translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/779))
+- Add `th_TH` translations for [!772](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/781))
+<br><br>
+
+## --- [4.4.0] - 2024/05/11
+### Refactor
+- Refactor API keys "super user" to "full access" ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/731) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/759))
+- Refactor SBuilder to use Big Bucket Svc ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/755))
+### Bug fixes
+- Reset query arguments on login if `?next` is not available ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/750))
+- Fix child schedule failing to load after del parent ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/753))
+### Tweaks
+- Add link to go back to dashboard on error page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/743))
+- Set audit logging to logfile instead of DB ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/751))
+### Lang
+- Changes of phrase in `cs_CS` translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/749))
+<br><br>
+
+## --- [4.3.2] - 2024/04/07
+### Refactor
+- Refactor ServerJars caching and move to api.serverjars.com ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/744) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/746))
+### Bug fixes
+- Fix migrator issue when jumping versions ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/734))
+- Fix backend issue causing error when restoring backups in 4.3.x ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/736))
+- Fix backend issue causing error when cloning servers in 4.3.x ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/741))
+- Bump orjson for CVE-2024-27454 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/747))
+- Fix calling of orjson JSONDecodeError class ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/747))
+- Fix stack on Crafty permissions route request in API ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/745))
+### Tweaks
+- Clean up remaining http handler references ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/733))
+- Remove version disclosure on login page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/737))
+- Add openjdk-21 for recent versions of MC ([Commit](https://gitlab.com/crafty-controller/crafty-4/-/commit/77b0c2c9d2eac124a7504a3d3916fa22d29fa9d1))
+### Lang
+- Update `it_IT, cs_CS` ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/739) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/742))
+<br><br>
+
+## --- [4.3.1] - 2024/03/18
+### Bug fixes
+- Fix Server ID Rework for backups, schedules, and roles (INT ID to UUID migration) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/729))
+### Tweaks
+- Remove http re-direct handler. Users should implement nginx configurations for port 80 redirects ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/730))
+
+<br><br>
+
+## --- [4.3.0] - 2024/03/09
+### Breaking Changes
+- This release includes database migrations that are not revertable. Once you update to this version you will not be able to rollback to a previous version.
+- In this release, we've implemented a breaking change to enhance server identification within Crafty: instead of relying on numerical integers (1, 2, 3, etc.), Servers are now uniquely identified by their UUIDs. Please adapt your API clients accordingly.
+
 ### Refactor
 - Refactor remote file downloads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
 ### Bug fixes
 - Fix Bedrock cert issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/719))
 - Make sure default.json is read from correct location ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/714))
 - Do not allow users at server limit to clone servers ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/718))
+- Fix bug where you cannot get to config with unloaded server ([Commit](https://gitlab.com/crafty-controller/crafty-4/-/commit/9de08973b6bb2ddf91283c5c6b0e189ff34f7e24))
+- Fix forge install v1.20, 1.20.1 and 1.20.2 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/710))
+- Fix Sanitisation on Passwords ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/715) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/725))
+- Fix `Upload Imports` on unix systems, that have a space in the root dir name ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/722))
+- Fix Bedrock downloads, add `www` to download URL ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/723))
+- Fire backup webhook 'after' backup has finished ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/727))
 ### Tweaks
 - Bump pyOpenSSL & cryptography for CVE-2024-0727, CVE-2023-50782 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/716))
+- Bump cryptography for CVE-2024-26130 ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/724))
 ### Lang
-TBD
+- Update `de_DE, en_EN, es_ES, fr_FR, he_IL, lol_EN, lv_LV, nl_BE pl_PL, th_TH, tr_TR, uk_UA, zh_CN` translations for `4.3.0` ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/715))
 <br><br>

-## --- [4.2.3] - 2023/02/02
+## --- [4.2.3] - 2024/02/02
 ### New features
 - Use Papermc Group's API for `paper` & `folia` builds in server builder ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/688))
 - Allow omission of player count from Dashboard (e.g. for proxy servers) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/692))
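The 4.3.0 breaking change above means API clients must address servers by UUID string rather than by integer ID. The sketch below is illustrative only; the base URL, endpoint path, and token handling are assumptions for the example and are not taken from the changelog or the Crafty documentation.

```python
import requests

BASE_URL = "https://crafty.example.com"  # assumed instance URL
API_TOKEN = "<api token>"                # assumed auth scheme
server_id = "d3f2b8e0-0000-4c56-9abc-7890deadbeef"  # UUID string, no longer e.g. 3

# Hypothetical request shape: the only point being made is that the server
# identifier in the path is now a UUID string.
response = requests.get(
    f"{BASE_URL}/api/v2/servers/{server_id}",
    headers={"Authorization": f"Bearer {API_TOKEN}"},
    timeout=5,
)
response.raise_for_status()
print(response.json())
```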
@@ -25,6 +25,7 @@ RUN apt-get update \
         openjdk-8-jre-headless \
         openjdk-11-jre-headless \
         openjdk-17-jre-headless \
+        openjdk-21-jre-headless \
         lib32stdc++6 \
         tzdata \
     && apt-get autoremove \
@@ -1,5 +1,5 @@
 [![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
-# Crafty Controller 4.2.4
+# Crafty Controller 4.4.2
 > Python based Control Panel for your Minecraft Server

 ## What is Crafty Controller?
@@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge

 from app.classes.models.management import HelpersManagement, HelpersWebhooks
 from app.classes.models.servers import HelperServers
+from app.classes.shared.helpers import Helpers

 logger = logging.getLogger(__name__)

@@ -75,7 +76,7 @@ class ManagementController:
     # Commands Methods
     # **********************************************************************************

-    def send_command(self, user_id, server_id, remote_ip, command):
+    def send_command(self, user_id, server_id, remote_ip, command, action_id=None):
         server_name = HelperServers.get_server_friendly_name(server_id)

         # Example: Admin issued command start_server for server Survival
@@ -86,7 +87,12 @@ class ManagementController:
             remote_ip,
         )
         self.queue_command(
-            {"server_id": server_id, "user_id": user_id, "command": command}
+            {
+                "server_id": server_id,
+                "user_id": user_id,
+                "command": command,
+                "action_id": action_id,
+            }
         )

     def queue_command(self, command_data):
@@ -95,9 +101,6 @@ class ManagementController:
     # **********************************************************************************
     # Audit_Log Methods
     # **********************************************************************************
-    @staticmethod
-    def get_activity_log():
-        return HelpersManagement.get_activity_log()

     def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
         return self.management_helper.add_to_audit_log(
@@ -126,6 +129,7 @@ class ManagementController:
         cron_string="* * * * *",
         parent=None,
         delay=0,
+        action_id=None,
     ):
         return HelpersManagement.create_scheduled_task(
             server_id,
@@ -140,6 +144,7 @@ class ManagementController:
             cron_string,
             parent,
             delay,
+            action_id,
         )

     @staticmethod
@@ -178,34 +183,47 @@ class ManagementController:
     # Backups Methods
     # **********************************************************************************
     @staticmethod
-    def get_backup_config(server_id):
-        return HelpersManagement.get_backup_config(server_id)
-
-    def set_backup_config(
-        self,
-        server_id: int,
-        backup_path: str = None,
-        max_backups: int = None,
-        excluded_dirs: list = None,
-        compress: bool = False,
-        shutdown: bool = False,
-        before: str = "",
-        after: str = "",
-    ):
-        return self.management_helper.set_backup_config(
-            server_id,
-            backup_path,
-            max_backups,
-            excluded_dirs,
-            compress,
-            shutdown,
-            before,
-            after,
+    def get_backup_config(backup_id):
+        return HelpersManagement.get_backup_config(backup_id)
+
+    @staticmethod
+    def get_backups_by_server(server_id, model=False):
+        return HelpersManagement.get_backups_by_server(server_id, model)
+
+    @staticmethod
+    def delete_backup_config(backup_id):
+        HelpersManagement.remove_backup_config(backup_id)
+
+    @staticmethod
+    def update_backup_config(backup_id, updates):
+        if "backup_location" in updates:
+            updates["backup_location"] = Helpers.wtol_path(updates["backup_location"])
+        return HelpersManagement.update_backup_config(backup_id, updates)
+
+    def add_backup_config(self, data) -> str:
+        if "backup_location" in data:
+            data["backup_location"] = Helpers.wtol_path(data["backup_location"])
+        return self.management_helper.add_backup_config(data)
+
+    def add_default_backup_config(self, server_id, backup_path):
+        return self.management_helper.add_backup_config(
+            {
+                "backup_name": "Default Backup",
+                "backup_location": Helpers.wtol_path(backup_path),
+                "max_backups": 0,
+                "before": "",
+                "after": "",
+                "compress": False,
+                "shutdown": False,
+                "server_id": server_id,
+                "excluded_dirs": [],
+                "default": True,
+            }
         )

     @staticmethod
-    def get_excluded_backup_dirs(server_id: int):
-        return HelpersManagement.get_excluded_backup_dirs(server_id)
+    def get_excluded_backup_dirs(backup_id: int):
+        return HelpersManagement.get_excluded_backup_dirs(backup_id)

     def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
         self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)
@@ -17,6 +17,10 @@ class ServerPermsController:
     def get_server_user_list(server_id):
         return PermissionsServers.get_server_user_list(server_id)

+    @staticmethod
+    def get_permissions(permissions_mask):
+        return PermissionsServers.get_permissions(permissions_mask)
+
     @staticmethod
     def list_defined_permissions():
         permissions_list = PermissionsServers.get_permissions_list()
@@ -47,7 +51,7 @@ class ServerPermsController:
                 new_server_id,
                 role.role_id,
                 PermissionsServers.get_permissions_mask(
-                    int(role.role_id), int(old_server_id)
+                    int(role.role_id), old_server_id
                 ),
             )
             # Permissions_Servers.add_role_server(
@@ -61,6 +65,22 @@ class ServerPermsController:
     def get_permissions_mask(role_id, server_id):
         return PermissionsServers.get_permissions_mask(role_id, server_id)

+    @staticmethod
+    def get_lowest_api_perm_mask(user_server_permissions_mask, api_key_permssions_mask):
+        mask = ""
+        # If this isn't an API key we'll know the request came from basic
+        # authentication and ignore the API key permissions mask.
+        if not api_key_permssions_mask:
+            return user_server_permissions_mask
+        for _index, (user_perm, api_perm) in enumerate(
+            zip(user_server_permissions_mask, api_key_permssions_mask)
+        ):
+            if user_perm == "1" and api_perm == "1":
+                mask += "1"
+            else:
+                mask += "0"
+        return mask
+
     @staticmethod
     def set_permission(
         permission_mask, permission_tested: EnumPermissionsServer, value
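A quick illustration of the mask intersection performed by the new `get_lowest_api_perm_mask` above; the mask values here are made up for the example.

```python
user_mask = "11101"     # permissions the user holds on this server
api_key_mask = "10111"  # permissions granted to the API key

# The helper keeps a "1" only where both masks have one; with no API key mask
# it simply returns the user mask unchanged.
effective = "".join(
    "1" if user_bit == "1" and api_bit == "1" else "0"
    for user_bit, api_bit in zip(user_mask, api_key_mask)
)
assert effective == "10101"
```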
@@ -82,6 +102,11 @@ class ServerPermsController:
     def get_api_key_permissions_list(key: ApiKeys, server_id: str):
         return PermissionsServers.get_api_key_permissions_list(key, server_id)

+    @staticmethod
+    def get_user_permissions_mask(user_id: str, server_id: str):
+        user = HelperUsers.get_user_model(user_id)
+        return PermissionsServers.get_user_permissions_mask(user, server_id)
+
     @staticmethod
     def get_authorized_servers_stats_from_roles(user_id):
         user_roles = HelperUsers.get_user_roles_id(user_id)
@@ -48,7 +48,6 @@ class ServersController(metaclass=Singleton):
         name: str,
         server_uuid: str,
         server_dir: str,
-        backup_path: str,
         server_command: str,
         server_file: str,
         server_log_file: str,
@@ -81,10 +80,9 @@ class ServersController(metaclass=Singleton):
             PeeweeException: If the server already exists
         """
         return HelperServers.create_server(
-            name,
             server_uuid,
+            name,
             server_dir,
-            backup_path,
             server_command,
             server_file,
             server_log_file,
@@ -150,8 +148,7 @@ class ServersController(metaclass=Singleton):
         PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
         # Remove roles from server
         PermissionsServers.remove_roles_of_server(server_id)
-        # Remove backup configs tied to server
-        self.management_helper.remove_backup_config(server_id)
+        self.management_helper.remove_all_server_backups(server_id)
         # Finally remove server
         self.servers_helper.remove_server(server_id)

@@ -163,9 +160,9 @@ class ServersController(metaclass=Singleton):
     # Servers Methods
     # **********************************************************************************

-    def get_server_instance_by_id(self, server_id: t.Union[str, int]) -> ServerInstance:
+    def get_server_instance_by_id(self, server_id: t.Union[str, str]) -> ServerInstance:
         for server in self.servers_list:
-            if int(server["server_id"]) == int(server_id):
+            if server["server_id"] == server_id:
                 return server["server_obj"]

         logger.warning(f"Unable to find server object for server id {server_id}")
@@ -52,9 +52,10 @@ class UsersController:
                 },
                 "password": {
                     "type": "string",
-                    "minLength": 8,
+                    "minLength": self.helper.minimum_password_length,
                     "examples": ["crafty"],
                     "title": "Password",
+                    "error": "passLength",
                 },
                 "email": {
                     "type": "string",
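For context on the `minLength` change above, here is a minimal, self-contained sketch of how a JSON-schema `minLength` rule rejects a short password. It uses the generic `jsonschema` library rather than Crafty's own validator wiring, and the threshold of 8 is only an example; the `"error": "passLength"` entry in the diff is Crafty's own message key and is not reproduced here.

```python
from jsonschema import ValidationError, validate

password_schema = {
    "type": "object",
    "properties": {"password": {"type": "string", "minLength": 8}},
}

try:
    validate({"password": "short"}, password_schema)
except ValidationError as exc:
    # e.g. "'short' is too short" -- the kind of failure Crafty surfaces
    # to the user via its own error-message key.
    print(exc.message)
```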
app/classes/logging/log_formatter.py (new file, 53 lines)
@@ -0,0 +1,53 @@
import logging
import logging.config
import json
from datetime import datetime


class JsonEncoderStrFallback(json.JSONEncoder):
    def default(self, o):
        try:
            return super().default(o)
        except TypeError as exc:
            if "not JSON serializable" in str(exc):
                return str(o)
            raise


class JsonEncoderDatetime(JsonEncoderStrFallback):
    def default(self, o):
        if isinstance(o, datetime):
            return o.strftime("%Y-%m-%dT%H:%M:%S%z")

        return super().default(o)


class JsonFormatter(logging.Formatter):
    def formatTime(self, record, datefmt=None):
        """
        Override formatTime to customize the time format.
        """
        timestamp = datetime.fromtimestamp(record.created)
        if datefmt:
            # Use the specified date format
            return timestamp.strftime(datefmt)
        # Default date format: YYYY-MM-DD HH:MM:SS,mmm
        secs = int(record.msecs)
        return f"{timestamp.strftime('%Y-%m-%d %H:%M:%S')},{secs:03d}"

    def format(self, record):
        log_data = {
            "level": record.levelname,
            "time": self.formatTime(record),
            "log_msg": record.getMessage(),
        }

        # Filter out standard log record attributes and include only custom ones
        custom_attrs = ["user_name", "user_id", "server_id", "source_ip"]
        extra_attrs = {
            key: value for key, value in record.__dict__.items() if key in custom_attrs
        }

        # Merge extra attributes with log data
        log_data.update(extra_attrs)
        return json.dumps(log_data)
app/classes/minecraft/bigbucket.py (new file, 236 lines)
@@ -0,0 +1,236 @@
import os
import json
import threading
import time
import logging
from datetime import datetime
import requests

from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.websocket_manager import WebSocketManager

logger = logging.getLogger(__name__)
# Temp type var until sjars restores generic fetchTypes0


class BigBucket:
    def __init__(self, helper):
        self.helper = helper
        # remove any trailing slash from config.json
        # url since we add it on all the calls
        self.base_url = str(
            self.helper.get_setting("big_bucket_repo", "https://jars.arcadiatech.org")
        ).rstrip("/")

    def _read_cache(self) -> dict:
        cache_file = self.helper.big_bucket_cache
        cache = {}
        try:
            with open(cache_file, "r", encoding="utf-8") as f:
                cache = json.load(f)

        except Exception as e:
            logger.error(f"Unable to read big_bucket cache file: {e}")

        return cache

    def get_bucket_data(self):
        data = self._read_cache()
        return data.get("categories")

    def _check_bucket_alive(self) -> bool:
        logger.info("Checking Big Bucket status")

        check_url = f"{self.base_url}/healthcheck"
        try:
            response = requests.get(check_url, timeout=2)
            response_json = response.json()
            if (
                response.status_code in [200, 201]
                and response_json.get("status") == "ok"
            ):
                logger.info("Big bucket is alive and responding as expected")
                return True
        except Exception as e:
            logger.error(f"Unable to connect to big bucket due to error: {e}")
            return False

        logger.error(
            "Big bucket manifest is not available as expected or unable to contact"
        )
        return False

    def _get_big_bucket(self) -> dict:
        logger.debug("Calling for big bucket manifest.")
        try:
            response = requests.get(f"{self.base_url}/manifest.json", timeout=5)
            if response.status_code in [200, 201]:
                data = response.json()
                del data["manifest_version"]
                return data
            return {}
        except TimeoutError as e:
            logger.error(f"Unable to get jars from remote with error {e}")
            return {}

    def _refresh_cache(self):
        """
        Contains the shared logic for refreshing the cache.
        This method is called by both manual_refresh_cache and refresh_cache methods.
        """
        if not self._check_bucket_alive():
            logger.error("big bucket API is not available.")
            return False

        cache_data = {
            "last_refreshed": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"),
            "categories": self._get_big_bucket(),
        }
        try:
            with open(
                self.helper.big_bucket_cache, "w", encoding="utf-8"
            ) as cache_file:
                json.dump(cache_data, cache_file, indent=4)
                logger.info("Cache file successfully refreshed manually.")
        except Exception as e:
            logger.error(f"Failed to update cache file manually: {e}")

    def manual_refresh_cache(self):
        """
        Manually triggers the cache refresh process.
        """
        logger.info("Manual bucket cache refresh initiated.")
        self._refresh_cache()
        logger.info("Manual refresh completed.")

    def refresh_cache(self):
        """
        Automatically trigger cache refresh process based age.

        This method checks if the cache file is older than a specified number of days
        before deciding to refresh.
        """
        cache_file_path = self.helper.big_bucket_cache

        # Determine if the cache is old and needs refreshing
        cache_old = self.helper.is_file_older_than_x_days(cache_file_path)

        # debug override
        # cache_old = True

        if not self._check_bucket_alive():
            logger.error("big bucket API is not available.")
            return False

        if not cache_old:
            logger.info("Cache file is not old enough to require automatic refresh.")
            return False

        logger.info("Automatic cache refresh initiated due to old cache.")
        self._refresh_cache()

    def get_fetch_url(self, jar, server, version) -> str:
        """
        Constructs the URL for downloading a server JAR file based on the server type.
        Parameters:
        jar (str): The category of the JAR file to download.
        server (str): Server software name (e.g., "paper").
        version (str): Server version.

        Returns:
        str or None: URL for downloading the JAR file, or None if URL cannot be
        constructed or an error occurs.
        """
        try:
            # Read cache file for URL that is in a list of one item
            return self.get_bucket_data()[jar]["types"][server]["versions"][version][
                "url"
            ][0]
        except Exception as e:
            logger.error(f"An error occurred while constructing fetch URL: {e}")
            return None

    def download_jar(self, jar, server, version, path, server_id):
        update_thread = threading.Thread(
            name=f"server_download-{server_id}-{server}-{version}",
            target=self.a_download_jar,
            daemon=True,
            args=(jar, server, version, path, server_id),
        )
        update_thread.start()

    def a_download_jar(self, jar, server, version, path, server_id):
        """
        Downloads a server JAR file and performs post-download actions including
        notifying users and setting import status.

        This method waits for the server registration to complete, retrieves the
        download URL for the specified server JAR file.

        Upon successful download, it either runs the installer for
        Forge servers or simply finishes the import process for other types. It
        notifies server users about the completion of the download.

        Parameters:
        - jar (str): The category of the JAR file to download.
        - server (str): The type of server software (e.g., 'forge', 'paper').
        - version (str): The version of the server software.
        - path (str): The local filesystem path where the JAR file will be saved.
        - server_id (str): The unique identifier for the server being updated or
          imported, used for notifying users and setting the import status.

        Returns:
        - bool: True if the JAR file was successfully downloaded and saved;
          False otherwise.

        The method ensures that the server is properly registered before proceeding
        with the download and handles exceptions by logging errors and reverting
        the import status if necessary.
        """
        # delaying download for server register to finish
        time.sleep(3)

        fetch_url = self.get_fetch_url(jar, server, version)
        if not fetch_url:
            return False

        server_users = PermissionsServers.get_server_user_list(server_id)

        # Make sure the server is registered before updating its stats
        while True:
            try:
                ServersController.set_import(server_id)
                for user in server_users:
                    WebSocketManager().broadcast_user(user, "send_start_reload", {})
                break
            except Exception as ex:
                logger.debug(f"Server not registered yet. Delaying download - {ex}")

        # Initiate Download
        jar_dir = os.path.dirname(path)
        jar_name = os.path.basename(path)
        logger.info(fetch_url)
        success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)

        # Post-download actions
        if success:
            if server == "forge-installer":
                # If this is the newer Forge version, run the installer
                ServersController.finish_import(server_id, True)
            else:
                ServersController.finish_import(server_id)

            # Notify users
            for user in server_users:
                WebSocketManager().broadcast_user(
                    user, "notification", "Executable download finished"
                )
                time.sleep(3)  # Delay for user notification
                WebSocketManager().broadcast_user(user, "send_start_reload", {})
        else:
            logger.error(f"Unable to save jar to {path} due to download failure.")
            ServersController.finish_import(server_id)

        return success
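To make the lookup in `get_fetch_url()` above concrete, here is the manifest shape that the indexing path implies. The key names come directly from the code; the category, software, version, and URL values are invented placeholders, not real Big Bucket data.

```python
example_manifest = {
    "servers": {  # a "jar" category, as cached under the top-level "categories" key
        "types": {
            "paper": {  # a "server" software entry
                "versions": {
                    "1.20.4": {
                        # the URL is stored as a single-item list, hence the [0]
                        "url": ["https://jars.example.org/paper-1.20.4.jar"],
                    }
                }
            }
        }
    }
}

jar, server, version = "servers", "paper", "1.20.4"
url = example_manifest[jar]["types"][server]["versions"][version]["url"][0]
assert url.endswith("paper-1.20.4.jar")
```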
@@ -1,361 +0,0 @@
import os
import json
import threading
import time
import logging
from datetime import datetime
import requests

from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.websocket_manager import WebSocketManager

logger = logging.getLogger(__name__)
PAPERJARS = ["paper", "folia"]


class ServerJars:
    def __init__(self, helper):
        self.helper = helper
        self.base_url = "https://serverjars.com"
        self.paper_base = "https://api.papermc.io"

    @staticmethod
    def get_paper_jars():
        return PAPERJARS

    def get_paper_versions(self, project):
        """
        Retrieves a list of versions for a specified project from the PaperMC API.

        Parameters:
        project (str): The project name to query for available versions.

        Returns:
        list: A list of version strings available for the project. Returns an empty
        list if the API call fails or if no versions are found.

        This function makes a GET request to the PaperMC API to fetch available project
        versions, The versions are returned in reverse order, with the most recent
        version first.
        """
        try:
            response = requests.get(
                f"{self.paper_base}/v2/projects/{project}/", timeout=2
            )
            response.raise_for_status()
            api_data = response.json()
        except Exception as e:
            logger.error(f"Error loading project versions for {project}: {e}")
            return []

        versions = api_data.get("versions", [])
        versions.reverse()  # Ensure the most recent version comes first
        return versions

    def get_paper_build(self, project, version):
        """
        Fetches the latest build for a specified project and version from PaperMC API.

        Parameters:
        project (str): Project name, typically a server software like 'paper'.
        version (str): Project version to fetch the build number for.

        Returns:
        int or None: Latest build number if successful, None if not or on error.

        This method attempts to query the PaperMC API for the latest build and
        handles exceptions by logging errors and returning None.
        """
        try:
            response = requests.get(
                f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
                timeout=2,
            )
            response.raise_for_status()
            api_data = response.json()
        except Exception as e:
            logger.error(f"Error fetching build for {project} {version}: {e}")
            return None

        builds = api_data.get("builds", [])
        return builds[-1] if builds else None

    def get_fetch_url(self, jar, server, version):
        """
        Constructs the URL for downloading a server JAR file based on the server type.

        Supports two main types of server JAR sources:
        - ServerJars API for servers not in PAPERJARS.
        - Paper API for servers available through the Paper project.

        Parameters:
        jar (str): Name of the JAR file.
        server (str): Server software name (e.g., "paper").
        version (str): Server version.

        Returns:
        str or None: URL for downloading the JAR file, or None if URL cannot be
        constructed or an error occurs.
        """
        try:
            # Check if the server type is not specifically handled by Paper.
            if server not in PAPERJARS:
                return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"

            # For Paper servers, attempt to get the build for the specified version.
            paper_build_info = self.get_paper_build(server, version)
            if paper_build_info is None:
                # Log an error or handle the case where paper_build_info is None
                logger.error(
                    "Error: Unable to get build information for server:"
                    f" {server}, version: {version}"
                )
                return None

            build = paper_build_info.get("build")
            if not build:
                # Log an error or handle the case where build is None or not found
                logger.error(
                    f"Error: Build number not found for server:"
                    f" {server}, version: {version}"
                )
                return None

            # Construct and return the URL for downloading the Paper server JAR.
            return (
                f"{self.paper_base}/v2/projects/{server}/versions/{version}/"
                f"builds/{build}/downloads/{server}-{version}-{build}.jar"
            )
        except Exception as e:
            logger.error(f"An error occurred while constructing fetch URL: {e}")
            return None

    def _get_api_result(self, call_url: str):
        full_url = f"{self.base_url}{call_url}"

        try:
            response = requests.get(full_url, timeout=2)
            response.raise_for_status()
            api_data = json.loads(response.content)
        except Exception as e:
            logger.error(f"Unable to load {full_url} api due to error: {e}")
            return {}

        api_result = api_data.get("status")
        api_response = api_data.get("response", {})

        if api_result != "success":
            logger.error(f"Api returned a failed status: {api_result}")
            return {}

        return api_response

    def _read_cache(self):
        cache_file = self.helper.serverjar_cache
        cache = {}
        try:
            with open(cache_file, "r", encoding="utf-8") as f:
                cache = json.load(f)

        except Exception as e:
            logger.error(f"Unable to read serverjars.com cache file: {e}")

        return cache

    def get_serverjar_data(self):
        data = self._read_cache()
        return data.get("types")

    def _check_api_alive(self):
        logger.info("Checking serverjars.com API status")

        check_url = f"{self.base_url}/api/fetchTypes"
        try:
            response = requests.get(check_url, timeout=2)

            if response.status_code in [200, 201]:
                logger.info("Serverjars.com API is alive")
                return True
        except Exception as e:
            logger.error(f"Unable to connect to serverjar.com api due to error: {e}")
            return {}

        logger.error("unable to contact serverjars.com api")
        return False

    def manual_refresh_cache(self):
        cache_file = self.helper.serverjar_cache

        # debug override
        # cache_old = True

        # if the API is down... we bomb out
        if not self._check_api_alive():
            return False

        logger.info("Manual Refresh requested.")
        now = datetime.now()
        data = {
            "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
            "types": {},
        }

        jar_types = self._get_server_type_list()
        data["types"].update(jar_types)
        for s in data["types"]:
            data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
            for j in data["types"].get(s):
                versions = self._get_jar_details(j, s)
                data["types"][s].update({j: versions})
        for item in PAPERJARS:
            data["types"]["servers"][item] = self.get_paper_versions(item)
        # save our cache
        try:
            with open(cache_file, "w", encoding="utf-8") as f:
                f.write(json.dumps(data, indent=4))
            logger.info("Cache file refreshed")

        except Exception as e:
            logger.error(f"Unable to update serverjars.com cache file: {e}")

    def refresh_cache(self):
        cache_file = self.helper.serverjar_cache
        cache_old = self.helper.is_file_older_than_x_days(cache_file)

        # debug override
        # cache_old = True

        # if the API is down... we bomb out
        if not self._check_api_alive():
            return False

        logger.info("Checking Cache file age")
        # if file is older than 1 day

        if cache_old:
            logger.info("Cache file is over 1 day old, refreshing")
            now = datetime.now()
            data = {
                "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
                "types": {},
            }

            jar_types = self._get_server_type_list()
            data["types"].update(jar_types)
            for s in data["types"]:
                data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
                for j in data["types"].get(s):
|
||||||
versions = self._get_jar_details(j, s)
|
|
||||||
data["types"][s].update({j: versions})
|
|
||||||
for item in PAPERJARS:
|
|
||||||
data["types"]["servers"][item] = self.get_paper_versions(item)
|
|
||||||
# save our cache
|
|
||||||
try:
|
|
||||||
with open(cache_file, "w", encoding="utf-8") as f:
|
|
||||||
f.write(json.dumps(data, indent=4))
|
|
||||||
logger.info("Cache file refreshed")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Unable to update serverjars.com cache file: {e}")
|
|
||||||
|
|
||||||
def _get_jar_details(self, server_type, jar_type="servers"):
|
|
||||||
url = f"/api/fetchAll/{jar_type}/{server_type}"
|
|
||||||
response = self._get_api_result(url)
|
|
||||||
temp = []
|
|
||||||
for v in response:
|
|
||||||
temp.append(v.get("version"))
|
|
||||||
time.sleep(0.5)
|
|
||||||
return temp
|
|
||||||
|
|
||||||
def _get_server_type_list(self):
|
|
||||||
url = "/api/fetchTypes/"
|
|
||||||
response = self._get_api_result(url)
|
|
||||||
if "bedrock" in response.keys():
|
|
||||||
# remove pocketmine from options
|
|
||||||
del response["bedrock"]
|
|
||||||
return response
|
|
||||||
|
|
||||||
def download_jar(self, jar, server, version, path, server_id):
|
|
||||||
update_thread = threading.Thread(
|
|
||||||
name=f"server_download-{server_id}-{server}-{version}",
|
|
||||||
target=self.a_download_jar,
|
|
||||||
daemon=True,
|
|
||||||
args=(jar, server, version, path, server_id),
|
|
||||||
)
|
|
||||||
update_thread.start()
|
|
||||||
|
|
||||||
def a_download_jar(self, jar, server, version, path, server_id):
|
|
||||||
"""
|
|
||||||
Downloads a server JAR file and performs post-download actions including
|
|
||||||
notifying users and setting import status.
|
|
||||||
|
|
||||||
This method waits for the server registration to complete, retrieves the
|
|
||||||
download URL for the specified server JAR file.
|
|
||||||
|
|
||||||
Upon successful download, it either runs the installer for
|
|
||||||
Forge servers or simply finishes the import process for other types. It
|
|
||||||
notifies server users about the completion of the download.
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
- jar (str): The name of the JAR file to download.
|
|
||||||
- server (str): The type of server software (e.g., 'forge', 'paper').
|
|
||||||
- version (str): The version of the server software.
|
|
||||||
- path (str): The local filesystem path where the JAR file will be saved.
|
|
||||||
- server_id (str): The unique identifier for the server being updated or
|
|
||||||
imported, used for notifying users and setting the import status.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
- bool: True if the JAR file was successfully downloaded and saved;
|
|
||||||
False otherwise.
|
|
||||||
|
|
||||||
The method ensures that the server is properly registered before proceeding
|
|
||||||
with the download and handles exceptions by logging errors and reverting
|
|
||||||
the import status if necessary.
|
|
||||||
"""
|
|
||||||
# delaying download for server register to finish
|
|
||||||
time.sleep(3)
|
|
||||||
|
|
||||||
fetch_url = self.get_fetch_url(jar, server, version)
|
|
||||||
if not fetch_url:
|
|
||||||
return False
|
|
||||||
|
|
||||||
server_users = PermissionsServers.get_server_user_list(server_id)
|
|
||||||
|
|
||||||
# Make sure the server is registered before updating its stats
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
ServersController.set_import(server_id)
|
|
||||||
for user in server_users:
|
|
||||||
WebSocketManager().broadcast_user(user, "send_start_reload", {})
|
|
||||||
break
|
|
||||||
except Exception as ex:
|
|
||||||
logger.debug(f"Server not registered yet. Delaying download - {ex}")
|
|
||||||
|
|
||||||
# Initiate Download
|
|
||||||
jar_dir = os.path.dirname(path)
|
|
||||||
jar_name = os.path.basename(path)
|
|
||||||
logger.info(fetch_url)
|
|
||||||
success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
|
|
||||||
|
|
||||||
# Post-download actions
|
|
||||||
if success:
|
|
||||||
if server == "forge":
|
|
||||||
# If this is the newer Forge version, run the installer
|
|
||||||
ServersController.finish_import(server_id, True)
|
|
||||||
else:
|
|
||||||
ServersController.finish_import(server_id)
|
|
||||||
|
|
||||||
# Notify users
|
|
||||||
for user in server_users:
|
|
||||||
WebSocketManager().broadcast_user(
|
|
||||||
user, "notification", "Executable download finished"
|
|
||||||
)
|
|
||||||
time.sleep(3) # Delay for user notification
|
|
||||||
WebSocketManager().broadcast_user(user, "send_start_reload", {})
|
|
||||||
else:
|
|
||||||
logger.error(f"Unable to save jar to {path} due to download failure.")
|
|
||||||
ServersController.finish_import(server_id)
|
|
||||||
|
|
||||||
return success
|
|
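For context, the Paper branch of the fetch flow above boils down to one call against the PaperMC v2 builds endpoint. A minimal standalone sketch, assuming the public `https://api.papermc.io` base (the class keeps this in `self.paper_base`, which is defined outside this excerpt):

```python
# Standalone sketch of the PaperMC build lookup performed by
# get_paper_build()/get_fetch_url(). PAPER_BASE is an assumption.
import requests

PAPER_BASE = "https://api.papermc.io"  # assumed equivalent of self.paper_base


def latest_paper_jar_url(project: str, version: str, timeout: int = 2):
    """Return the newest build's download URL for a project/version, or None."""
    builds_url = f"{PAPER_BASE}/v2/projects/{project}/versions/{version}/builds/"
    try:
        response = requests.get(builds_url, timeout=timeout)
        response.raise_for_status()
        builds = response.json().get("builds", [])
    except requests.RequestException:
        return None
    if not builds:
        return None
    build = builds[-1]["build"]  # builds are returned oldest-first
    return (
        f"{PAPER_BASE}/v2/projects/{project}/versions/{version}/"
        f"builds/{build}/downloads/{project}-{version}-{build}.jar"
    )


# Example: latest_paper_jar_url("paper", "1.20.4")
```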
@@ -86,7 +86,7 @@ class Stats:
     def get_node_stats(self) -> NodeStatsReturnDict:
         try:
             cpu_freq = psutil.cpu_freq()
-        except (NotImplementedError, FileNotFoundError):
+        except (NotImplementedError, AttributeError, FileNotFoundError):
             cpu_freq = None
         if cpu_freq is None:
             cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1)
@@ -187,7 +187,7 @@ class PermissionsCrafty:
     @staticmethod
     def get_api_key_permissions_list(key: ApiKeys):
         user = HelperUsers.get_user(key.user_id)
-        if user["superuser"] and key.superuser:
+        if user["superuser"] and key.full_access:
             return PermissionsCrafty.get_permissions_list()
         if user["superuser"]:
             # User is superuser but API key isn't
@@ -16,28 +16,11 @@ from app.classes.models.base_model import BaseModel
 from app.classes.models.users import HelperUsers
 from app.classes.models.servers import Servers
 from app.classes.models.server_permissions import PermissionsServers
-from app.classes.shared.main_models import DatabaseShortcuts
+from app.classes.shared.helpers import Helpers
 from app.classes.shared.websocket_manager import WebSocketManager

 logger = logging.getLogger(__name__)
+auth_logger = logging.getLogger("audit_log")

-
-# **********************************************************************************
-# Audit_Log Class
-# **********************************************************************************
-class AuditLog(BaseModel):
-    audit_id = AutoField()
-    created = DateTimeField(default=datetime.datetime.now)
-    user_name = CharField(default="")
-    user_id = IntegerField(default=0, index=True)
-    source_ip = CharField(default="127.0.0.1")
-    server_id = IntegerField(
-        default=None, index=True
-    )  # When auditing global events, use server ID 0
-    log_msg = TextField(default="")
-
-    class Meta:
-        table_name = "audit_log"
-
-
 # **********************************************************************************
@@ -79,7 +62,7 @@ class HostStats(BaseModel):
 # **********************************************************************************
 class Webhooks(BaseModel):
     id = AutoField()
-    server_id = IntegerField(null=True)
+    server_id = ForeignKeyField(Servers, backref="webhook_server", null=True)
     name = CharField(default="Custom Webhook", max_length=64)
     url = CharField(default="")
     webhook_type = CharField(default="Custom")
@@ -105,6 +88,7 @@ class Schedules(BaseModel):
     interval_type = CharField()
     start_time = CharField(null=True)
     command = CharField(null=True)
+    action_id = CharField(null=True)
     name = CharField()
     one_time = BooleanField(default=False)
     cron_string = CharField(default="")
@@ -120,13 +104,19 @@ class Schedules(BaseModel):
 # Backups Class
 # **********************************************************************************
 class Backups(BaseModel):
+    backup_id = CharField(primary_key=True, default=Helpers.create_uuid)
+    backup_name = CharField(default="New Backup")
+    backup_location = CharField(default="")
     excluded_dirs = CharField(null=True)
-    max_backups = IntegerField()
+    max_backups = IntegerField(default=0)
     server_id = ForeignKeyField(Servers, backref="backups_server")
     compress = BooleanField(default=False)
     shutdown = BooleanField(default=False)
     before = CharField(default="")
     after = CharField(default="")
+    default = BooleanField(default=False)
+    status = CharField(default='{"status": "Standby", "message": ""}')
+    enabled = BooleanField(default=True)

     class Meta:
         table_name = "backups"
@@ -149,10 +139,6 @@ class HelpersManagement:
     # **********************************************************************************
     # Audit_Log Methods
     # **********************************************************************************
-    @staticmethod
-    def get_activity_log():
-        query = AuditLog.select()
-        return DatabaseShortcuts.return_db_rows(query)
-
     def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
         logger.debug(f"Adding to audit log User:{user_id} - Message: {log_msg} ")
@@ -166,50 +152,28 @@ class HelpersManagement:
                 WebSocketManager().broadcast_user(user, "notification", audit_msg)
             except Exception as e:
                 logger.error(f"Error broadcasting to user {user} - {e}")
-        AuditLog.insert(
-            {
-                AuditLog.user_name: user_data["username"],
-                AuditLog.user_id: user_id,
-                AuditLog.server_id: server_id,
-                AuditLog.log_msg: audit_msg,
-                AuditLog.source_ip: source_ip,
-            }
-        ).execute()
-        # deletes records when there's more than 300
-        ordered = AuditLog.select().order_by(+AuditLog.created)
-        for item in ordered:
-            if not self.helper.get_setting("max_audit_entries"):
-                max_entries = 300
-            else:
-                max_entries = self.helper.get_setting("max_audit_entries")
-            if AuditLog.select().count() > max_entries:
-                AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
-            else:
-                return
+        auth_logger.info(
+            str(log_msg),
+            extra={
+                "user_name": user_data["username"],
+                "user_id": user_id,
+                "server_id": server_id,
+                "source_ip": source_ip,
+            },
+        )

     def add_to_audit_log_raw(self, user_name, user_id, server_id, log_msg, source_ip):
-        AuditLog.insert(
-            {
-                AuditLog.user_name: user_name,
-                AuditLog.user_id: user_id,
-                AuditLog.server_id: server_id,
-                AuditLog.log_msg: log_msg,
-                AuditLog.source_ip: source_ip,
-            }
-        ).execute()
-        # deletes records when there's more than 300
-        ordered = AuditLog.select().order_by(+AuditLog.created)
-        for item in ordered:
-            # configurable through app/config/config.json
-            if not self.helper.get_setting("max_audit_entries"):
-                max_entries = 300
-            else:
-                max_entries = self.helper.get_setting("max_audit_entries")
-            if AuditLog.select().count() > max_entries:
-                AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
-            else:
-                return
+        if isinstance(server_id, Servers) and server_id is not None:
+            server_id = server_id.server_id
+        auth_logger.info(
+            str(log_msg),
+            extra={
+                "user_name": user_name,
+                "user_id": user_id,
+                "server_id": server_id,
+                "source_ip": source_ip,
+            },
+        )

     @staticmethod
     def create_crafty_row():
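The refactor above replaces the database-backed AuditLog table with a dedicated "audit_log" logger that receives structured fields through `extra`. A minimal sketch of how such a logger can be wired up; the handler/formatter shown here is an assumption, since Crafty's real logging configuration lives elsewhere:

```python
# Minimal sketch of a dedicated audit logger fed via `extra`, mirroring the
# auth_logger.info(msg, extra={...}) calls above. Handler/formatter choices
# are illustrative only.
import logging

audit_logger = logging.getLogger("audit_log")
audit_logger.setLevel(logging.INFO)

handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter(
        "%(asctime)s [user=%(user_name)s id=%(user_id)s "
        "server=%(server_id)s ip=%(source_ip)s] %(message)s"
    )
)
audit_logger.addHandler(handler)

# Every record must supply the fields the format string references.
audit_logger.info(
    "Backup started",
    extra={
        "user_name": "admin",
        "user_id": 1,
        "server_id": "1234-abcd",
        "source_ip": "127.0.0.1",
    },
)
```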
@@ -307,6 +271,7 @@ class HelpersManagement:
         cron_string="* * * * *",
         parent=None,
         delay=0,
+        action_id=None,
     ):
         sch_id = Schedules.insert(
             {
@@ -317,6 +282,7 @@ class HelpersManagement:
                 Schedules.interval_type: interval_type,
                 Schedules.start_time: start_time,
                 Schedules.command: command,
+                Schedules.action_id: action_id,
                 Schedules.name: name,
                 Schedules.one_time: one_time,
                 Schedules.cron_string: cron_string,
@@ -337,7 +303,7 @@ class HelpersManagement:

     @staticmethod
     def delete_scheduled_task_by_server(server_id):
-        Schedules.delete().where(Schedules.server_id == int(server_id)).execute()
+        Schedules.delete().where(Schedules.server_id == server_id).execute()

     @staticmethod
     def get_scheduled_task(schedule_id):
@@ -379,133 +345,83 @@ class HelpersManagement:
     # Backups Methods
     # **********************************************************************************
     @staticmethod
-    def get_backup_config(server_id):
-        try:
-            row = (
-                Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
-            )
-            conf = {
-                "backup_path": row.server_id.backup_path,
-                "excluded_dirs": row.excluded_dirs,
-                "max_backups": row.max_backups,
-                "server_id": row.server_id_id,
-                "compress": row.compress,
-                "shutdown": row.shutdown,
-                "before": row.before,
-                "after": row.after,
-            }
-        except IndexError:
-            conf = {
-                "backup_path": None,
-                "excluded_dirs": None,
-                "max_backups": 0,
-                "server_id": server_id,
-                "compress": False,
-                "shutdown": False,
-                "before": "",
-                "after": "",
-            }
-        return conf
+    def get_backup_config(backup_id):
+        return model_to_dict(Backups.get(Backups.backup_id == backup_id))

     @staticmethod
-    def remove_backup_config(server_id):
+    def get_backups_by_server(server_id, model=False):
+        if not model:
+            data = {}
+            for backup in (
+                Backups.select().where(Backups.server_id == server_id).execute()
+            ):
+                data[str(backup.backup_id)] = {
+                    "backup_id": backup.backup_id,
+                    "backup_name": backup.backup_name,
+                    "backup_location": backup.backup_location,
+                    "excluded_dirs": backup.excluded_dirs,
+                    "max_backups": backup.max_backups,
+                    "server_id": backup.server_id_id,
+                    "compress": backup.compress,
+                    "shutdown": backup.shutdown,
+                    "before": backup.before,
+                    "after": backup.after,
+                    "default": backup.default,
+                    "enabled": backup.enabled,
+                }
+        else:
+            data = Backups.select().where(Backups.server_id == server_id).execute()
+        return data
+
+    @staticmethod
+    def get_default_server_backup(server_id: str) -> dict:
+        print(server_id)
+        bu_query = Backups.select().where(
+            Backups.server_id == server_id,
+            Backups.default == True,  # pylint: disable=singleton-comparison
+        )
+        for item in bu_query:
+            print("HI", item)
+        backup_model = bu_query.first()
+
+        if backup_model:
+            return model_to_dict(backup_model)
+        raise IndexError
+
+    @staticmethod
+    def remove_all_server_backups(server_id):
         Backups.delete().where(Backups.server_id == server_id).execute()

-    def set_backup_config(
-        self,
-        server_id: int,
-        backup_path: str = None,
-        max_backups: int = None,
-        excluded_dirs: list = None,
-        compress: bool = False,
-        shutdown: bool = False,
-        before: str = "",
-        after: str = "",
-    ):
-        logger.debug(f"Updating server {server_id} backup config with {locals()}")
-        if Backups.select().where(Backups.server_id == server_id).exists():
-            new_row = False
-            conf = {}
-        else:
-            conf = {
-                "excluded_dirs": None,
-                "max_backups": 0,
-                "server_id": server_id,
-                "compress": False,
-                "shutdown": False,
-                "before": "",
-                "after": "",
-            }
-            new_row = True
-        if max_backups is not None:
-            conf["max_backups"] = max_backups
-        if excluded_dirs is not None:
-            dirs_to_exclude = ",".join(excluded_dirs)
+    @staticmethod
+    def remove_backup_config(backup_id):
+        Backups.delete().where(Backups.backup_id == backup_id).execute()
+
+    def add_backup_config(self, conf) -> str:
+        if "excluded_dirs" in conf:
+            dirs_to_exclude = ",".join(conf["excluded_dirs"])
             conf["excluded_dirs"] = dirs_to_exclude
-        conf["compress"] = compress
-        conf["shutdown"] = shutdown
-        conf["before"] = before
-        conf["after"] = after
-        if not new_row:
-            with self.database.atomic():
-                if backup_path is not None:
-                    server_rows = (
-                        Servers.update(backup_path=backup_path)
-                        .where(Servers.server_id == server_id)
-                        .execute()
-                    )
-                else:
-                    server_rows = 0
-                backup_rows = (
-                    Backups.update(conf).where(Backups.server_id == server_id).execute()
-                )
-            logger.debug(
-                f"Updating existing backup record. "
-                f"{server_rows}+{backup_rows} rows affected"
-            )
-        else:
-            with self.database.atomic():
-                conf["server_id"] = server_id
-                if backup_path is not None:
-                    Servers.update(backup_path=backup_path).where(
-                        Servers.server_id == server_id
-                    )
-                Backups.create(**conf)
-            logger.debug("Creating new backup record.")
+        if len(self.get_backups_by_server(conf["server_id"], True)) <= 0:
+            conf["default"] = True
+        backup = Backups.create(**conf)
+        logger.debug("Creating new backup record.")
+        return backup.backup_id

     @staticmethod
-    def get_excluded_backup_dirs(server_id: int):
-        excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
+    def update_backup_config(backup_id, data):
+        if "excluded_dirs" in data:
+            dirs_to_exclude = ",".join(data["excluded_dirs"])
+            data["excluded_dirs"] = dirs_to_exclude
+        Backups.update(**data).where(Backups.backup_id == backup_id).execute()
+
+    @staticmethod
+    def get_excluded_backup_dirs(backup_id: int):
+        excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"]
         if excluded_dirs is not None and excluded_dirs != "":
             dir_list = excluded_dirs.split(",")
         else:
             dir_list = []
         return dir_list

-    def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
-        dir_list = self.get_excluded_backup_dirs(server_id)
-        if dir_to_add not in dir_list:
-            dir_list.append(dir_to_add)
-            excluded_dirs = ",".join(dir_list)
-            self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
-        else:
-            logger.debug(
-                f"Not adding {dir_to_add} to excluded directories - "
-                f"already in the excluded directory list for server ID {server_id}"
-            )
-
-    def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
-        dir_list = self.get_excluded_backup_dirs(server_id)
-        if dir_to_del in dir_list:
-            dir_list.remove(dir_to_del)
-            excluded_dirs = ",".join(dir_list)
-            self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
-        else:
-            logger.debug(
-                f"Not removing {dir_to_del} from excluded directories - "
-                f"not in the excluded directory list for server ID {server_id}"
-            )
-

 # **********************************************************************************
 # Webhooks Class
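The Backups table now carries its own UUID primary key plus a `default` flag, and `get_default_server_backup` selects the one row flagged as default for a server. A hedged, standalone Peewee sketch of that query shape against an in-memory SQLite database; the field names mirror the model above, everything else is simplified for illustration:

```python
# Standalone sketch of the "one default backup per server" query pattern.
import uuid

from peewee import SqliteDatabase, Model, CharField, BooleanField
from playhouse.shortcuts import model_to_dict

db = SqliteDatabase(":memory:")


class Backups(Model):
    backup_id = CharField(primary_key=True, default=lambda: str(uuid.uuid4()))
    backup_name = CharField(default="New Backup")
    server_id = CharField()
    default = BooleanField(default=False)

    class Meta:
        database = db


db.create_tables([Backups])
Backups.create(server_id="srv-1", backup_name="Nightly", default=True)

row = (
    Backups.select()
    .where(
        Backups.server_id == "srv-1",
        Backups.default == True,  # pylint: disable=singleton-comparison
    )
    .first()
)
print(model_to_dict(row) if row else None)
```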
@@ -264,7 +264,7 @@ class PermissionsServers:
     @staticmethod
     def get_api_key_permissions_list(key: ApiKeys, server_id: str):
         user = HelperUsers.get_user(key.user_id)
-        if user["superuser"] and key.superuser:
+        if user["superuser"] and key.full_access:
             return PermissionsServers.get_permissions_list()
         roles_list = HelperUsers.get_user_roles_id(user["user_id"])
         role_server = (
@@ -71,7 +71,7 @@ class HelperServerStats:
     database = None

     def __init__(self, server_id):
-        self.server_id = int(server_id)
+        self.server_id = server_id
         self.init_database(self.server_id)

     def init_database(self, server_id):
@@ -3,7 +3,6 @@ import datetime
 import typing as t
 from peewee import (
     CharField,
-    AutoField,
     DateTimeField,
     BooleanField,
     IntegerField,
@@ -13,6 +12,9 @@ from playhouse.shortcuts import model_to_dict
 from app.classes.shared.main_models import DatabaseShortcuts
 from app.classes.models.base_model import BaseModel

+# from app.classes.models.users import Users
+from app.classes.shared.helpers import Helpers
+
 logger = logging.getLogger(__name__)


@@ -20,12 +22,10 @@ logger = logging.getLogger(__name__)
 # Servers Model
 # **********************************************************************************
 class Servers(BaseModel):
-    server_id = AutoField()
+    server_id = CharField(primary_key=True, default=Helpers.create_uuid())
     created = DateTimeField(default=datetime.datetime.now)
-    server_uuid = CharField(default="", index=True)
     server_name = CharField(default="Server", index=True)
     path = CharField(default="")
-    backup_path = CharField(default="")
     executable = CharField(default="")
     log_path = CharField(default="")
     execution_command = CharField(default="")
@@ -40,6 +40,7 @@ class Servers(BaseModel):
     type = CharField(default="minecraft-java")
     show_status = BooleanField(default=1)
     created_by = IntegerField(default=-100)
+    # created_by = ForeignKeyField(Users, backref="creator_server", null=True)
     shutdown_timeout = IntegerField(default=60)
     ignored_exits = CharField(default="0")
     app_id = IntegerField(null=True)
@@ -61,10 +62,9 @@ class HelperServers:
     # **********************************************************************************
     @staticmethod
     def create_server(
+        server_id: str,
         name: str,
-        server_uuid: str,
         server_dir: str,
-        backup_path: str,
         server_command: str,
         server_file: str,
         server_log_file: str,
@@ -81,7 +81,6 @@ class HelperServers:
             name: The name of the server
             server_uuid: This is the UUID of the server
             server_dir: The directory where the server is located
-            backup_path: The path to the backup folder
             server_command: The command to start the server
             server_file: The name of the server file
             server_log_file: The path to the server log file
@@ -97,26 +96,24 @@ class HelperServers:
         Raises:
             PeeweeException: If the server already exists
         """
-        return Servers.insert(
-            {
-                Servers.server_name: name,
-                Servers.server_uuid: server_uuid,
-                Servers.path: server_dir,
-                Servers.executable: server_file,
-                Servers.execution_command: server_command,
-                Servers.auto_start: False,
-                Servers.auto_start_delay: 10,
-                Servers.crash_detection: False,
-                Servers.log_path: server_log_file,
-                Servers.server_port: server_port,
-                Servers.server_ip: server_host,
-                Servers.stop_command: server_stop,
-                Servers.backup_path: backup_path,
-                Servers.type: server_type,
-                Servers.created_by: created_by,
-                Servers.app_id: app_id,
-            }
-        ).execute()
+        return Servers.create(
+            server_id=server_id,
+            server_uuid=server_id,
+            server_name=name,
+            path=server_dir,
+            executable=server_file,
+            execution_command=server_command,
+            auto_start=False,
+            auto_start_delay=10,
+            crash_detection=False,
+            log_path=server_log_file,
+            server_port=server_port,
+            server_ip=server_host,
+            stop_command=server_stop,
+            type=server_type,
+            created_by=created_by,
+            app_id=app_id,
+        ).server_id

     @staticmethod
     def get_server_obj(server_id):
@@ -38,7 +38,7 @@ class Users(BaseModel):
     superuser = BooleanField(default=False)
     lang = CharField(default="en_EN")
     support_logs = CharField(default="")
-    valid_tokens_from = DateTimeField(default=datetime.datetime.now)
+    valid_tokens_from = DateTimeField(default=Helpers.get_utc_now)
     server_order = CharField(default="")
     preparing = BooleanField(default=False)
     hints = BooleanField(default=True)
@@ -71,7 +71,7 @@ class ApiKeys(BaseModel):
     user_id = ForeignKeyField(Users, backref="api_token", index=True)
     server_permissions = CharField(default="00000000")
     crafty_permissions = CharField(default="000")
-    superuser = BooleanField(default=False)
+    full_access = BooleanField(default=False)

     class Meta:
         table_name = "api_keys"
@@ -119,7 +119,6 @@ class HelperUsers:
     @staticmethod
     def get_user_total():
         count = Users.select().where(Users.username != "system").count()
-        print(count)
         return count

     @staticmethod
@@ -408,7 +407,7 @@ class HelperUsers:
     def add_user_api_key(
         name: str,
         user_id: str,
-        superuser: bool = False,
+        full_access: bool = False,
         server_permissions_mask: t.Optional[str] = None,
         crafty_permissions_mask: t.Optional[str] = None,
     ):
@@ -426,7 +425,7 @@ class HelperUsers:
                     if crafty_permissions_mask is not None
                     else {}
                 ),
-                ApiKeys.superuser: superuser,
+                ApiKeys.full_access: full_access,
             }
         ).execute()

@@ -1,5 +1,6 @@
 import logging
 import time
+from datetime import datetime
 from typing import Optional, Dict, Any, Tuple
 import jwt
 from jwt import PyJWTError
@@ -62,7 +63,17 @@ class Authentication:
             user = HelperUsers.get_user(user_id)
             # TODO: Have a cache or something so we don't constantly
             # have to query the database
-            if int(user.get("valid_tokens_from").timestamp()) < iat:
+            valid_tokens_from_str = user.get("valid_tokens_from")
+            # It's possible this will be a string or a dt coming from the DB
+            # We need to account for that
+            try:
+                valid_tokens_from_dt = datetime.strptime(
+                    valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z"
+                )
+            except TypeError:
+                valid_tokens_from_dt = valid_tokens_from_str
+            # Convert the string to a datetime object
+            if int(valid_tokens_from_dt.timestamp()) < iat:
                 # Success!
                 return key, data, user
         return None
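Token validation now has to cope with `valid_tokens_from` arriving either as a string or as an already-parsed datetime. A small standalone sketch of that parse-or-pass-through pattern (the format string matches the one used above; the sample value is illustrative):

```python
# Sketch of the parse-or-pass-through handling: accept either a str or an
# aware datetime and compare it to a token's iat claim.
from datetime import datetime, timezone


def tokens_valid_since(value):
    """Return a datetime regardless of whether `value` is a str or datetime."""
    try:
        return datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f%z")
    except TypeError:  # already a datetime; strptime refuses non-strings
        return value


iat = int(datetime.now(tz=timezone.utc).timestamp())
issued_after_revocation = (
    int(tokens_valid_since("2024-01-01 00:00:00.000000+00:00").timestamp()) < iat
)
print(issued_after_revocation)
```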
@@ -18,7 +18,12 @@ logger = logging.getLogger(__name__)

 class MainPrompt(cmd.Cmd):
     def __init__(
-        self, helper, tasks_manager, migration_manager, main_controller, import3
+        self,
+        helper,
+        tasks_manager,
+        migration_manager,
+        main_controller,
+        import3,
     ):
         super().__init__()
         self.helper: Helpers = helper
@@ -77,11 +82,11 @@ class MainPrompt(cmd.Cmd):
         # get new password from user
         new_pass = getpass.getpass(prompt=f"NEW password for: {username} > ")
         # check to make sure it fits our requirements.
-        if len(new_pass) > 512:
-            Console.warning("Passwords must be greater than 6char long and under 512")
-            return False
-        if len(new_pass) < 6:
-            Console.warning("Passwords must be greater than 6char long and under 512")
+        if len(new_pass) < self.helper.minimum_password_length:
+            Console.warning(
+                "Passwords must be greater than"
+                f" {self.helper.minimum_password_length} char long"
+            )
             return False
         # grab repeated password input
         new_pass_conf = getpass.getpass(prompt="Re-enter your password: > ")
@@ -4,7 +4,10 @@ import logging
 import pathlib
 import tempfile
 import zipfile
-from zipfile import ZipFile, ZIP_DEFLATED
+import hashlib
+from typing import BinaryIO
+import mimetypes
+from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
 import urllib.request
 import ssl
 import time
@@ -22,6 +25,7 @@ class FileHelpers:

     def __init__(self, helper):
         self.helper: Helpers = helper
+        self.mime_types = mimetypes.MimeTypes()

     @staticmethod
     def ssl_get_file(
@@ -142,6 +146,32 @@ class FileHelpers:
             logger.error(f"Path specified is not a file or does not exist. {path}")
             return e

+    def check_mime_types(self, file_path):
+        m_type, _value = self.mime_types.guess_type(file_path)
+        return m_type
+
+    @staticmethod
+    def calculate_file_hash(file_path: str) -> str:
+        """
+        Takes one parameter of file path.
+        It will generate a SHA256 hash for the path and return it.
+        """
+        sha256_hash = hashlib.sha256()
+        with open(file_path, "rb") as f:
+            for byte_block in iter(lambda: f.read(4096), b""):
+                sha256_hash.update(byte_block)
+            return sha256_hash.hexdigest()
+
+    @staticmethod
+    def calculate_buffer_hash(buffer: BinaryIO) -> str:
+        """
+        Takes one argument of a stream buffer. Will return a
+        sha256 hash of the buffer
+        """
+        sha256_hash = hashlib.sha256()
+        sha256_hash.update(buffer)
+        return sha256_hash.hexdigest()
+
     @staticmethod
     def copy_dir(src_path, dest_path, dirs_exist_ok=False):
         # pylint: disable=unexpected-keyword-arg
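The two new helpers hash either a file path in 4 KiB chunks (constant memory) or an in-memory buffer. A self-contained usage sketch with hypothetical file and payload names:

```python
# Usage sketch for chunked SHA-256 hashing versus hashing bytes in memory.
import hashlib
import tempfile


def file_sha256(path: str) -> str:
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(4096), b""):
            sha256.update(block)
    return sha256.hexdigest()


def buffer_sha256(data: bytes) -> str:
    sha256 = hashlib.sha256()
    sha256.update(data)
    return sha256.hexdigest()


payload = b"crafty" * 10_000
with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(payload)

# Both paths produce the same digest for the same bytes.
assert file_sha256(tmp.name) == buffer_sha256(payload)
```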
@@ -153,8 +183,7 @@ class FileHelpers:

     @staticmethod
     def move_dir(src_path, dest_path):
-        FileHelpers.copy_dir(src_path, dest_path)
-        FileHelpers.del_dirs(src_path)
+        shutil.move(src_path, dest_path)

     @staticmethod
     def move_dir_exist(src_path, dest_path):
@@ -163,8 +192,7 @@ class FileHelpers:

     @staticmethod
     def move_file(src_path, dest_path):
-        FileHelpers.copy_file(src_path, dest_path)
-        FileHelpers.del_file(src_path)
+        shutil.move(src_path, dest_path)

     @staticmethod
     def make_archive(path_to_destination, path_to_zip, comment=""):
@@ -229,74 +257,15 @@ class FileHelpers:

         return True

-    def make_compressed_backup(
-        self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
-    ):
-        # create a ZipFile object
-        path_to_destination += ".zip"
-        ex_replace = [p.replace("\\", "/") for p in excluded_dirs]
-        total_bytes = 0
-        dir_bytes = Helpers.get_dir_size(path_to_zip)
-        results = {
-            "percent": 0,
-            "total_files": self.helper.human_readable_file_size(dir_bytes),
-        }
-        WebSocketManager().broadcast_page_params(
-            "/panel/server_detail",
-            {"id": str(server_id)},
-            "backup_status",
-            results,
-        )
-        with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
-            zip_file.comment = bytes(
-                comment, "utf-8"
-            )  # comments over 65535 bytes will be truncated
-            for root, dirs, files in os.walk(path_to_zip, topdown=True):
-                for l_dir in dirs:
-                    if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace:
-                        dirs.remove(l_dir)
-                ziproot = path_to_zip
-                for file in files:
-                    if (
-                        str(os.path.join(root, file)).replace("\\", "/")
-                        not in ex_replace
-                        and file != "crafty.sqlite"
-                    ):
-                        try:
-                            logger.info(f"backing up: {os.path.join(root, file)}")
-                            if os.name == "nt":
-                                zip_file.write(
-                                    os.path.join(root, file),
-                                    os.path.join(root.replace(ziproot, ""), file),
-                                )
-                            else:
-                                zip_file.write(
-                                    os.path.join(root, file),
-                                    os.path.join(root.replace(ziproot, "/"), file),
-                                )
-
-                        except Exception as e:
-                            logger.warning(
-                                f"Error backing up: {os.path.join(root, file)}!"
-                                f" - Error was: {e}"
-                            )
-                    total_bytes += os.path.getsize(os.path.join(root, file))
-                    percent = round((total_bytes / dir_bytes) * 100, 2)
-                    results = {
-                        "percent": percent,
-                        "total_files": self.helper.human_readable_file_size(dir_bytes),
-                    }
-                    WebSocketManager().broadcast_page_params(
-                        "/panel/server_detail",
-                        {"id": str(server_id)},
-                        "backup_status",
-                        results,
-                    )
-
-        return True
-
     def make_backup(
-        self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
+        self,
+        path_to_destination,
+        path_to_zip,
+        excluded_dirs,
+        server_id,
+        backup_id,
+        comment="",
+        compressed=None,
     ):
         # create a ZipFile object
         path_to_destination += ".zip"
@@ -313,7 +282,15 @@ class FileHelpers:
             "backup_status",
             results,
         )
-        with ZipFile(path_to_destination, "w") as zip_file:
+        WebSocketManager().broadcast_page_params(
+            "/panel/edit_backup",
+            {"id": str(server_id)},
+            "backup_status",
+            results,
+        )
+        # Set the compression mode based on the `compressed` parameter
+        compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED
+        with ZipFile(path_to_destination, "w", compression_mode) as zip_file:
             zip_file.comment = bytes(
                 comment, "utf-8"
             )  # comments over 65535 bytes will be truncated
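`make_backup` now picks its compression mode from the `compressed` flag. A tiny sketch of the difference, with illustrative paths and comment text:

```python
# ZIP_DEFLATED compresses entries; ZIP_STORED only archives them.
import os
from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED


def archive(path_to_zip: str, destination: str, compressed: bool) -> str:
    mode = ZIP_DEFLATED if compressed else ZIP_STORED
    destination += ".zip"
    with ZipFile(destination, "w", mode) as zf:
        zf.comment = b"crafty backup"  # comments over 65535 bytes are truncated
        for root, _dirs, files in os.walk(path_to_zip):
            for name in files:
                full = os.path.join(root, name)
                zf.write(full, os.path.relpath(full, path_to_zip))
    return destination
```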
@@ -364,6 +341,7 @@ class FileHelpers:
                     results = {
                         "percent": percent,
                         "total_files": self.helper.human_readable_file_size(dir_bytes),
+                        "backup_id": backup_id,
                     }
                     # send status results to page.
                     WebSocketManager().broadcast_page_params(
@@ -372,6 +350,12 @@ class FileHelpers:
                         "backup_status",
                         results,
                     )
+        WebSocketManager().broadcast_page_params(
+            "/panel/edit_backup",
+            {"id": str(server_id)},
+            "backup_status",
+            results,
+        )
         return True

     @staticmethod
@@ -19,7 +19,7 @@ import shutil
 import shlex
 import subprocess
 import itertools
-from datetime import datetime
+from datetime import datetime, timezone
 from socket import gethostname
 from contextlib import redirect_stderr, suppress
 import libgravatar
@@ -72,9 +72,10 @@ class Helpers:
         self.db_path = os.path.join(
             self.root_dir, "app", "config", "db", "crafty.sqlite"
         )
-        self.serverjar_cache = os.path.join(self.config_dir, "serverjars.json")
+        self.big_bucket_cache = os.path.join(self.config_dir, "bigbucket.json")
         self.steamapps_cache = os.path.join(self.config_dir, "steamapps.json")
         self.credits_cache = os.path.join(self.config_dir, "credits.json")

         self.passhasher = PasswordHasher()
         self.exiting = False
@@ -82,6 +83,7 @@ class Helpers:
         self.update_available = False
         self.ignored_names = ["crafty_managed.txt", "db_stats"]
         self.crafty_starting = False
+        self.minimum_password_length = 8

     @staticmethod
     def auto_installer_fix(ex):
@@ -118,7 +120,7 @@ class Helpers:
         Get latest bedrock executable url \n\n
         returns url if successful, False if not
         """
-        url = "https://minecraft.net/en-us/download/server/bedrock/"
+        url = "https://www.minecraft.net/en-us/download/server/bedrock/"
         headers = {
             "Accept-Encoding": "identity",
             "Accept-Language": "en",
@@ -496,7 +498,6 @@ class Helpers:
         # Config.json was removed from the repo to make it easier for users
         # To make non-breaking changes to the file.
         return {
-            "http_port": 8000,
             "https_port": 8443,
             "language": "en_EN",
             "cookie_expire": 30,
@@ -509,7 +510,6 @@ class Helpers:
             "max_log_lines": 700,
             "max_audit_entries": 300,
             "disabled_language_files": [],
-            "stream_size_GB": 1,
             "keywords": ["help", "chunk"],
             "allow_nsfw_profile_pictures": False,
             "enable_user_self_delete": False,
@@ -517,6 +517,7 @@ class Helpers:
             "monitored_mounts": mounts,
             "dir_size_poll_freq_minutes": 5,
             "crafty_logs_delete_after_days": 0,
+            "big_bucket_repo": "https://jars.arcadiatech.org",
         }

     def get_all_settings(self):
@@ -640,6 +641,10 @@ class Helpers:
         version = f"{major}.{minor}.{sub}"
         return str(version)

+    @staticmethod
+    def get_utc_now() -> datetime:
+        return datetime.fromtimestamp(time.time(), tz=timezone.utc)
+
     def encode_pass(self, password):
         return self.passhasher.hash(password)

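The new `get_utc_now` helper returns a timezone-aware datetime, which is what `valid_tokens_from` now defaults to. A short sketch of why an aware value is preferable to the naive `datetime.now()` it replaces:

```python
# Aware UTC timestamps carry an explicit offset, so comparisons and
# serialization behave the same on every host.
import time
from datetime import datetime, timezone


def get_utc_now() -> datetime:
    return datetime.fromtimestamp(time.time(), tz=timezone.utc)


naive = datetime.now()   # no tzinfo; meaning depends on the host clock settings
aware = get_utc_now()    # explicit UTC offset
print(naive.tzinfo, aware.tzinfo, aware.isoformat())
```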
@@ -1006,6 +1011,11 @@ class Helpers:
         except PermissionError as e:
             logger.critical(f"Check generated exception due to permssion error: {e}")
             return False
+        except FileNotFoundError as e:
+            logger.critical(
+                f"Check generated exception due to file does not exist error: {e}"
+            )
+            return False

     def create_self_signed_cert(self, cert_dir=None):
         if cert_dir is None:
@@ -1,4 +1,5 @@
 import os
+import sys
 import pathlib
 from pathlib import Path
 from datetime import datetime
@@ -32,7 +33,7 @@ from app.classes.shared.console import Console
 from app.classes.shared.helpers import Helpers
 from app.classes.shared.file_helpers import FileHelpers
 from app.classes.shared.import_helper import ImportHelpers
-from app.classes.minecraft.serverjars import ServerJars
+from app.classes.minecraft.bigbucket import BigBucket
 from app.classes.shared.websocket_manager import WebSocketManager
 from app.classes.steamcmd.serverapps import SteamApps

@@ -45,8 +46,10 @@ class Controller:
         self.helper: Helpers = helper
         self.file_helper: FileHelpers = file_helper
         self.import_helper: ImportHelpers = import_helper
-        self.server_jars: ServerJars = ServerJars(helper)
+        self.big_bucket: BigBucket = BigBucket(helper)
         self.steam_apps: SteamApps = SteamApps(helper)

         self.users_helper: HelperUsers = HelperUsers(database, self.helper)
         self.roles_helper: HelperRoles = HelperRoles(database)
         self.servers_helper: HelperServers = HelperServers(database)
@@ -242,7 +245,7 @@ class Controller:
             try:
                 os.mkdir(final_path)
             except FileExistsError:
-                final_path += "_" + server["server_uuid"]
+                final_path += "_" + server["server_id"]
                 os.mkdir(final_path)
             try:
                 FileHelpers.copy_file(
@@ -254,6 +257,19 @@ class Controller:
         # Copy crafty logs to archive dir
         full_log_name = os.path.join(crafty_path, "logs")
         FileHelpers.copy_dir(os.path.join(self.project_root, "logs"), full_log_name)
+        thread_dump = ""
+        for thread in threading.enumerate():
+            if sys.version_info >= (3, 8):
+                thread_dump += (
+                    f"Name: {thread.name}\tIdentifier:"
+                    f" {thread.ident}\tTID/PID: {thread.native_id}\n"
+                )
+            else:
+                print(f"Name: {thread.name}\tIdentifier: {thread.ident}")
+        with open(
+            os.path.join(temp_dir, "crafty_thread_dump.txt"), "a", encoding="utf-8"
+        ) as f:
+            f.write(thread_dump)
         self.support_scheduler.add_job(
             self.log_status,
             "interval",
@@ -439,7 +455,7 @@ class Controller:
         if root_create_data["create_type"] == "download_jar":
             if Helpers.is_os_windows():
                 # Let's check for and setup for install server commands
-                if create_data["type"] == "forge":
+                if create_data["type"] == "forge-installer":
                     server_command = (
                         f"java -Xms{Helpers.float_to_string(min_mem)}M "
                         f"-Xmx{Helpers.float_to_string(max_mem)}M "
@@ -452,7 +468,7 @@ class Controller:
                         f'-jar "{server_file}" nogui'
                     )
             else:
-                if create_data["type"] == "forge":
+                if create_data["type"] == "forge-installer":
                     server_command = (
                         f"java -Xms{Helpers.float_to_string(min_mem)}M "
                         f"-Xmx{Helpers.float_to_string(max_mem)}M "
@@ -565,7 +581,6 @@ class Controller:
             name=data["name"],
             server_uuid=server_fs_uuid,
             server_dir=new_server_path,
-            backup_path=backup_path,
             server_command=server_command,
             server_file=server_file,
             server_log_file=log_location,
@@ -575,26 +590,23 @@ class Controller:
             server_host=monitoring_host,
             server_type=monitoring_type,
         )
-        self.management.set_backup_config(
+        self.management.add_default_backup_config(
             new_server_id,
             backup_path,
         )
         if data["create_type"] == "minecraft_java":
             if root_create_data["create_type"] == "download_jar":
                 # modded update urls from server jars will only update the installer
-                if (
-                    create_data["category"] != "modded"
-                    and create_data["type"] not in ServerJars.get_paper_jars()
-                ):
+                if create_data["type"] != "forge-installer":
                     server_obj = self.servers.get_server_obj(new_server_id)
-                    url = (
-                        "https://serverjars.com/api/fetchJar/"
-                        f"{create_data['category']}"
-                        f"/{create_data['type']}/{create_data['version']}"
+                    url = self.big_bucket.get_fetch_url(
+                        create_data["category"],
+                        create_data["type"],
+                        create_data["version"],
                     )
                     server_obj.executable_update_url = url
                     self.servers.update_server(server_obj)
-                    self.server_jars.download_jar(
+                    self.big_bucket.download_jar(
                         create_data["category"],
                         create_data["type"],
                         create_data["version"],
@@ -654,11 +666,11 @@ class Controller:
         # and add the user to it if he's not a superuser
         if len(captured_roles) == 0:
             if not exec_user["superuser"]:
-                new_server_uuid = self.servers.get_server_data_by_id(new_server_id).get(
-                    "server_uuid"
+                new_server_id = self.servers.get_server_data_by_id(new_server_id).get(
+                    "server_id"
                 )
                 role_id = self.roles.add_role(
-                    f"Creator of Server with uuid={new_server_uuid}",
+                    f"Creator of Server with id={new_server_id}",
                     exec_user["user_id"],
                 )
                 self.server_perms.add_role_server(new_server_id, role_id, "11111111")
@@ -669,7 +681,7 @@ class Controller:
                 role_id = role
                 self.server_perms.add_role_server(new_server_id, role_id, "11111111")

-        return new_server_id, server_fs_uuid
+        return new_server_id

     @staticmethod
     def verify_jar_server(server_path: str, server_jar: str):
|
||||||
@ -733,7 +745,6 @@ class Controller:
|
|||||||
server_name,
|
server_name,
|
||||||
server_id,
|
server_id,
|
||||||
new_server_dir,
|
new_server_dir,
|
||||||
backup_path,
|
|
||||||
server_command,
|
server_command,
|
||||||
server_jar,
|
server_jar,
|
||||||
server_log_file,
|
server_log_file,
|
||||||
@ -787,7 +798,6 @@ class Controller:
|
|||||||
server_name,
|
server_name,
|
||||||
server_id,
|
server_id,
|
||||||
new_server_dir,
|
new_server_dir,
|
||||||
backup_path,
|
|
||||||
server_command,
|
server_command,
|
||||||
server_exe,
|
server_exe,
|
||||||
server_log_file,
|
server_log_file,
|
||||||
@ -832,7 +842,6 @@ class Controller:
|
|||||||
server_name,
|
server_name,
|
||||||
server_id,
|
server_id,
|
||||||
new_server_dir,
|
new_server_dir,
|
||||||
backup_path,
|
|
||||||
server_command,
|
server_command,
|
||||||
server_exe,
|
server_exe,
|
||||||
server_log_file,
|
server_log_file,
|
||||||
@ -880,7 +889,6 @@ class Controller:
|
|||||||
server_name,
|
server_name,
|
||||||
server_id,
|
server_id,
|
||||||
new_server_dir,
|
new_server_dir,
|
||||||
backup_path,
|
|
||||||
server_command,
|
server_command,
|
||||||
server_exe,
|
server_exe,
|
||||||
server_log_file,
|
server_log_file,
|
||||||
@ -904,16 +912,13 @@ class Controller:
|
|||||||
# **********************************************************************************
|
# **********************************************************************************
|
||||||
|
|
||||||
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
|
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
|
||||||
server_data = self.servers.get_server_data_by_id(old_server_id)
|
|
||||||
server_obj = self.servers.get_server_obj(new_server_id)
|
server_obj = self.servers.get_server_obj(new_server_id)
|
||||||
old_bu_path = server_data["backup_path"]
|
|
||||||
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
|
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
|
||||||
backup_path = old_bu_path
|
backup_path = os.path.join(self.helper.backup_path, old_server_id)
|
||||||
backup_path = Path(backup_path)
|
backup_path = Path(backup_path)
|
||||||
backup_path_components = list(backup_path.parts)
|
backup_path_components = list(backup_path.parts)
|
||||||
backup_path_components[-1] = new_uuid
|
backup_path_components[-1] = new_uuid
|
||||||
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
|
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
|
||||||
server_obj.backup_path = new_bu_path
|
|
||||||
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
|
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
|
||||||
try:
|
try:
|
||||||
os.rmdir(default_backup_dir)
|
os.rmdir(default_backup_dir)
|
||||||
@ -927,7 +932,6 @@ class Controller:
|
|||||||
name: str,
|
name: str,
|
||||||
server_uuid: str,
|
server_uuid: str,
|
||||||
server_dir: str,
|
server_dir: str,
|
||||||
backup_path: str,
|
|
||||||
server_command: str,
|
server_command: str,
|
||||||
server_file: str,
|
server_file: str,
|
||||||
server_log_file: str,
|
server_log_file: str,
|
||||||
@ -943,7 +947,6 @@ class Controller:
|
|||||||
name,
|
name,
|
||||||
server_uuid,
|
server_uuid,
|
||||||
server_dir,
|
server_dir,
|
||||||
backup_path,
|
|
||||||
server_command,
|
server_command,
|
||||||
server_file,
|
server_file,
|
||||||
server_log_file,
|
server_log_file,
|
||||||
@ -1009,14 +1012,14 @@ class Controller:
|
|||||||
f"Unable to delete server files for server with ID: "
|
f"Unable to delete server files for server with ID: "
|
||||||
f"{server_id} with error logged: {e}"
|
f"{server_id} with error logged: {e}"
|
||||||
)
|
)
|
||||||
if Helpers.check_path_exists(
|
backup_configs = HelpersManagement.get_backups_by_server(
|
||||||
self.servers.get_server_data_by_id(server_id)["backup_path"]
|
server_id, True
|
||||||
):
|
)
|
||||||
|
for config in backup_configs:
|
||||||
|
if Helpers.check_path_exists(config.backup_location):
|
||||||
FileHelpers.del_dirs(
|
FileHelpers.del_dirs(
|
||||||
Helpers.get_os_understandable_path(
|
Helpers.get_os_understandable_path(
|
||||||
self.servers.get_server_data_by_id(server_id)[
|
config.backup_location
|
||||||
"backup_path"
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -1119,7 +1122,7 @@ class Controller:
|
|||||||
for server in servers:
|
for server in servers:
|
||||||
server_path = server.get("path")
|
server_path = server.get("path")
|
||||||
new_local_server_path = os.path.join(
|
new_local_server_path = os.path.join(
|
||||||
new_server_path, server.get("server_uuid")
|
new_server_path, server.get("server_id")
|
||||||
)
|
)
|
||||||
if os.path.isdir(server_path):
|
if os.path.isdir(server_path):
|
||||||
WebSocketManager().broadcast_page(
|
WebSocketManager().broadcast_page(
|
||||||
@ -1155,7 +1158,7 @@ class Controller:
|
|||||||
server_obj.path = new_local_server_path
|
server_obj.path = new_local_server_path
|
||||||
failed = False
|
failed = False
|
||||||
for s in self.servers.failed_servers:
|
for s in self.servers.failed_servers:
|
||||||
if int(s["server_id"]) == int(server.get("server_id")):
|
if s["server_id"] == server.get("server_id"):
|
||||||
failed = True
|
failed = True
|
||||||
if not failed:
|
if not failed:
|
||||||
self.servers.update_server(server_obj)
|
self.servers.update_server(server_obj)
|
||||||
|
@@ -18,12 +18,21 @@ class DatabaseBuilder:
 logger.info("Fresh Install Detected - Creating Default Settings")
 Console.info("Fresh Install Detected - Creating Default Settings")
 default_data = self.helper.find_default_password()
-if password not in default_data:
+if "password" not in default_data:
 Console.help(
 "No default password found. Using password created "
 "by Crafty. Find it in app/config/default-creds.txt"
 )
 username = default_data.get("username", "admin")
+if self.helper.minimum_password_length > len(
+default_data.get("password", password)
+):
+Console.critical(
+"Default password too short"
+" using Crafty's created default."
+" Find it in app/config/default-creds.txt"
+)
+else:
 password = default_data.get("password", password)
 
 self.users_helper.add_user(
@@ -200,6 +200,21 @@ class Migrator(object):
 )
 return model
 
+@get_model
+def alter_column_type(
+self,
+model: peewee.Model,
+column_name: str,
+field: peewee.Field,
+) -> peewee.Model:
+"""
+Alter field data type in database.
+"""
+self.operations.append(
+self.migrator.alter_column_type(model._meta.table_name, column_name, field)
+)
+return model
+
 @get_model
 def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model:
 """
@@ -354,9 +369,12 @@ class MigrationManager(object):
 @cached_property
 def migrator(self) -> Migrator:
 """
-Create migrator and setup it with fake migrations.
+Create migrator
 """
 migrator = Migrator(self.database)
+# Running false migrations to retrives the schemes of
+# the precedents created tables in the table_dict element
+# It's useful to run the new migrations
 for name in self.done:
 self.up_one(name, migrator, True)
 return migrator
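For context on the new migrator operation above: a minimal sketch of how a migration script might queue `alter_column_type` (the model, column, and `migrate` signature here are illustrative assumptions, not part of this commit):

```python
import peewee


class Servers(peewee.Model):
    # Hypothetical model used only for illustration
    backup_path = peewee.CharField(default="")


def migrate(migrator, database, **kwargs):
    # Queue a column type change; the Migrator applies its queued
    # operations when the migration is run.
    migrator.alter_column_type(Servers, "backup_path", peewee.TextField(default=""))
```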
@@ -209,9 +209,6 @@ class ServerInstance:
 self.server_scheduler.start()
 self.dir_scheduler.start()
 self.start_dir_calc_task()
-self.backup_thread = threading.Thread(
-target=self.a_backup_server, daemon=True, name=f"backup_{self.name}"
-)
 self.is_backingup = False
 # Reset crash and update at initialization
 self.stats_helper.server_crash_reset()
@@ -765,12 +762,17 @@ class ServerInstance:
 try:
 # Getting the forge version from the executable command
 version = re.findall(
-r"forge-([0-9\.]+)((?:)|(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
+r"forge-installer-([0-9\.]+)((?:)|"
+r"(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
 server_obj.execution_command,
 )
 version_param = version[0][0].split(".")
 version_major = int(version_param[0])
 version_minor = int(version_param[1])
+if len(version_param) > 2:
+version_sub = int(version_param[2])
+else:
+version_sub = 0
 
 # Checking which version we are with
 if version_major <= 1 and version_minor < 17:
@@ -804,8 +806,8 @@ class ServerInstance:
 server_obj.execution_command = execution_command
 Console.debug(SUCCESSMSG)
 
-elif version_major <= 1 and version_minor < 20:
-# NEW VERSION >= 1.17 and <= 1.20
+elif version_major <= 1 and version_minor <= 20 and version_sub < 3:
+# NEW VERSION >= 1.17 and <= 1.20.2
 # (no jar file in server dir, only run.bat and run.sh)
 
 run_file_path = ""
@@ -852,7 +854,7 @@ class ServerInstance:
 server_obj.execution_command = execution_command
 Console.debug(SUCCESSMSG)
 else:
-# NEW VERSION >= 1.20
+# NEW VERSION >= 1.20.3
 # (executable jar is back in server dir)
 
 # Retrieving the executable jar filename
@@ -1010,8 +1012,7 @@ class ServerInstance:
 WebSocketManager().broadcast_user(user, "send_start_reload", {})
 
 def restart_threaded_server(self, user_id):
-bu_conf = HelpersManagement.get_backup_config(self.server_id)
-if self.is_backingup and bu_conf["shutdown"]:
+if self.is_backingup:
 logger.info(
 "Restart command detected. Supressing - server has"
 " backup shutdown enabled and server is currently backing up."
@@ -1181,13 +1182,16 @@ class ServerInstance:
 f.write("eula=true")
 self.run_threaded_server(user_id)
 
-@callback
-def backup_server(self):
-if self.settings["backup_path"] == "":
-logger.critical("Backup path is None. Canceling Backup!")
-return
+def server_backup_threader(self, backup_id, update=False):
+# Check to see if we're already backing up
+if self.check_backup_by_id(backup_id):
+return False
 backup_thread = threading.Thread(
-target=self.a_backup_server, daemon=True, name=f"backup_{self.name}"
+target=self.backup_server,
+daemon=True,
+name=f"backup_{backup_id}",
+args=[backup_id, update],
 )
 logger.info(
 f"Starting Backup Thread for server {self.settings['server_name']}."
@@ -1198,26 +1202,20 @@ class ServerInstance:
 "Backup Thread - Local server path not defined. "
 "Setting local server path variable."
 )
-# checks if the backup thread is currently alive for this server
-if not self.is_backingup:
 try:
 backup_thread.start()
-self.is_backingup = True
 except Exception as ex:
 logger.error(f"Failed to start backup: {ex}")
 return False
-else:
-logger.error(
-f"Backup is already being processed for server "
-f"{self.settings['server_name']}. Canceling backup request"
-)
-return False
 logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
 
-def a_backup_server(self):
+@callback
+def backup_server(self, backup_id, update):
 was_server_running = None
 logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
 server_users = PermissionsServers.get_server_user_list(self.server_id)
+# Alert the start of the backup to the authorized users.
 for user in server_users:
 WebSocketManager().broadcast_user(
 user,
@@ -1227,30 +1225,40 @@ class ServerInstance:
 ).format(self.name),
 )
 time.sleep(3)
-conf = HelpersManagement.get_backup_config(self.server_id)
+# Get the backup config
+conf = HelpersManagement.get_backup_config(backup_id)
+# Adjust the location to include the backup ID for destination.
+backup_location = os.path.join(conf["backup_location"], conf["backup_id"])
+
+# Check if the backup location even exists.
+if not backup_location:
+Console.critical("No backup path found. Canceling")
+return None
 if conf["before"]:
-if self.check_running():
 logger.debug(
 "Found running server and send command option. Sending command"
 )
 self.send_command(conf["before"])
+# Pause to let command run
+time.sleep(5)
+
 if conf["shutdown"]:
-if conf["before"]:
-# pause to let people read message.
-time.sleep(5)
 logger.info(
 "Found shutdown preference. Delaying"
 + "backup start. Shutting down server."
 )
+if not update:
+was_server_running = False
 if self.check_running():
 self.stop_server()
 was_server_running = True
 
-self.helper.ensure_dir_exists(self.settings["backup_path"])
+self.helper.ensure_dir_exists(backup_location)
 
 try:
 backup_filename = (
-f"{self.settings['backup_path']}/"
+f"{backup_location}/"
 f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long
 )
 logger.info(
@@ -1258,42 +1266,36 @@ class ServerInstance:
 f" (ID#{self.server_id}, path={self.server_path}) "
 f"at '{backup_filename}'"
 )
-excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id)
+excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id)
 server_dir = Helpers.get_os_understandable_path(self.settings["path"])
-if conf["compress"]:
-logger.debug(
-"Found compress backup to be true. Calling compressed archive"
-)
-self.file_helper.make_compressed_backup(
-Helpers.get_os_understandable_path(backup_filename),
-server_dir,
-excluded_dirs,
-self.server_id,
-)
-else:
-logger.debug(
-"Found compress backup to be false. Calling NON-compressed archive"
-)
 self.file_helper.make_backup(
 Helpers.get_os_understandable_path(backup_filename),
 server_dir,
 excluded_dirs,
 self.server_id,
+backup_id,
+conf["backup_name"],
+conf["compress"],
 )
 
 while (
-len(self.list_backups()) > conf["max_backups"]
+len(self.list_backups(conf)) > conf["max_backups"]
 and conf["max_backups"] > 0
 ):
-backup_list = self.list_backups()
+backup_list = self.list_backups(conf)
 oldfile = backup_list[0]
-oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
+oldfile_path = f"{backup_location}/{oldfile['path']}"
 logger.info(f"Removing old backup '{oldfile['path']}'")
 os.remove(Helpers.get_os_understandable_path(oldfile_path))
 
-self.is_backingup = False
 logger.info(f"Backup of server: {self.name} completed")
-results = {"percent": 100, "total_files": 0, "current_file": 0}
+results = {
+"percent": 100,
+"total_files": 0,
+"current_file": 0,
+"backup_id": backup_id,
+}
 if len(WebSocketManager().clients) > 0:
 WebSocketManager().broadcast_page_params(
 "/panel/server_detail",
@@ -1318,7 +1320,6 @@ class ServerInstance:
 )
 self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
 time.sleep(3)
-self.last_backup_failed = False
 if conf["after"]:
 if self.check_running():
 logger.debug(
@@ -1326,12 +1327,21 @@ class ServerInstance:
 )
 self.send_command(conf["after"])
 # pause to let people read message.
+HelpersManagement.update_backup_config(
+backup_id,
+{"status": json.dumps({"status": "Standby", "message": ""})},
+)
 time.sleep(5)
-except:
+except Exception as e:
 logger.exception(
 f"Failed to create backup of server {self.name} (ID {self.server_id})"
 )
-results = {"percent": 100, "total_files": 0, "current_file": 0}
+results = {
+"percent": 100,
+"total_files": 0,
+"current_file": 0,
+"backup_id": backup_id,
+}
 if len(WebSocketManager().clients) > 0:
 WebSocketManager().broadcast_page_params(
 "/panel/server_detail",
@@ -1339,56 +1349,51 @@ class ServerInstance:
 "backup_status",
 results,
 )
-self.is_backingup = False
 if was_server_running:
 logger.info(
 "Backup complete. User had shutdown preference. Starting server."
 )
 self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
-self.last_backup_failed = True
-def backup_status(self, source_path, dest_path):
-results = Helpers.calc_percent(source_path, dest_path)
-self.backup_stats = results
-if len(WebSocketManager().clients) > 0:
-WebSocketManager().broadcast_page_params(
-"/panel/server_detail",
-{"id": str(self.server_id)},
-"backup_status",
-results,
+HelpersManagement.update_backup_config(
+backup_id,
+{"status": json.dumps({"status": "Failed", "message": f"{e}"})},
 )
+self.set_backup_status()
 
 def last_backup_status(self):
 return self.last_backup_failed
 
-def send_backup_status(self):
-try:
-return self.backup_stats
-except:
-return {"percent": 0, "total_files": 0}
+def set_backup_status(self):
+backups = HelpersManagement.get_backups_by_server(self.server_id, True)
+alert = False
+for backup in backups:
+if json.loads(backup.status)["status"] == "Failed":
+alert = True
+self.last_backup_failed = alert
 
-def list_backups(self):
-if not self.settings["backup_path"]:
+def list_backups(self, backup_config: dict) -> list:
+if not backup_config:
 logger.info(
 f"Error putting backup file list for server with ID: {self.server_id}"
 )
 return []
+backup_location = os.path.join(
+backup_config["backup_location"], backup_config["backup_id"]
+)
 if not Helpers.check_path_exists(
-Helpers.get_os_understandable_path(self.settings["backup_path"])
+Helpers.get_os_understandable_path(backup_location)
 ):
 return []
 files = Helpers.get_human_readable_files_sizes(
 Helpers.list_dir_by_date(
-Helpers.get_os_understandable_path(self.settings["backup_path"])
+Helpers.get_os_understandable_path(backup_location)
 )
 )
 return [
 {
 "path": os.path.relpath(
 f["path"],
-start=Helpers.get_os_understandable_path(
-self.settings["backup_path"]
-),
+start=Helpers.get_os_understandable_path(backup_location),
 ),
 "size": f["size"],
 }
@@ -1400,7 +1405,7 @@ class ServerInstance:
 def server_upgrade(self):
 self.stats_helper.set_update(True)
 update_thread = threading.Thread(
-target=self.a_server_upgrade, daemon=True, name=f"exe_update_{self.name}"
+target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}"
 )
 update_thread.start()
 
@@ -1441,14 +1446,32 @@ class ServerInstance:
 def check_update(self):
 return self.stats_helper.get_server_stats()["updating"]
 
-def a_server_upgrade(self):
+def threaded_jar_update(self):
 server_users = PermissionsServers.get_server_user_list(self.server_id)
+# check to make sure a backup config actually exists before starting the update
+if len(self.management_helper.get_backups_by_server(self.server_id, True)) <= 0:
+for user in server_users:
+WebSocketManager().broadcast_user(
+user,
+"notification",
+"Backup config does not exist for "
++ self.name
++ ". canceling update.",
+)
+logger.error(f"Back config does not exist for {self.name}. Update Failed.")
+self.stats_helper.set_update(False)
+return
 was_started = "-1"
 
 ###############################
 # Backup Server ###############
 ###############################
-self.backup_server()
+# Get default backup configuration
+backup_config = HelpersManagement.get_default_server_backup(self.server_id)
+# start threaded backup
+self.server_backup_threader(backup_config["backup_id"], True)
 
 # checks if server is running. Calls shutdown if it is running.
 if self.check_running():
 was_started = True
@@ -1477,54 +1500,30 @@ class ServerInstance:
 "string": message,
 },
 )
-backup_dir = os.path.join(
-Helpers.get_os_understandable_path(self.settings["path"]),
-"crafty_executable_backups",
-)
-# checks if backup directory already exists
-if os.path.isdir(backup_dir):
-backup_executable = os.path.join(backup_dir, self.settings["executable"])
-else:
-logger.info(
-f"Executable backup directory not found for Server: {self.name}."
-f" Creating one."
-)
-os.mkdir(backup_dir)
-backup_executable = os.path.join(backup_dir, self.settings["executable"])
-
-if len(os.listdir(backup_dir)) > 0:
-# removes old backup
-logger.info(f"Old backups found for server: {self.name}. Removing...")
-for item in os.listdir(backup_dir):
-os.remove(os.path.join(backup_dir, item))
-logger.info(f"Old backups removed for server: {self.name}.")
-else:
-logger.info(f"No old backups found for server: {self.name}")
 
 current_executable = os.path.join(
 Helpers.get_os_understandable_path(self.settings["path"]),
 self.settings["executable"],
 )
+backing_up = True
-try:
-# copies to backup dir
-FileHelpers.copy_file(current_executable, backup_executable)
-except FileNotFoundError:
-logger.error("Could not create backup of jarfile. File not found.")
 
 # wait for backup
-while self.is_backingup:
-time.sleep(10)
+while backing_up:
+# Check to see if we're already backing up
+backing_up = self.check_backup_by_id(backup_config["backup_id"])
+time.sleep(2)
 
 # check if backup was successful
-if self.last_backup_failed:
+backup_status = json.loads(
+HelpersManagement.get_backup_config(backup_config["backup_id"])["status"]
+)["status"]
+if backup_status == "Failed":
 for user in server_users:
 WebSocketManager().broadcast_user(
 user,
 "notification",
 "Backup failed for " + self.name + ". canceling update.",
 )
-return False
+self.stats_helper.set_update(False)
+return
 
 ################################
 # Executable Download ##########
@@ -1638,12 +1637,6 @@ class ServerInstance:
 WebSocketManager().broadcast_user_page(
 user, "/panel/dashboard", "send_start_reload", {}
 )
-WebSocketManager().broadcast_user(
-user,
-"notification",
-"Executable update finished for " + self.name,
-)
-
 self.management_helper.add_to_audit_log_raw(
 "Alert",
 "-1",
@@ -1766,6 +1759,14 @@ class ServerInstance:
 except:
 Console.critical("Can't broadcast server status to websocket")
 
+def check_backup_by_id(self, backup_id: str) -> bool:
+# Check to see if we're already backing up
+for thread in threading.enumerate():
+if thread.getName() == f"backup_{backup_id}":
+Console.debug(f"Backup with id {backup_id} already running!")
+return True
+return False
+
 def get_servers_stats(self):
 server_stats = {}
 
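The reworked backup flow above keys everything off the backup thread's name (`backup_<backup_id>`), both to refuse duplicate runs in `server_backup_threader` and to poll for completion during a jar update. A small self-contained sketch of that pattern (the function and thread names below are illustrative, not taken from the commit):

```python
import threading
import time


def is_backup_running(backup_id: str) -> bool:
    # Mirrors check_backup_by_id: a backup counts as "running" while a
    # thread named backup_<id> is alive in this process.
    return any(t.name == f"backup_{backup_id}" for t in threading.enumerate())


def fake_backup():
    time.sleep(0.2)  # stand-in for the real archive work


thread = threading.Thread(target=fake_backup, daemon=True, name="backup_42")
thread.start()
print(is_backup_running("42"))   # True while the thread is alive
thread.join()
print(is_backup_running("42"))   # False once it finishes
```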
@@ -140,7 +140,7 @@ class TasksManager:
 )
 
 elif command == "backup_server":
-svr.backup_server()
+svr.server_backup_threader(cmd["action_id"])
 
 elif command == "update_executable":
 svr.server_upgrade()
@@ -240,6 +240,7 @@ class TasksManager:
 "system"
 ),
 "command": schedule.command,
+"action_id": schedule.action_id,
 }
 ],
 )
@@ -268,6 +269,7 @@ class TasksManager:
 "system"
 ),
 "command": schedule.command,
+"action_id": schedule.action_id,
 }
 ],
 )
@@ -284,6 +286,7 @@ class TasksManager:
 "system"
 ),
 "command": schedule.command,
+"action_id": schedule.action_id,
 }
 ],
 )
@@ -303,6 +306,7 @@ class TasksManager:
 "system"
 ),
 "command": schedule.command,
+"action_id": schedule.action_id,
 }
 ],
 )
@@ -337,6 +341,7 @@ class TasksManager:
 job_data["cron_string"],
 job_data["parent"],
 job_data["delay"],
+job_data["action_id"],
 )
 
 # Checks to make sure some doofus didn't actually make the newly
@@ -367,6 +372,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -393,6 +399,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -409,6 +416,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -428,6 +436,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -520,6 +529,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -543,6 +553,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -559,6 +570,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -578,6 +590,7 @@ class TasksManager:
 "system"
 ),
 "command": job_data["command"],
+"action_id": job_data["action_id"],
 }
 ],
 )
@@ -653,6 +666,7 @@ class TasksManager:
 "system"
 ),
 "command": schedule.command,
+"action_id": schedule.action_id,
 }
 ],
 )
@@ -685,16 +699,16 @@ class TasksManager:
 id="stats",
 )
 
-def serverjar_cache_refresher(self):
-logger.info("Refreshing serverjars.com cache on start")
-self.controller.server_jars.refresh_cache()
+def big_bucket_cache_refresher(self):
+logger.info("Refreshing big bucket cache on start")
+self.controller.big_bucket.refresh_cache()
 
-logger.info("Scheduling Serverjars.com cache refresh service every 12 hours")
+logger.info("Scheduling big bucket cache refresh service every 12 hours")
 self.scheduler.add_job(
-self.controller.server_jars.refresh_cache,
+self.controller.big_bucket.refresh_cache,
 "interval",
 hours=12,
-id="serverjars",
+id="big_bucket",
 )
 
 def steamapps_cache_refresher(self):
@@ -797,6 +811,18 @@ class TasksManager:
 self.helper.ensure_dir_exists(
 os.path.join(self.controller.project_root, "import", "upload")
 )
+self.helper.ensure_dir_exists(
+os.path.join(self.controller.project_root, "temp")
+)
+for file in os.listdir(os.path.join(self.controller.project_root, "temp")):
+if self.helper.is_file_older_than_x_days(
+os.path.join(self.controller.project_root, "temp", file)
+):
+try:
+os.remove(os.path.join(file))
+except FileNotFoundError:
+logger.debug("Could not clear out file from temp directory")
+
 for file in os.listdir(
 os.path.join(self.controller.project_root, "import", "upload")
 ):
@@ -805,7 +831,7 @@ class TasksManager:
 ):
 try:
 os.remove(os.path.join(file))
-except:
+except FileNotFoundError:
 logger.debug("Could not clear out file from import directory")
 
 def log_watcher(self):
@@ -20,7 +20,7 @@ class Translation:
 def get_language_file(self, language: str):
 return os.path.join(self.translations_path, str(language) + ".json")
 
-def translate(self, page, word, language):
+def translate(self, page, word, language, error=True):
 fallback_language = "en_EN"
 
 translated_word = self.translate_inner(page, word, language)
@@ -37,7 +37,9 @@ class Translation:
 if hasattr(translated_word, "__iter__"):
 # Multiline strings
 return "\n".join(translated_word)
+if error:
 return "Error while getting translation"
+return word
 
 def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
 language_file = self.get_language_file(language)
@@ -6,6 +6,7 @@ import nh3
 import tornado.web
 
 from app.classes.models.crafty_permissions import EnumPermissionsCrafty
+from app.classes.models.server_permissions import EnumPermissionsServer
 from app.classes.models.users import ApiKeys
 from app.classes.shared.helpers import Helpers
 from app.classes.shared.file_helpers import FileHelpers
@@ -182,6 +183,7 @@ class BaseHandler(tornado.web.RequestHandler):
 t.List[str],
 bool,
 t.Dict[str, t.Any],
+str,
 ]
 ]:
 try:
@@ -190,9 +192,12 @@ class BaseHandler(tornado.web.RequestHandler):
 )
 
 superuser = user["superuser"]
+server_permissions_api_mask = ""
 if api_key is not None:
-superuser = superuser and api_key.superuser
+superuser = superuser and api_key.full_access
+server_permissions_api_mask = api_key.server_permissions
+if api_key.full_access:
+server_permissions_api_mask = "1" * len(EnumPermissionsServer)
 exec_user_role = set()
 if superuser:
 authorized_servers = self.controller.servers.get_all_defined_servers()
@@ -214,6 +219,7 @@ class BaseHandler(tornado.web.RequestHandler):
 user["user_id"]
 )
 )
+
 logger.debug(user["roles"])
 for r in user["roles"]:
 role = self.controller.roles.get_role(r)
@@ -234,6 +240,7 @@ class BaseHandler(tornado.web.RequestHandler):
 exec_user_role,
 superuser,
 user,
+server_permissions_api_mask,
 )
 logging.debug("Auth unsuccessful")
 auth_log.error(
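One note on the `server_permissions_api_mask` change above: a full-access key is represented as a string of `1`s, one per member of the server permissions enum. A hedged sketch of that idea with a stand-in enum (the real `EnumPermissionsServer` lives in the Crafty codebase and has its own members):

```python
from enum import Enum


class EnumPermissionsServer(Enum):
    # Stand-in with a few example members, for illustration only
    COMMANDS = 0
    TERMINAL = 1
    LOGS = 2
    SCHEDULE = 3


api_key_full_access = True
server_permissions_api_mask = ""
if api_key_full_access:
    # every permission bit set
    server_permissions_api_mask = "1" * len(EnumPermissionsServer)
print(server_permissions_api_mask)  # "1111"
```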
@@ -1,42 +0,0 @@
-import logging
-import requests
-
-from app.classes.web.base_handler import BaseHandler
-
-logger = logging.getLogger(__name__)
-
-
-class HTTPHandler(BaseHandler):
-def get(self):
-url = str(self.request.host)
-port = 443
-url_list = url.split(":")
-if url_list[0] != "":
-url = "https://" + url_list[0]
-else:
-url = "https://" + url
-db_port = self.helper.get_setting("https_port")
-try:
-resp = requests.head(url + ":" + str(port), timeout=(0.5, 5))
-resp.raise_for_status()
-except Exception:
-port = db_port
-self.redirect(url + ":" + str(port))
-
-
-class HTTPHandlerPage(BaseHandler):
-def get(self):
-url = str(self.request.host)
-port = 443
-url_list = url.split(":")
-if url_list[0] != "":
-url = "https://" + url_list[0]
-else:
-url = "https://" + url
-db_port = self.helper.get_setting("https_port")
-try:
-resp = requests.head(url + ":" + str(port), timeout=(0.5, 5))
-resp.raise_for_status()
-except Exception:
-port = db_port
-self.redirect(url + ":" + str(port))
@@ -1,33 +0,0 @@
-import logging
-import requests
-from app.classes.web.base_handler import BaseHandler
-
-logger = logging.getLogger(__name__)
-
-
-class HTTPHandlerPage(BaseHandler):
-def get(self):
-url = self.request.full_url
-port = 443
-if url[len(url) - 1] == "/":
-url = url.strip(url[len(url) - 1])
-url_list = url.split("/")
-if url_list[0] != "":
-primary_url = url_list[0] + ":" + str(port) + "/"
-backup_url = (
-url_list[0] + ":" + str(self.helper.get_setting("https_port")) + "/"
-)
-for i in range(len(url_list) - 1):
-primary_url += url_list[i + 1]
-backup_url += url_list[i + 1]
-else:
-primary_url = url + str(port)
-backup_url = url + str(self.helper.get_setting("https_port"))
-
-try:
-resp = requests.head(primary_url, timeout=(0.5, 5))
-resp.raise_for_status()
-url = primary_url
-except Exception:
-url = backup_url
-self.redirect("https://" + url + ":" + str(port))
@@ -41,6 +41,8 @@ SUBPAGE_PERMS = {
 "webhooks": EnumPermissionsServer.CONFIG,
 }
 
+SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules"
+
 
 class PanelHandler(BaseHandler):
 def get_user_roles(self) -> t.Dict[str, list]:
@@ -168,13 +170,13 @@ class PanelHandler(BaseHandler):
 # Commented out because there is no server access control for API keys,
 # they just inherit from the host user
 # if api_key is not None:
-# superuser = superuser and api_key.superuser
+# superuser = superuser and api_key.full_access
 
 if server_id is None:
 self.redirect("/panel/error?error=Invalid Server ID")
 return None
 for server in self.controller.servers.failed_servers:
-if int(server_id) == server["server_id"]:
+if server_id == server["server_id"]:
 self.failed_server = True
 return server_id
 # Does this server exist?
@@ -242,7 +244,7 @@ class PanelHandler(BaseHandler):
 api_key, _token_data, exec_user = self.current_user
 superuser = exec_user["superuser"]
 if api_key is not None:
-superuser = superuser and api_key.superuser
+superuser = superuser and api_key.full_access
 
 if superuser: # TODO: Figure out a better solution
 defined_servers = self.controller.servers.list_defined_servers()
@@ -351,7 +353,7 @@ class PanelHandler(BaseHandler):
 "created": api_key.created,
 "server_permissions": api_key.server_permissions,
 "crafty_permissions": api_key.crafty_permissions,
-"superuser": api_key.superuser,
+"full_access": api_key.full_access,
 }
 if api_key is not None
 else None
@@ -556,7 +558,7 @@ class PanelHandler(BaseHandler):
 "server_id": {
 "server_id": server_id,
 "server_name": server_temp_obj["server_name"],
-"server_uuid": server_temp_obj["server_uuid"],
+"server_uuid": server_temp_obj["server_id"],
 "path": server_temp_obj["path"],
 "log_path": server_temp_obj["log_path"],
 "executable": server_temp_obj["executable"],
@@ -574,6 +576,7 @@ class PanelHandler(BaseHandler):
 "crash_detection": server_temp_obj["crash_detection"],
 "show_status": server_temp_obj["show_status"],
 "ignored_exits": server_temp_obj["ignored_exits"],
+"count_players": server_temp_obj["count_players"],
 },
 "running": False,
 "crashed": False,
@@ -676,36 +679,18 @@ class PanelHandler(BaseHandler):
 page_data["java_versions"] = page_java
 if subpage == "backup":
 server_info = self.controller.servers.get_server_data_by_id(server_id)
-page_data["backup_config"] = (
-self.controller.management.get_backup_config(server_id)
-)
-exclusions = []
-page_data["exclusions"] = (
-self.controller.management.get_excluded_backup_dirs(server_id)
+page_data["backups"] = self.controller.management.get_backups_by_server(
+server_id, model=True
 )
 page_data["backing_up"] = (
 self.controller.servers.get_server_instance_by_id(
 server_id
 ).is_backingup
 )
-page_data["backup_stats"] = (
-self.controller.servers.get_server_instance_by_id(
-server_id
-).send_backup_status()
-)
 # makes it so relative path is the only thing shown
-for file in page_data["exclusions"]:
-if Helpers.is_os_windows():
-exclusions.append(file.replace(server_info["path"] + "\\", ""))
-else:
-exclusions.append(file.replace(server_info["path"] + "/", ""))
-page_data["exclusions"] = exclusions
 self.controller.servers.refresh_server_settings(server_id)
-try:
-page_data["backup_list"] = server.list_backups()
-except:
-page_data["backup_list"] = []
-page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"])
 
 if subpage == "metrics":
 try:
@@ -779,20 +764,23 @@ class PanelHandler(BaseHandler):
 
 elif page == "download_backup":
 file = self.get_argument("file", "")
+backup_id = self.get_argument("backup_id", "")
 
 server_id = self.check_server_id()
 if server_id is None:
 return
+backup_config = self.controller.management.get_backup_config(backup_id)
 server_info = self.controller.servers.get_server_data_by_id(server_id)
+backup_location = os.path.join(backup_config["backup_location"], backup_id)
 backup_file = os.path.abspath(
 os.path.join(
-Helpers.get_os_understandable_path(server_info["backup_path"]), file
+Helpers.get_os_understandable_path(backup_location),
+file,
 )
 )
 if not self.helper.is_subdir(
 backup_file,
-Helpers.get_os_understandable_path(server_info["backup_path"]),
+Helpers.get_os_understandable_path(backup_location),
 ) or not os.path.isfile(backup_file):
 self.redirect("/panel/error?error=Invalid path detected")
 return
@@ -891,6 +879,8 @@ class PanelHandler(BaseHandler):
 os.path.join(self.helper.root_dir, "app", "translations")
 )
 ):
+if file == "humanized_index.json":
+continue
 if file.endswith(".json"):
 if file.split(".")[0] not in self.helper.get_setting(
 "disabled_language_files"
@@ -1129,6 +1119,9 @@ class PanelHandler(BaseHandler):
 page_data["server_data"] = self.controller.servers.get_server_data_by_id(
 server_id
 )
+page_data["backups"] = self.controller.management.get_backups_by_server(
+server_id, True
+)
 page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
 server_id
 )
@@ -1149,6 +1142,7 @@ class PanelHandler(BaseHandler):
 page_data["schedule"]["delay"] = 0
 page_data["schedule"]["time"] = ""
 page_data["schedule"]["interval"] = 1
+page_data["schedule"]["action_id"] = ""
 # we don't need to check difficulty here.
 # We'll just default to basic for new schedules
 page_data["schedule"]["difficulty"] = "basic"
@@ -1157,7 +1151,7 @@ class PanelHandler(BaseHandler):
 
 if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
 if not superuser:
-self.redirect("/panel/error?error=Unauthorized access To Schedules")
+self.redirect(SCHEDULE_AUTH_ERROR_URL)
 return
 
 template = "panel/server_schedule_edit.html"
@@ -1194,6 +1188,9 @@ class PanelHandler(BaseHandler):
 exec_user["user_id"], server_id
 )
 )
+page_data["backups"] = self.controller.management.get_backups_by_server(
+server_id, True
+)
 page_data["server_data"] = self.controller.servers.get_server_data_by_id(
 server_id
 )
@@ -1208,6 +1205,7 @@ class PanelHandler(BaseHandler):
 page_data["schedule"]["server_id"] = server_id
 page_data["schedule"]["schedule_id"] = schedule.schedule_id
 page_data["schedule"]["action"] = schedule.action
+page_data["schedule"]["action_id"] = schedule.action_id
 if schedule.name:
 page_data["schedule"]["name"] = schedule.name
 else:
@@ -1236,6 +1234,8 @@ class PanelHandler(BaseHandler):
 page_data["schedule"]["interval_type"] = schedule.interval_type
 if schedule.interval_type == "reaction":
 difficulty = "reaction"
+page_data["parent"] = None
+if schedule.parent:
 page_data["parent"] = self.controller.management.get_scheduled_task(
 schedule.parent
 )
@@ -1249,11 +1249,141 @@ class PanelHandler(BaseHandler):
 
 if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
 if not superuser:
-self.redirect("/panel/error?error=Unauthorized access To Schedules")
+self.redirect(SCHEDULE_AUTH_ERROR_URL)
 return
 
 template = "panel/server_schedule_edit.html"
 
+elif page == "edit_backup":
+server_id = self.get_argument("id", None)
+backup_id = self.get_argument("backup_id", None)
+page_data["active_link"] = "backups"
+page_data["permissions"] = {
|
||||||
|
"Commands": EnumPermissionsServer.COMMANDS,
|
||||||
|
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||||
|
"Logs": EnumPermissionsServer.LOGS,
|
||||||
|
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||||
|
"Backup": EnumPermissionsServer.BACKUP,
|
||||||
|
"Files": EnumPermissionsServer.FILES,
|
||||||
|
"Config": EnumPermissionsServer.CONFIG,
|
||||||
|
"Players": EnumPermissionsServer.PLAYERS,
|
||||||
|
}
|
||||||
|
if not self.failed_server:
|
||||||
|
server_obj = self.controller.servers.get_server_instance_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["backup_failed"] = server_obj.last_backup_status()
|
||||||
|
page_data["user_permissions"] = (
|
||||||
|
self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
|
page_data["backup_config"] = self.controller.management.get_backup_config(
|
||||||
|
backup_id
|
||||||
|
)
|
||||||
|
page_data["backups"] = self.controller.management.get_backups_by_server(
|
||||||
|
server_id, model=True
|
||||||
|
)
|
||||||
|
exclusions = []
|
||||||
|
page_data["backing_up"] = self.controller.servers.get_server_instance_by_id(
|
||||||
|
server_id
|
||||||
|
).is_backingup
|
||||||
|
self.controller.servers.refresh_server_settings(server_id)
|
||||||
|
try:
|
||||||
|
page_data["backup_list"] = server.list_backups(
|
||||||
|
page_data["backup_config"]
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
page_data["backup_list"] = []
|
||||||
|
page_data["backup_path"] = Helpers.wtol_path(
|
||||||
|
page_data["backup_config"]["backup_location"]
|
||||||
|
)
|
||||||
|
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["server_stats"]["server_type"] = (
|
||||||
|
self.controller.servers.get_server_type_by_id(server_id)
|
||||||
|
)
|
||||||
|
page_data["exclusions"] = (
|
||||||
|
self.controller.management.get_excluded_backup_dirs(backup_id)
|
||||||
|
)
|
||||||
|
# Make exclusion paths relative for page
|
||||||
|
for file in page_data["exclusions"]:
|
||||||
|
if Helpers.is_os_windows():
|
||||||
|
exclusions.append(file.replace(server_info["path"] + "\\", ""))
|
||||||
|
else:
|
||||||
|
exclusions.append(file.replace(server_info["path"] + "/", ""))
|
||||||
|
page_data["exclusions"] = exclusions
|
||||||
|
|
||||||
|
if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
|
||||||
|
if not superuser:
|
||||||
|
self.redirect(SCHEDULE_AUTH_ERROR_URL)
|
||||||
|
return
|
||||||
|
template = "panel/server_backup_edit.html"
|
||||||
|
|
||||||
|
elif page == "add_backup":
|
||||||
|
server_id = self.get_argument("id", None)
|
||||||
|
backup_id = self.get_argument("backup_id", None)
|
||||||
|
page_data["active_link"] = "backups"
|
||||||
|
page_data["permissions"] = {
|
||||||
|
"Commands": EnumPermissionsServer.COMMANDS,
|
||||||
|
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||||
|
"Logs": EnumPermissionsServer.LOGS,
|
||||||
|
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||||
|
"Backup": EnumPermissionsServer.BACKUP,
|
||||||
|
"Files": EnumPermissionsServer.FILES,
|
||||||
|
"Config": EnumPermissionsServer.CONFIG,
|
||||||
|
"Players": EnumPermissionsServer.PLAYERS,
|
||||||
|
}
|
||||||
|
if not self.failed_server:
|
||||||
|
server_obj = self.controller.servers.get_server_instance_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["backup_failed"] = server_obj.last_backup_status()
|
||||||
|
page_data["user_permissions"] = (
|
||||||
|
self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
|
page_data["backup_config"] = {
|
||||||
|
"excluded_dirs": [],
|
||||||
|
"max_backups": 0,
|
||||||
|
"server_id": server_id,
|
||||||
|
"backup_location": os.path.join(self.helper.backup_path, server_id),
|
||||||
|
"compress": False,
|
||||||
|
"shutdown": False,
|
||||||
|
"before": "",
|
||||||
|
"after": "",
|
||||||
|
}
|
||||||
|
page_data["backing_up"] = False
|
||||||
|
self.controller.servers.refresh_server_settings(server_id)
|
||||||
|
|
||||||
|
page_data["backup_list"] = []
|
||||||
|
page_data["backup_path"] = Helpers.wtol_path(
|
||||||
|
page_data["backup_config"]["backup_location"]
|
||||||
|
)
|
||||||
|
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
|
||||||
|
server_id
|
||||||
|
)
|
||||||
|
page_data["server_stats"]["server_type"] = (
|
||||||
|
self.controller.servers.get_server_type_by_id(server_id)
|
||||||
|
)
|
||||||
|
page_data["exclusions"] = []
|
||||||
|
|
||||||
|
if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
|
||||||
|
if not superuser:
|
||||||
|
self.redirect(SCHEDULE_AUTH_ERROR_URL)
|
||||||
|
return
|
||||||
|
template = "panel/server_backup_edit.html"
|
||||||
|
|
||||||
elif page == "edit_user":
|
elif page == "edit_user":
|
||||||
user_id = self.get_argument("id", None)
|
user_id = self.get_argument("id", None)
|
||||||
role_servers = self.controller.servers.get_authorized_servers(user_id)
|
role_servers = self.controller.servers.get_authorized_servers(user_id)
|
||||||
@ -1304,6 +1434,8 @@ class PanelHandler(BaseHandler):
|
|||||||
for file in sorted(
|
for file in sorted(
|
||||||
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
|
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
|
||||||
):
|
):
|
||||||
|
if file == "humanized_index.json":
|
||||||
|
continue
|
||||||
if file.endswith(".json"):
|
if file.endswith(".json"):
|
||||||
if file.split(".")[0] not in self.helper.get_setting(
|
if file.split(".")[0] not in self.helper.get_setting(
|
||||||
"disabled_language_files"
|
"disabled_language_files"
|
||||||
@ -1355,6 +1487,9 @@ class PanelHandler(BaseHandler):
|
|||||||
page_data["crafty_permissions_all"] = (
|
page_data["crafty_permissions_all"] = (
|
||||||
self.controller.crafty_perms.list_defined_crafty_permissions()
|
self.controller.crafty_perms.list_defined_crafty_permissions()
|
||||||
)
|
)
|
||||||
|
page_data["user_crafty_permissions"] = (
|
||||||
|
self.controller.crafty_perms.get_crafty_permissions_list(user_id)
|
||||||
|
)
|
||||||
|
|
||||||
if user_id is None:
|
if user_id is None:
|
||||||
self.redirect("/panel/error?error=Invalid User ID")
|
self.redirect("/panel/error?error=Invalid User ID")
|
||||||
@ -1402,7 +1537,7 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.management.add_to_audit_log(
|
self.controller.management.add_to_audit_log(
|
||||||
exec_user["user_id"],
|
exec_user["user_id"],
|
||||||
f"Removed user {target_user['username']} (UID:{user_id})",
|
f"Removed user {target_user['username']} (UID:{user_id})",
|
||||||
server_id=0,
|
server_id=None,
|
||||||
source_ip=self.get_remote_ip(),
|
source_ip=self.get_remote_ip(),
|
||||||
)
|
)
|
||||||
self.redirect("/panel/panel_config")
|
self.redirect("/panel/panel_config")
|
||||||
@ -1502,8 +1637,6 @@ class PanelHandler(BaseHandler):
|
|||||||
template = "panel/panel_edit_role.html"
|
template = "panel/panel_edit_role.html"
|
||||||
|
|
||||||
elif page == "activity_logs":
|
elif page == "activity_logs":
|
||||||
page_data["audit_logs"] = self.controller.management.get_activity_log()
|
|
||||||
|
|
||||||
template = "panel/activity_logs.html"
|
template = "panel/activity_logs.html"
|
||||||
|
|
||||||
elif page == "download_file":
|
elif page == "download_file":
|
||||||
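Note: the new `add_backup` branch above seeds a per-backup configuration with a fixed set of keys. The sketch below restates that default shape outside the handler so it is easier to see at a glance; the defaults themselves come straight from the hunk, while the function name and its parameters are purely illustrative and not part of the codebase.

```python
import os


def default_backup_config(server_id: str, backup_root: str) -> dict:
    # Mirrors the defaults seeded by the new "add_backup" page: no
    # exclusions, max_backups placeholder of 0, a per-server backup
    # location, and no compression/shutdown hooks or before/after commands.
    return {
        "excluded_dirs": [],
        "max_backups": 0,
        "server_id": server_id,
        "backup_location": os.path.join(backup_root, server_id),
        "compress": False,
        "shutdown": False,
        "before": "",
        "after": "",
    }
```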
@@ -1,5 +1,8 @@
import logging
+import json
import nh3
+from jsonschema import validate
+from jsonschema.exceptions import ValidationError

from app.classes.shared.helpers import Helpers
from app.classes.models.users import HelperUsers
@@ -45,7 +48,10 @@ class PublicHandler(BaseHandler):
}

if self.request.query:
-page_data["query"] = self.request.query
+request_query = self.request.query_arguments.get("next")
+if not request_query:
+self.redirect("/login")
+page_data["query"] = request_query[0].decode()

# sensible defaults
template = "public/404.html"
@@ -75,11 +81,7 @@ class PublicHandler(BaseHandler):

# if we have no page, let's go to login
else:
-if self.request.query:
+return self.redirect("/login")
-self.redirect("/login?" + self.request.query)
-else:
-self.redirect("/login")
-return

self.render(
template,
@@ -89,33 +91,61 @@ class PublicHandler(BaseHandler):
)

def post(self, page=None):
-# pylint: disable=no-member
-error = nh3.clean(self.get_argument("error", "Invalid Login!"))
-error_msg = nh3.clean(self.get_argument("error_msg", ""))
-# pylint: enable=no-member
+login_schema = {
+"type": "object",
+"properties": {
+"username": {
+"type": "string",
+},
+"password": {"type": "string"},
+},
+"required": ["username", "password"],
+"additionalProperties": False,
+}
+try:
+data = json.loads(self.request.body)
+except json.decoder.JSONDecodeError as e:
+logger.error(
+"Invalid JSON schema for API"
+f" login attempt from {self.get_remote_ip()}"
+)
+return self.finish_json(
+400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+)

+try:
+validate(data, login_schema)
+except ValidationError as e:
+logger.error(
+"Invalid JSON schema for API"
+f" login attempt from {self.get_remote_ip()}"
+)
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "VWggb2ghIFN0aW5reS 🪠",
+"error_data": str(e),
+},
+)

page_data = {
"version": self.helper.get_version_string(),
-"error": error,
"lang": self.helper.get_setting("language"),
"lang_page": self.helper.get_lang_page(self.helper.get_setting("language")),
"query": "",
}
if self.request.query:
-page_data["query"] = self.request.query
+page_data["query"] = self.request.query_arguments.get("next")[0].decode()

if page == "login":
+data = json.loads(self.request.body)

auth_log.info(
f"User attempting to authenticate from {self.get_remote_ip()}"
)
-next_page = "/login"
+entered_username = nh3.clean(data["username"])  # pylint: disable=no-member
-if self.request.query:
+entered_password = data["password"]
-next_page = "/login?" + self.request.query

-# pylint: disable=no-member
-entered_username = nh3.clean(self.get_argument("username"))
-entered_password = self.get_argument("password")
-# pylint: enable=no-member

try:
user_id = HelperUsers.get_user_id_by_name(entered_username.lower())
@@ -127,16 +157,18 @@ class PublicHandler(BaseHandler):
f" Authentication failed from remote IP {self.get_remote_ip()}"
" Users does not exist."
)
-error_msg = "Incorrect username or password. Please try again."
+self.finish_json(
+403,
+{
+"status": "error",
+"error": self.helper.translation.translate(
+"login", "incorrect", self.helper.get_setting("language")
+),
+},
+)
# self.clear_cookie("user")
# self.clear_cookie("user_data")
-self.clear_cookie("token")
+return self.clear_cookie("token")
-if self.request.query:
-self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
-else:
-self.redirect(f"/login?error_msg={error_msg}")
-return

# if we don't have a user
if not user_data:
auth_log.error(
@@ -145,15 +177,18 @@ class PublicHandler(BaseHandler):
" User does not exist."
)
self.controller.log_attempt(self.get_remote_ip(), entered_username)
-error_msg = "Incorrect username or password. Please try again."
+self.finish_json(
+403,
+{
+"status": "error",
+"error": self.helper.translation.translate(
+"login", "incorrect", self.helper.get_setting("language")
+),
+},
+)
# self.clear_cookie("user")
# self.clear_cookie("user_data")
-self.clear_cookie("token")
+return self.clear_cookie("token")
-if self.request.query:
-self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
-else:
-self.redirect(f"/login?error_msg={error_msg}")
-return

# if they are disabled
if not user_data.enabled:
@@ -163,19 +198,18 @@ class PublicHandler(BaseHandler):
" User account disabled"
)
self.controller.log_attempt(self.get_remote_ip(), entered_username)
-error_msg = (
+self.finish_json(
-"User account disabled. Please contact "
+403,
-"your system administrator for more info."
+{
+"status": "error",
+"error": self.helper.translation.translate(
+"login", "disabled", self.helper.get_setting("language")
+),
+},
)
# self.clear_cookie("user")
# self.clear_cookie("user_data")
-self.clear_cookie("token")
+return self.clear_cookie("token")
-if self.request.query:
-self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
-else:
-self.redirect(f"/login?error_msg={error_msg}")
-return

login_result = self.helper.verify_pass(entered_password, user_data.password)

# Valid Login
@@ -197,16 +231,14 @@ class PublicHandler(BaseHandler):
)
# log this login
self.controller.management.add_to_audit_log(
-user_data.user_id, "Logged in", 0, self.get_remote_ip()
+user_data.user_id, "Logged in", None, self.get_remote_ip()
)

-if self.request.query_arguments.get("next"):
+return self.finish_json(
-next_page = self.request.query_arguments.get("next")[0].decode()
+200, {"status": "ok", "data": {"message": "login successful!"}}
-else:
+)
-next_page = "/panel/dashboard"

-self.redirect(next_page)
+# We'll continue on and handle unsuccessful logins
-else:
auth_log.error(
f"User attempted to log into {entered_username}."
f" Authentication failed from remote IP {self.get_remote_ip()}"
@@ -215,17 +247,21 @@ class PublicHandler(BaseHandler):
# self.clear_cookie("user")
# self.clear_cookie("user_data")
self.clear_cookie("token")
-error_msg = "Incorrect username or password. Please try again."
+error_msg = self.helper.translation.translate(
+"login", "incorrect", self.helper.get_setting("language")
+)
+if entered_password == "app/config/default-creds.txt":
+error_msg += ". "
+error_msg += self.helper.translation.translate(
+"login", "defaultPath", self.helper.get_setting("language")
+)
# log this failed login attempt
self.controller.management.add_to_audit_log(
-user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
+user_data.user_id, "Tried to log in", None, self.get_remote_ip()
+)
+return self.finish_json(
+403,
+{"status": "error", "error": error_msg},
)
-if self.request.query:
-self.redirect(f"/login?error_msg={error_msg}&{self.request.query}")
else:
-self.redirect(f"/login?error_msg={error_msg}")
+self.redirect("/login?")
-else:
-if self.request.query:
-self.redirect("/login?" + self.request.query)
-else:
-self.redirect("/login")
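With this hunk the public login handler stops reading form fields and redirecting with `error_msg` query strings; it now expects a JSON body and answers with a JSON envelope. A hedged client-side sketch follows: only the body shape (`username`/`password`) and the `{"status": ..., "error"/"data": ...}` responses appear in the diff, while the `/login` path, the timeout, and the cookie-based session handling are assumptions.

```python
import requests


def panel_login(base_url: str, username: str, password: str) -> requests.Session:
    """Log into the panel and return a session carrying the token cookie."""
    session = requests.Session()
    resp = session.post(
        f"{base_url}/login",  # assumed path for the public login page
        json={"username": username, "password": password},
        timeout=10,
    )
    body = resp.json()
    if body.get("status") != "ok":
        # The handler returns 400/403 with a translated "error" message.
        raise RuntimeError(body.get("error", "login failed"))
    return session
```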
@@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import (
)
from app.classes.web.routes.api.servers.server.backups.backup.index import (
ApiServersServerBackupsBackupIndexHandler,
+ApiServersServerBackupsBackupFilesIndexHandler,
)
from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
)
+from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@@ -221,92 +223,113 @@ def api_handlers(handler_args):
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/?",
+r"/api/v2/servers/([a-z0-9-]+)/?",
ApiServersServerIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/backups/?",
+r"/api/v2/servers/([a-z0-9-]+)/backups/?",
ApiServersServerBackupsIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/backups/backup/?",
+r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?",
ApiServersServerBackupsBackupIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/files/?",
+r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?",
-ApiServersServerFilesIndexHandler,
+ApiServersServerBackupsBackupFilesIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/files/create/?",
+r"/api/v2/servers/([a-z0-9-]+)/files/create/?",
ApiServersServerFilesCreateHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/files/zip/?",
+r"/api/v2/servers/([a-z0-9-]+)/files/zip/?",
ApiServersServerFilesZipHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/tasks/?",
+r"/api/v2/crafty/admin/upload/?",
+ApiFilesUploadHandler,
+handler_args,
+),
+(
+r"/api/v2/servers/import/upload/?",
+ApiFilesUploadHandler,
+handler_args,
+),
+(
+r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
+ApiFilesUploadHandler,
+handler_args,
+),
+(
+r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?",
+ApiServersServerFilesIndexHandler,
+handler_args,
+),
+(
+r"/api/v2/servers/([a-z0-9-]+)/tasks/?",
ApiServersServerTasksIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/tasks/([0-9]+)/?",
+r"/api/v2/servers/([a-z0-9-]+)/tasks/([0-9]+)/?",
ApiServersServerTasksTaskIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/tasks/([0-9]+)/children/?",
+r"/api/v2/servers/([a-z0-9-]+)/tasks/([0-9]+)/children/?",
ApiServersServerTasksTaskChildrenHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/stats/?",
+r"/api/v2/servers/([a-z0-9-]+)/stats/?",
ApiServersServerStatsHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/history/?",
+r"/api/v2/servers/([a-z0-9-]+)/history/?",
ApiServersServerHistoryHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/webhook/([0-9]+)/?",
+r"/api/v2/servers/([a-z0-9-]+)/webhook/([0-9]+)/?",
ApiServersServerWebhooksManagementIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/webhook/?",
+r"/api/v2/servers/([a-z0-9-]+)/webhook/?",
ApiServersServerWebhooksIndexHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/action/([a-z_]+)/?",
+# optional third argument when we need a action ID
+r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?",
ApiServersServerActionHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/logs/?",
+r"/api/v2/servers/([a-z0-9-]+)/logs/?",
ApiServersServerLogsHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/users/?",
+r"/api/v2/servers/([a-z0-9-]+)/users/?",
ApiServersServerUsersHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/public/?",
+r"/api/v2/servers/([a-z0-9-]+)/public/?",
ApiServersServerPublicHandler,
handler_args,
),
(
-r"/api/v2/servers/([0-9]+)/stdin/?",
+r"/api/v2/servers/([a-z0-9-]+)/stdin/?",
ApiServersServerStdinHandler,
handler_args,
),
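The route changes above replace every numeric `([0-9]+)` server capture with `([a-z0-9-]+)`, so the API now matches lowercase, dash-separated string identifiers instead of integer row IDs. A hedged illustration of the resulting URL shape (the host-relative path comes from the routes above; the UUID value is made up):

```python
import re

# Old style (numeric IDs):  /api/v2/servers/42/stats
# New style (string IDs):   /api/v2/servers/<uuid-like-id>/stats
SERVER_STATS_ROUTE = re.compile(r"^/api/v2/servers/([a-z0-9-]+)/stats/?$")

match = SERVER_STATS_ROUTE.match(
    "/api/v2/servers/4edb5fc0-b733-4b17-a5fe-d3c4e0d5a9e3/stats"
)
assert match is not None
print(match.group(1))  # prints the captured server id
```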
@@ -1,6 +1,6 @@
-import datetime
import logging
from app.classes.web.base_api_handler import BaseApiHandler
+from app.classes.shared.helpers import Helpers

logger = logging.getLogger(__name__)

@@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler):

logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
self.controller.users.raw_update_user(
-auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
+auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()}
)

self.finish_json(200, {"status": "ok"})
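Swapping `datetime.datetime.now()` for `Helpers.get_utc_now()` presumably pins token invalidation to UTC rather than the host's local clock. A minimal sketch of such a helper is shown below; only the name `Helpers.get_utc_now()` appears in the diff, so the implementation here is an assumption.

```python
import datetime


def get_utc_now() -> datetime.datetime:
    # Timezone-aware "now" in UTC, so comparisons against token issue
    # times behave the same regardless of the server's local timezone.
    return datetime.datetime.now(datetime.timezone.utc)
```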
@@ -17,7 +17,7 @@ login_schema = {
"minLength": 4,
"pattern": "^[a-z0-9_]+$",
},
-"password": {"type": "string", "maxLength": 20, "minLength": 4},
+"password": {"type": "string", "minLength": 4},
},
"required": ["username", "password"],
"additionalProperties": False,
@@ -101,7 +101,7 @@ class ApiAuthLoginHandler(BaseApiHandler):

# log this login
self.controller.management.add_to_audit_log(
-user_data.user_id, "logged in via the API", 0, self.get_remote_ip()
+user_data.user_id, "logged in via the API", None, self.get_remote_ip()
)

self.finish_json(
@@ -119,7 +119,7 @@ class ApiAuthLoginHandler(BaseApiHandler):
else:
# log this failed login attempt
self.controller.management.add_to_audit_log(
-user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
+user_data.user_id, "Tried to log in", None, self.get_remote_ip()
)
self.finish_json(
401,
@@ -26,6 +26,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
+_,
) = auth_data

data = self.helper.get_announcements()
@@ -72,6 +73,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
+_,
) = auth_data
try:
data = json.loads(self.request.body)
@@ -1,3 +1,5 @@
+import os
+import json
from app.classes.web.base_api_handler import BaseApiHandler


@@ -12,6 +14,7 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
_,
superuser,
_,
+_,
) = auth_data

if not superuser:
@@ -22,9 +25,17 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
raise NotImplementedError

if log_type == "audit":
+with open(
+os.path.join(self.controller.project_root, "logs", "audit.log"),
+"r",
+encoding="utf-8",
+) as f:
+log_lines = [json.loads(line) for line in f]
+rev_log_lines = log_lines[::-1]

return self.finish_json(
200,
-{"status": "ok", "data": self.controller.management.get_activity_log()},
+{"status": "ok", "data": rev_log_lines},
)

if log_type == "session":
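The audit endpoint now serves entries straight from `logs/audit.log` instead of the database, parsing the file line by line and reversing it so the newest events come first. A hedged standalone sketch of the same read, assuming the log is JSON-lines (one JSON object per line); the helper name and the blank-line guard are illustrative additions:

```python
import json


def read_audit_log(path: str) -> list[dict]:
    # Each line of the audit log is assumed to be a standalone JSON object.
    with open(path, "r", encoding="utf-8") as f:
        entries = [json.loads(line) for line in f if line.strip()]
    return entries[::-1]  # newest entries first, as the handler returns them
```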
@@ -9,7 +9,6 @@ from app.classes.web.base_api_handler import BaseApiHandler
config_json_schema = {
"type": "object",
"properties": {
-"http_port": {"type": "integer"},
"https_port": {"type": "integer"},
"language": {
"type": "string",
@@ -32,6 +31,7 @@ config_json_schema = {
"monitored_mounts": {"type": "array"},
"dir_size_poll_freq_minutes": {"type": "integer"},
"crafty_logs_delete_after_days": {"type": "integer"},
+"big_bucket_repo": {"type": "string"},
},
"additionalProperties": False,
"minProperties": 1,
@@ -68,6 +68,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
_,
superuser,
_,
+_,
) = auth_data

# GET /api/v2/roles?ids=true
@@ -94,20 +95,14 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
-(
+(_, _, _, superuser, user, _) = auth_data
-_,
-_,
-_,
-superuser,
-user,
-) = auth_data

if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

try:
data = orjson.loads(self.request.body)
-except orjson.decoder.JSONDecodeError as e:
+except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -129,7 +124,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
"edited config.json",
-server_id=0,
+server_id=None,
source_ip=self.get_remote_ip(),
)

@@ -150,6 +145,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
_,
+_,
) = auth_data

# GET /api/v2/roles?ids=true
@@ -182,13 +178,14 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
user,
+_,
) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

try:
data = orjson.loads(self.request.body)
-except orjson.decoder.JSONDecodeError as e:
+except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -226,7 +223,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"customized login photo: {data['photo']}/{data['opacity']}",
-server_id=0,
+server_id=None,
source_ip=self.get_remote_ip(),
)
self.controller.management.set_login_opacity(int(data["opacity"]))
@@ -24,6 +24,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
superuser,
_,
+_,
) = auth_data

# GET /api/v2/roles?ids=true
@@ -56,6 +57,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
_,
_,
+_,
) = auth_data

if not auth_data:
@@ -68,7 +70,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):

try:
data = orjson.loads(self.request.body)
-except orjson.decoder.JSONDecodeError as e:
+except orjson.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
@@ -109,7 +111,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"updated master servers dir to {new_dir}/servers",
-server_id=0,
+server_id=None,
source_ip=self.get_remote_ip(),
)

@@ -12,17 +12,18 @@ class ApiCraftyJarCacheIndexHandler(BaseApiHandler):
_,
_,
_,
+_,
) = auth_data

if not auth_data[4]["superuser"]:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

-self.controller.server_jars.manual_refresh_cache()
+self.controller.big_bucket.manual_refresh_cache()
self.finish_json(
200,
{
"status": "ok",
-"data": self.controller.server_jars.get_serverjar_data(),
+"data": self.controller.big_bucket.get_bucket_data(),
},
)

app/classes/web/routes/api/crafty/upload/index.py (new file, 308 lines)
@@ -0,0 +1,308 @@
+import os
+import logging
+import shutil
+from app.classes.models.server_permissions import EnumPermissionsServer
+from app.classes.shared.helpers import Helpers
+from app.classes.web.base_api_handler import BaseApiHandler

+logger = logging.getLogger(__name__)
+IMAGE_MIME_TYPES = [
+"image/bmp",
+"image/cis-cod",
+"image/gif",
+"image/ief",
+"image/jpeg",
+"image/pipeg",
+"image/svg+xml",
+"image/tiff",
+"image/x-cmu-raster",
+"image/x-cmx",
+"image/x-icon",
+"image/x-portable-anymap",
+"image/x-portable-bitmap",
+"image/x-portable-graymap",
+"image/x-portable-pixmap",
+"image/x-rgb",
+"image/x-xbitmap",
+"image/x-xpixmap",
+"image/x-xwindowdump",
+"image/png",
+"image/webp",
+]

+ARCHIVE_MIME_TYPES = ["application/zip"]


+class ApiFilesUploadHandler(BaseApiHandler):
+async def post(self, server_id=None):
+auth_data = self.authenticate_user()
+if not auth_data:
+return

+upload_type = self.request.headers.get("type")
+accepted_types = []

+if server_id:
+# Check to make sure user is authorized for the server
+if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+# if the user doesn't have access to the server, return an error
+return self.finish_json(
+400, {"status": "error", "error": "NOT_AUTHORIZED"}
+)
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
+self.controller.server_perms.get_user_permissions_mask(
+auth_data[4]["user_id"], server_id
+),
+auth_data[5],
+)
+# Make sure user has file access for the server
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
+# if the user doesn't have Files permission, return an error
+return self.finish_json(
+400, {"status": "error", "error": "NOT_AUTHORIZED"}
+)

+u_type = "server_upload"
+# Make sure user is a super user if they're changing panel settings
+elif auth_data[4]["superuser"] and upload_type == "background":
+u_type = "admin_config"
+self.upload_dir = os.path.join(
+self.controller.project_root,
+"app/frontend/static/assets/images/auth/custom",
+)
+accepted_types = IMAGE_MIME_TYPES
+elif upload_type == "import":
+# Check that user can make servers
+if (
+not self.controller.crafty_perms.can_create_server(
+auth_data[4]["user_id"]
+)
+and not auth_data[4]["superuser"]
+):
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"data": {"message": ""},
+},
+)
+# Set directory to upload import dir
+self.upload_dir = os.path.join(
+self.controller.project_root, "import", "upload"
+)
+u_type = "server_import"
+accepted_types = ARCHIVE_MIME_TYPES
+else:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"data": {"message": ""},
+},
+)
+# Get the headers from the request
+self.chunk_hash = self.request.headers.get("chunkHash", 0)
+self.file_id = self.request.headers.get("fileId")
+self.chunked = self.request.headers.get("chunked", False)
+self.filename = self.request.headers.get("fileName", None)
+try:
+file_size = int(self.request.headers.get("fileSize", None))
+total_chunks = int(self.request.headers.get("totalChunks", 0))
+except TypeError:
+return self.finish_json(
+400, {"status": "error", "error": "TYPE ERROR", "data": {}}
+)
+self.chunk_index = self.request.headers.get("chunkId")
+if u_type == "server_upload":
+self.upload_dir = self.request.headers.get("location", None)
+self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)

+if u_type == "server_upload":
+# If this is an upload from a server the path will be what
+# Is requested
+full_path = os.path.join(self.upload_dir, self.filename)

+# Check to make sure the requested path is inside the server's directory
+if not self.helper.is_subdir(
+full_path,
+Helpers.get_os_understandable_path(
+self.controller.servers.get_server_data_by_id(server_id)["path"]
+),
+):
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT AUTHORIZED",
+"data": {"message": "Traversal detected"},
+},
+)
+# Check to make sure the file type we're being sent is what we're expecting
+if (
+self.file_helper.check_mime_types(self.filename) not in accepted_types
+and u_type != "server_upload"
+):
+return self.finish_json(
+422,
+{
+"status": "error",
+"error": "INVALID FILE TYPE",
+"data": {
+"message": f"Invalid File Type only accepts {accepted_types}"
+},
+},
+)
+_total, _used, free = shutil.disk_usage(self.upload_dir)

+# Check to see if we have enough space
+if free <= file_size:
+return self.finish_json(
+507,
+{
+"status": "error",
+"error": "NO STORAGE SPACE",
+"data": {"message": "Out Of Space!"},
+},
+)

+# If this has no chunk index we know it's the inital request
+if self.chunked and not self.chunk_index:
+return self.finish_json(
+200, {"status": "ok", "data": {"file-id": self.file_id}}
+)
+# Create the upload and temp directories if they don't exist
+os.makedirs(self.upload_dir, exist_ok=True)

+# Check for chunked header. We will handle this request differently
+# if it doesn't exist
+if not self.chunked:
+# Write the file directly to the upload dir
+with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
+chunk = self.request.body
+if chunk:
+file.write(chunk)
+# We'll check the file hash against the sent hash once the file is
+# written. We cannot check this buffer.
+calculated_hash = self.file_helper.calculate_file_hash(
+os.path.join(self.upload_dir, self.filename)
+)
+logger.info(
+f"File upload completed. Filename: {self.filename} Type: {u_type}"
+)
+return self.finish_json(
+200,
+{
+"status": "completed",
+"data": {"message": "File uploaded successfully"},
+},
+)
+# Since this is a chunked upload we'll create the temp dir for parts.
+os.makedirs(self.temp_dir, exist_ok=True)

+# Read headers and query parameters
+content_length = int(self.request.headers.get("Content-Length"))
+if content_length <= 0:
+logger.error(
+f"File upload failed. Filename: {self.filename}"
+f"Type: {u_type} Error: INVALID CONTENT LENGTH"
+)
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "INVALID CONTENT LENGTH",
+"data": {"message": "Invalid content length"},
+},
+)

+# At this point filename, chunk index and total chunks are required
+# in the request
+if not self.filename or self.chunk_index is None:
+logger.error(
+f"File upload failed. Filename: {self.filename}"
+f"Type: {u_type} Error: CHUNK INDEX NOT FOUND"
+)
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "INDEX ERROR",
+"data": {
+"message": "Filename, chunk_index,"
+" and total_chunks are required"
+},
+},
+)

+# Calculate the hash of the buffer and compare it against the expected hash
+calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body)
+if str(self.chunk_hash) != str(calculated_hash):
+logger.error(
+f"File upload failed. Filename: {self.filename}"
+f"Type: {u_type} Error: INVALID HASH"
+)
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "INVALID_HASH",
+"data": {
+"message": "Hash recieved does not match reported sent hash.",
+"chunk_id": self.chunk_index,
+},
+},
+)

+# File paths
+file_path = os.path.join(self.upload_dir, self.filename)
+chunk_path = os.path.join(
+self.temp_dir, f"{self.filename}.part{self.chunk_index}"
+)

+# Save the chunk
+with open(chunk_path, "wb") as f:
+f.write(self.request.body)

+# Check if all chunks are received
+received_chunks = [
+f
+for f in os.listdir(self.temp_dir)
+if f.startswith(f"{self.filename}.part")
+]
+# When we've reached the total chunks we'll
+# Compare the hash and write the file
+if len(received_chunks) == total_chunks:
+with open(file_path, "wb") as outfile:
+for i in range(total_chunks):
+chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
+with open(chunk_file, "rb") as infile:
+outfile.write(infile.read())
+os.remove(chunk_file)
+logger.info(
+f"File upload completed. Filename: {self.filename}"
+f" Path: {file_path} Type: {u_type}"
+)
+self.controller.management.add_to_audit_log(
+auth_data[4]["user_id"],
+f"Uploaded file {self.filename}",
+server_id,
+self.request.remote_ip,
+)
+self.finish_json(
+200,
+{
+"status": "completed",
+"data": {"message": "File uploaded successfully"},
+},
+)
+else:
+self.finish_json(
+200,
+{
+"status": "partial",
+"data": {"message": f"Chunk {self.chunk_index} received"},
+},
+)
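The new upload handler drives everything from request headers: an initial chunked request (no `chunkId`) registers the upload, then each chunk is POSTed with its index and hash, and the server reassembles the parts once `totalChunks` have arrived. Below is a hedged client-side sketch of that flow. The header names (`type`, `fileName`, `fileSize`, `totalChunks`, `chunked`, `fileId`, `chunkId`, `chunkHash`, `location`) come from the handler above; the endpoint path, auth header, chunk size, and hash algorithm are assumptions, since the server compares against `file_helper.calculate_buffer_hash()`, whose algorithm is not shown in this diff.

```python
import hashlib
import math
import os
import uuid

import requests

CHUNK_SIZE = 1024 * 1024  # 1 MiB per chunk (illustrative)


def upload_server_file(base_url, token, server_id, path, dest_dir):
    size = os.path.getsize(path)
    total_chunks = math.ceil(size / CHUNK_SIZE)
    url = f"{base_url}/api/v2/servers/{server_id}/files/upload/"
    common = {
        "Authorization": token,       # assumed auth header name
        "type": "server_upload",
        "fileName": os.path.basename(path),
        "fileSize": str(size),
        "totalChunks": str(total_chunks),
        "chunked": "true",
        "fileId": str(uuid.uuid4()),  # client-chosen id, echoed back by the server
        "location": dest_dir,
    }
    # Initial request carries no chunkId, so the handler only acknowledges
    # the upload and returns the file id.
    requests.post(url, headers=common, timeout=30).raise_for_status()

    with open(path, "rb") as f:
        for index in range(total_chunks):
            chunk = f.read(CHUNK_SIZE)
            headers = {
                **common,
                "chunkId": str(index),
                # Assumed hash algorithm; it must match whatever
                # file_helper.calculate_buffer_hash() uses server-side.
                "chunkHash": hashlib.sha256(chunk).hexdigest(),
            }
            requests.post(url, headers=headers, data=chunk, timeout=30).raise_for_status()
```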
@@ -2,6 +2,7 @@ import typing as t
from jsonschema import ValidationError, validate
import orjson
from playhouse.shortcuts import model_to_dict
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler

create_role_schema = {
@@ -10,6 +11,7 @@ create_role_schema = {
"name": {
"type": "string",
"minLength": 1,
+"pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@@ -17,12 +19,12 @@ create_role_schema = {
"type": "object",
"properties": {
"server_id": {
-"type": "integer",
+"type": "string",
"minimum": 1,
},
"permissions": {
"type": "string",
-"pattern": "^[01]{8}$",  # 8 bits, see EnumPermissionsServer
+"pattern": r"^[01]{8}$",  # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@@ -47,7 +49,7 @@ basic_create_role_schema = {
"type": "object",
"properties": {
"server_id": {
-"type": "integer",
+"type": "string",
"minimum": 1,
},
"permissions": {
@@ -71,16 +73,20 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
-_,
+exec_user_permissions_crafty,
_,
superuser,
_,
+_,
) = auth_data

# GET /api/v2/roles?ids=true
get_only_ids = self.get_query_argument("ids", None) == "true"

-if not superuser:
+if (
+not superuser
+and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

self.finish_json(
@@ -103,13 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
-_,
+exec_user_permissions_crafty,
_,
superuser,
user,
+_,
) = auth_data

-if not superuser:
+if (
+not superuser
+and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

try:
@@ -136,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler):

role_name = data["name"]
manager = data.get("manager", None)
+if not superuser and not manager:
+manager = auth_data[4]["user_id"]
if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0:
manager = None

@@ -161,7 +173,7 @@ class ApiRolesIndexHandler(BaseApiHandler):
self.controller.management.add_to_audit_log(
user["user_id"],
f"created role {role_name} (RID:{role_id})",
-server_id=0,
+server_id=None,
source_ip=self.get_remote_ip(),
)

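With the schema changes above, role payloads now reference servers by string ID and continue to carry the permission mask as an 8-character bit string, one bit per `EnumPermissionsServer` entry. A hedged example body for the role-creation endpoint follows; the field names match the updated `create_role_schema`, while the role name, UUID value, and mask bits are illustrative.

```python
import json

# Example request body matching the updated create_role_schema:
# server_id is now a string (UUID-style) rather than an integer.
create_role_payload = {
    "name": "moderators",  # must not contain "," "[" or "]" per the new pattern
    "servers": [
        {
            "server_id": "4edb5fc0-b733-4b17-a5fe-d3c4e0d5a9e3",  # illustrative
            "permissions": "01001000",  # 8-bit mask, see EnumPermissionsServer
        }
    ],
}
print(json.dumps(create_role_payload, indent=2))
```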
|
@@ -1,6 +1,7 @@
 from jsonschema import ValidationError, validate
 import orjson
-from peewee import DoesNotExist
+from peewee import DoesNotExist, IntegrityError
+from app.classes.models.crafty_permissions import EnumPermissionsCrafty
 from app.classes.web.base_api_handler import BaseApiHandler

 modify_role_schema = {
@@ -9,6 +10,7 @@ modify_role_schema = {
 "name": {
 "type": "string",
 "minLength": 1,
+"pattern": r"^[^,\[\]]*$",
 },
 "servers": {
 "type": "array",
@@ -16,12 +18,12 @@ modify_role_schema = {
 "type": "object",
 "properties": {
 "server_id": {
-"type": "integer",
+"type": "string",
 "minimum": 1,
 },
 "permissions": {
 "type": "string",
-"pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
+"pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
 },
 },
 "required": ["server_id", "permissions"],
@@ -46,7 +48,7 @@ basic_modify_role_schema = {
 "type": "object",
 "properties": {
 "server_id": {
-"type": "integer",
+"type": "string",
 "minimum": 1,
 },
 "permissions": {
@@ -70,13 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
 return
 (
 _,
-_,
+exec_user_permissions_crafty,
 _,
 superuser,
 _,
+_,
 ) = auth_data

-if not superuser:
+if (
+not superuser
+and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+):
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

 try:
@@ -97,9 +103,13 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
 _,
 superuser,
 user,
+_,
 ) = auth_data
+role = self.controller.roles.get_role(role_id)
-if not superuser:
+if (
+str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"])
+and not superuser
+):
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

 self.controller.roles.remove_role(role_id)
@@ -112,7 +122,7 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
 self.controller.management.add_to_audit_log(
 user["user_id"],
 f"deleted role with ID {role_id}",
-server_id=0,
+server_id=None,
 source_ip=self.get_remote_ip(),
 )

@@ -122,18 +132,30 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
 return
 (
 _,
-_,
+exec_user_permissions_crafty,
 _,
 superuser,
 user,
+_,
 ) = auth_data

-if not superuser:
+role = self.controller.roles.get_role(role_id)
-return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+if not superuser and (
+user["user_id"] != role["manager"]
+or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
+):
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": "Not Authorized",
+},
+)

 try:
 data = orjson.loads(self.request.body)
-except orjson.decoder.JSONDecodeError as e:
+except orjson.JSONDecodeError as e:
 return self.finish_json(
 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
 )
@@ -168,11 +190,14 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
 )
 except DoesNotExist:
 return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
+except IntegrityError:
+return self.finish_json(
+404, {"status": "error", "error": "ROLE_NAME_EXISTS"}
+)
 self.controller.management.add_to_audit_log(
 user["user_id"],
 f"modified role with ID {role_id}",
-server_id=0,
+server_id=None,
 source_ip=self.get_remote_ip(),
 )

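The role schema above now expects `server_id` as a string, constrains role names with `^[^,\[\]]*$`, and keeps the eight-bit permission pattern. A trimmed-down sketch (not the full project schema) showing how `jsonschema.validate` rejects payloads that break those rules:

```python
from jsonschema import ValidationError, validate

# Trimmed copy of the modified schema, for illustration only.
role_schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string", "minLength": 1, "pattern": r"^[^,\[\]]*$"},
        "servers": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "server_id": {"type": "string"},
                    "permissions": {"type": "string", "pattern": r"^[01]{8}$"},
                },
                "required": ["server_id", "permissions"],
            },
        },
    },
}

good = {"name": "Moderators", "servers": [{"server_id": "a1b2", "permissions": "10000010"}]}
bad = {"name": "Mods[]", "servers": [{"server_id": 7, "permissions": "999"}]}

validate(good, role_schema)  # passes silently
try:
    validate(bad, role_schema)
except ValidationError as e:
    print("rejected:", e.message)
```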
@@ -13,6 +13,7 @@ class ApiRolesRoleServersHandler(BaseApiHandler):
 _,
 superuser,
 _,
+_,
 ) = auth_data

 # GET /api/v2/roles/role/servers?ids=true
@@ -12,6 +12,7 @@ class ApiRolesRoleUsersHandler(BaseApiHandler):
 _,
 superuser,
 _,
+_,
 ) = auth_data

 if not superuser:
@@ -23,6 +23,7 @@ new_server_schema = {
 "type": "string",
 "examples": ["My Server"],
 "minLength": 2,
+"pattern": "^[^/\\\\]*$",
 },
 "roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]},
 "stop_command": {
@@ -139,7 +140,7 @@ new_server_schema = {
 "category": {
 "title": "Jar Category",
 "type": "string",
-"examples": ["modded", "vanilla"],
+"examples": ["Mc_java_servers", "Mc_java_proxies"],
 },
 "properties": {
 "type": {
@@ -743,6 +744,7 @@ class ApiServersIndexHandler(BaseApiHandler):
 _,
 _superuser,
 user,
+_,
 ) = auth_data

 if EnumPermissionsCrafty.SERVER_CREATION not in exec_user_crafty_permissions:
@@ -782,9 +784,7 @@ class ApiServersIndexHandler(BaseApiHandler):
 405, {"status": "error", "error": "DATA CONSTRAINT FAILED"}
 )
 return
-new_server_id, new_server_uuid = self.controller.create_api_server(
+new_server_id = self.controller.create_api_server(data, user["user_id"])
-data, user["user_id"]
-)

 self.controller.servers.stats.record_stats()

@@ -793,7 +793,7 @@ class ApiServersIndexHandler(BaseApiHandler):
 (
 f"created server {data['name']}"
 f" (ID: {new_server_id})"
-f" (UUID: {new_server_uuid})"
+f" (UUID: {new_server_id})"
 ),
 server_id=new_server_id,
 source_ip=self.get_remote_ip(),
@@ -805,7 +805,7 @@ class ApiServersIndexHandler(BaseApiHandler):
 "status": "ok",
 "data": {
 "new_server_id": str(new_server_id),
-"new_server_uuid": new_server_uuid,
+"new_server_uuid": new_server_id,
 },
 },
 )
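The `new_server_schema` hunk adds the JSON-string pattern `"^[^/\\\\]*$"` to server names, which compiles to a regex that forbids forward and back slashes (blocking path-like names). A small check of what that pattern accepts, shown here only as an illustration of the rule:

```python
import re

# The JSON-schema pattern "^[^/\\\\]*$" is the regex ^[^/\\]*$ once the
# string escaping is resolved: no "/" or "\" anywhere in the name.
NAME_PATTERN = re.compile(r"^[^/\\]*$")

for candidate in ("My Server", "../etc", "world\\backup"):
    print(candidate, "->", bool(NAME_PATTERN.fullmatch(candidate)))
# My Server -> True, ../etc -> False, world\backup -> False
```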
@@ -1,9 +1,9 @@
 import logging
 import os
+import json
 from app.classes.models.server_permissions import EnumPermissionsServer
 from app.classes.models.servers import Servers
 from app.classes.shared.file_helpers import FileHelpers
-from app.classes.shared.helpers import Helpers
 from app.classes.web.base_api_handler import BaseApiHandler


@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)


 class ApiServersServerActionHandler(BaseApiHandler):
-def post(self, server_id: str, action: str):
+def post(self, server_id: str, action: str, action_id=None):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -19,13 +19,14 @@ class ApiServersServerActionHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.COMMANDS
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.COMMANDS not in server_permissions:
 # if the user doesn't have Commands permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -34,6 +35,17 @@ class ApiServersServerActionHandler(BaseApiHandler):
 self.controller.crafty_perms.can_create_server(auth_data[4]["user_id"])
 or auth_data[4]["superuser"]
 ):
+srv_object = self.controller.servers.get_server_instance_by_id(
+server_id
+)
+if srv_object.check_running():
+return self.finish_json(
+409,
+{
+"status": "error",
+"error": "Server Running!",
+},
+)
 self._clone_server(server_id, auth_data[4]["user_id"])
 return self.finish_json(200, {"status": "ok"})
 return self.finish_json(
@@ -43,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler):
 return self._agree_eula(server_id, auth_data[4]["user_id"])

 self.controller.management.send_command(
-auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
+auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id
 )

 self.finish_json(
@@ -68,10 +80,44 @@ class ApiServersServerActionHandler(BaseApiHandler):
 name_counter += 1
 new_server_name = server_data.get("server_name") + f" (Copy {name_counter})"

-new_server_uuid = Helpers.create_uuid()
+new_server_id = self.helper.create_uuid()
-while os.path.exists(os.path.join(self.helper.servers_dir, new_server_uuid)):
+new_server_path = os.path.join(self.helper.servers_dir, new_server_id)
-new_server_uuid = Helpers.create_uuid()
+new_backup_path = os.path.join(self.helper.backup_path, new_server_id)
-new_server_path = os.path.join(self.helper.servers_dir, new_server_uuid)
+backup_data = {
+"backup_name": f"{new_server_name} Backup",
+"backup_location": new_backup_path,
+"excluded_dirs": "",
+"max_backups": 0,
+"server_id": new_server_id,
+"compress": False,
+"shutdown": False,
+"before": "",
+"after": "",
+"default": True,
+"status": json.dumps({"status": "Standby", "message": ""}),
+"enabled": True,
+}
+new_server_command = str(server_data.get("execution_command")).replace(
+server_id, new_server_id
+)
+new_server_log_path = server_data.get("log_path").replace(
+server_id, new_server_id
+)

+self.controller.register_server(
+new_server_name,
+new_server_id,
+new_server_path,
+new_server_command,
+server_data.get("executable"),
+new_server_log_path,
+server_data.get("stop_command"),
+server_data.get("server_port"),
+user_id,
+server_data.get("type"),
+)

+self.controller.management.add_backup_config(backup_data)

 self.controller.management.add_to_audit_log(
 user_id,
@@ -83,25 +129,6 @@ class ApiServersServerActionHandler(BaseApiHandler):
 # copy the old server
 FileHelpers.copy_dir(server_data.get("path"), new_server_path)

-# TODO get old server DB data to individual variables
-new_server_command = str(server_data.get("execution_command"))
-new_server_log_file = str(
-self.helper.get_os_understandable_path(server_data.get("log_path"))
-)
-
-new_server_id = self.controller.servers.create_server(
-new_server_name,
-new_server_uuid,
-new_server_path,
-"",
-new_server_command,
-server_data.get("executable"),
-new_server_log_file,
-server_data.get("stop_command"),
-server_data.get("type"),
-user_id,
-server_data.get("server_port"),
-)
 for role in self.controller.server_perms.get_server_roles(server_id):
 mask = self.controller.server_perms.get_permissions_mask(
 role.role_id, server_id
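The rewritten clone path above mints a fresh server ID, rewrites any occurrence of the old ID inside the stored execution command and log path, and registers a default backup config for the copy. A hedged sketch of just the ID/path rewriting step; `uuid.uuid4()` stands in for the project's own ID helper:

```python
import uuid


def clone_paths(old_server_id: str, execution_command: str, log_path: str):
    """Sketch of the clone rewrite: mint a new ID, then substitute it for the
    old ID wherever the old ID appears in the stored command and log path."""
    new_server_id = str(uuid.uuid4())
    return (
        new_server_id,
        execution_command.replace(old_server_id, new_server_id),
        log_path.replace(old_server_id, new_server_id),
    )


old_id = "1f0d"  # hypothetical example ID
cmd = f"java -Xms1G -jar /servers/{old_id}/server.jar nogui"
log = f"/servers/{old_id}/logs/latest.log"
print(clone_paths(old_id, cmd, log))
```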
@@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers

 logger = logging.getLogger(__name__)

-backup_schema = {
+BACKUP_SCHEMA = {
 "type": "object",
 "properties": {
 "filename": {"type": "string", "minLength": 5},
@@ -19,84 +19,157 @@ backup_schema = {
 "additionalProperties": False,
 "minProperties": 1,
 }
+BACKUP_PATCH_SCHEMA = {
+"type": "object",
+"properties": {
+"backup_name": {"type": "string", "minLength": 3},
+"backup_location": {"type": "string", "minLength": 1},
+"max_backups": {"type": "integer"},
+"compress": {"type": "boolean"},
+"shutdown": {"type": "boolean"},
+"before": {"type": "string"},
+"after": {"type": "string"},
+"excluded_dirs": {"type": "array"},
+},
+"additionalProperties": False,
+"minProperties": 1,
+}
+
+BASIC_BACKUP_PATCH_SCHEMA = {
+"type": "object",
+"properties": {
+"backup_name": {"type": "string", "minLength": 3},
+"max_backups": {"type": "integer"},
+"compress": {"type": "boolean"},
+"shutdown": {"type": "boolean"},
+"before": {"type": "string"},
+"after": {"type": "string"},
+"excluded_dirs": {"type": "array"},
+},
+"additionalProperties": False,
+"minProperties": 1,
+}
+ID_MISMATCH = "Server ID backup server ID different"
+GENERAL_AUTH_ERROR = "Authorization Error"


 class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
-def get(self, server_id: str):
+def get(self, server_id: str, backup_id: str):
 auth_data = self.authenticate_user()
+backup_conf = self.controller.management.get_backup_config(backup_id)
 if not auth_data:
 return
-if (
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-EnumPermissionsServer.BACKUP
+self.controller.server_perms.get_user_permissions_mask(
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+if backup_conf["server_id"]["server_id"] != server_id:
-# if the user doesn't have Schedule permission, return an error
-return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-self.finish_json(200, self.controller.management.get_backup_config(server_id))
-
-def delete(self, server_id: str):
-auth_data = self.authenticate_user()
-backup_conf = self.controller.management.get_backup_config(server_id)
-if not auth_data:
-return
-if (
-EnumPermissionsServer.BACKUP
-not in self.controller.server_perms.get_user_id_permissions_list(
-auth_data[4]["user_id"], server_id
-)
-):
-# if the user doesn't have Schedule permission, return an error
-return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-
-try:
-data = json.loads(self.request.body)
-except json.decoder.JSONDecodeError as e:
-return self.finish_json(
-400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
-)
-try:
-validate(data, backup_schema)
-except ValidationError as e:
 return self.finish_json(
 400,
 {
 "status": "error",
-"error": "INVALID_JSON_SCHEMA",
+"error": "ID_MISMATCH",
-"error_data": str(e),
+"error_data": ID_MISMATCH,
+},
+)
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
+# if the user doesn't have Schedule permission, return an error
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
+},
+)
+self.finish_json(200, backup_conf)
+
+def delete(self, server_id: str, backup_id: str):
+auth_data = self.authenticate_user()
+backup_conf = self.controller.management.get_backup_config(backup_id)
+if backup_conf["server_id"]["server_id"] != server_id:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "ID_MISMATCH",
+"error_data": ID_MISMATCH,
+},
+)
+if not auth_data:
+return
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
+self.controller.server_perms.get_user_permissions_mask(
+auth_data[4]["user_id"], server_id
+),
+auth_data[5],
+)
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
+# if the user doesn't have Schedule permission, return an error
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
 },
 )

-try:
-FileHelpers.del_file(
-os.path.join(backup_conf["backup_path"], data["filename"])
-)
-except Exception as e:
-return self.finish_json(
-400, {"status": "error", "error": f"DELETE FAILED with error {e}"}
-)
 self.controller.management.add_to_audit_log(
 auth_data[4]["user_id"],
-f"Edited server {server_id}: removed backup {data['filename']}",
+f"Edited server {server_id}: removed backup config"
+f" {backup_conf['backup_name']}",
 server_id,
 self.get_remote_ip(),
 )
+if backup_conf["default"]:
+return self.finish_json(
+405,
+{
+"status": "error",
+"error": "NOT_ALLOWED",
+"error_data": "Cannot delete default backup",
+},
+)
+self.controller.management.delete_backup_config(backup_id)

 return self.finish_json(200, {"status": "ok"})

-def post(self, server_id: str):
+def post(self, server_id: str, backup_id: str):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
-if (
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-EnumPermissionsServer.BACKUP
+self.controller.server_perms.get_user_permissions_mask(
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
 # if the user doesn't have Schedule permission, return an error
-return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
+},
+)
+backup_config = self.controller.management.get_backup_config(backup_id)
+if backup_config["server_id"]["server_id"] != server_id:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "ID_MISMATCH",
+"error_data": ID_MISMATCH,
+},
+)

 try:
 data = json.loads(self.request.body)
@@ -105,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
 )
 try:
-validate(data, backup_schema)
+validate(data, BACKUP_SCHEMA)
 except ValidationError as e:
 return self.finish_json(
 400,
@@ -116,14 +189,21 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 },
 )

-try:
 svr_obj = self.controller.servers.get_server_obj(server_id)
 server_data = self.controller.servers.get_server_data_by_id(server_id)
 zip_name = data["filename"]
 # import the server again based on zipfile
-backup_path = svr_obj.backup_path
+backup_config = self.controller.management.get_backup_config(backup_id)
-if Helpers.validate_traversal(backup_path, zip_name):
+backup_location = os.path.join(
-temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
+backup_config["backup_location"], backup_config["backup_id"]
+)
+if Helpers.validate_traversal(backup_location, zip_name):
+try:
+temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name)
+except (FileNotFoundError, NotADirectoryError) as e:
+return self.finish_json(
+400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
+)
 if server_data["type"] == "minecraft-java":
 new_server = self.controller.restore_java_zip_server(
 svr_obj.server_name,
@@ -145,7 +225,9 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 new_server_id = new_server
 new_server = self.controller.servers.get_server_data(new_server)
 self.controller.rename_backup_dir(
-server_id, new_server_id, new_server["server_uuid"]
+server_id,
+new_server_id,
+new_server["server_id"],
 )
 # preserve current schedules
 for schedule in self.controller.management.get_schedules_by_server(
@@ -178,24 +260,26 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 self.controller.servers.update_server(new_server_obj)

 # preserve backup config
-backup_config = self.controller.management.get_backup_config(server_id)
+server_backups = self.controller.management.get_backups_by_server(server_id)
-excluded_dirs = []
+for backup in server_backups:
-server_obj = self.controller.servers.get_server_obj(server_id)
+old_backup_id = server_backups[backup]["backup_id"]
-loop_backup_path = self.helper.wtol_path(server_obj.path)
+del server_backups[backup]["backup_id"]
-for item in self.controller.management.get_excluded_backup_dirs(
+server_backups[backup]["server_id"] = new_server_id
-server_id
+if str(server_id) in (server_backups[backup]["backup_location"]):
-):
+server_backups[backup]["backup_location"] = str(
-item_path = self.helper.wtol_path(item)
+server_backups[backup]["backup_location"]
-bu_path = os.path.relpath(item_path, loop_backup_path)
+).replace(str(server_id), str(new_server_id))
-bu_path = os.path.join(new_server_obj.path, bu_path)
+new_backup_id = self.controller.management.add_backup_config(
-excluded_dirs.append(bu_path)
+server_backups[backup]
-self.controller.management.set_backup_config(
+)
-new_server_id,
+os.listdir(server_backups[backup]["backup_location"])
-new_server_obj.backup_path,
+FileHelpers.move_dir(
-backup_config["max_backups"],
+os.path.join(
-excluded_dirs,
+server_backups[backup]["backup_location"], old_backup_id
-backup_config["compress"],
+),
-backup_config["shutdown"],
+os.path.join(
+server_backups[backup]["backup_location"], new_backup_id
+),
 )
 # remove old server's tasks
 try:
@@ -203,10 +287,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 except JobLookupError as e:
 logger.info("No active tasks found for server: {e}")
 self.controller.remove_server(server_id, True)
-except Exception as e:
-return self.finish_json(
-400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
-)
 self.controller.management.add_to_audit_log(
 auth_data[4]["user_id"],
 f"Restored server {server_id} backup {data['filename']}",
@@ -215,3 +296,149 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
 )

 return self.finish_json(200, {"status": "ok"})
+
+def patch(self, server_id: str, backup_id: str):
+auth_data = self.authenticate_user()
+if not auth_data:
+return
+
+try:
+data = json.loads(self.request.body)
+except json.decoder.JSONDecodeError as e:
+return self.finish_json(
+400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+)
+
+try:
+if auth_data[4]["superuser"]:
+validate(data, BACKUP_PATCH_SCHEMA)
+else:
+validate(data, BASIC_BACKUP_PATCH_SCHEMA)
+except ValidationError as e:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "INVALID_JSON_SCHEMA",
+"error_data": str(e),
+},
+)
+backup_conf = self.controller.management.get_backup_config(backup_id)
+if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
+# if the user doesn't have access to the server, return an error
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
+},
+)
+if backup_conf["server_id"]["server_id"] != server_id:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "ID_MISMATCH",
+"error_data": ID_MISMATCH,
+},
+)
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
+self.controller.server_perms.get_user_permissions_mask(
+auth_data[4]["user_id"], server_id
+),
+auth_data[5],
+)
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
+# if the user doesn't have Schedule permission, return an error
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
+},
+)
+self.controller.management.update_backup_config(backup_id, data)
+return self.finish_json(200, {"status": "ok"})
+
+
+class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler):
+def delete(self, server_id: str, backup_id: str):
+auth_data = self.authenticate_user()
+backup_conf = self.controller.management.get_backup_config(backup_id)
+if backup_conf["server_id"]["server_id"] != server_id:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "ID_MISMATCH",
+"error_data": ID_MISMATCH,
+},
+)
+if not auth_data:
+return
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
+self.controller.server_perms.get_user_permissions_mask(
+auth_data[4]["user_id"], server_id
+),
+auth_data[5],
+)
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
+# if the user doesn't have Schedule permission, return an error
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "NOT_AUTHORIZED",
+"error_data": GENERAL_AUTH_ERROR,
+},
+)
+
+try:
+data = json.loads(self.request.body)
+except json.decoder.JSONDecodeError as e:
+return self.finish_json(
+400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
+)
+try:
+validate(data, BACKUP_SCHEMA)
+except ValidationError as e:
+return self.finish_json(
+400,
+{
+"status": "error",
+"error": "INVALID_JSON_SCHEMA",
+"error_data": str(e),
+},
+)
+self.helper.validate_traversal(
+os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]),
+os.path.join(
+backup_conf["backup_location"],
+backup_conf["backup_id"],
+data["filename"],
+),
+)
+try:
+FileHelpers.del_file(
+os.path.join(
+backup_conf["backup_location"],
+backup_conf["backup_id"],
+data["filename"],
+)
+)
+except Exception as e:
+return self.finish_json(
+400, {"status": "error", "error": f"DELETE FAILED with error {e}"}
+)
+self.controller.management.add_to_audit_log(
+auth_data[4]["user_id"],
+f"Edited server {server_id}: removed backup {data['filename']}",
+server_id,
+self.get_remote_ip(),
+)
+
+return self.finish_json(200, {"status": "ok"})
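Throughout the backup handlers above, the old per-user permission list lookup is replaced by `get_lowest_api_perm_mask(user_mask, auth_data[5])`, which presumably intersects the user's permission bits with the API token's bits so a token can never grant more than the user holds. A sketch of that assumed behaviour using the project's 8-bit permission strings (function and parameter names here are illustrative, not the real API):

```python
def lowest_perm_mask(user_mask: str, api_key_mask: str) -> str:
    """Assumed behaviour of the lowest-permission mask: a request made with an
    API key may not exceed either the user's or the key's permissions, so the
    two bit strings are ANDed position by position."""
    return "".join(
        "1" if u == "1" and k == "1" else "0"
        for u, k in zip(user_mask, api_key_mask)
    )


print(lowest_perm_mask("11111111", "00100100"))  # -> "00100100"
print(lowest_perm_mask("10000010", "00100110"))  # -> "00000010"
```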
@@ -1,3 +1,4 @@
+import os
 import logging
 import json
 from jsonschema import validate
@@ -10,13 +11,14 @@ logger = logging.getLogger(__name__)
 backup_patch_schema = {
 "type": "object",
 "properties": {
-"backup_path": {"type": "string", "minLength": 1},
+"backup_name": {"type": "string", "minLength": 3},
+"backup_location": {"type": "string", "minLength": 1},
 "max_backups": {"type": "integer"},
 "compress": {"type": "boolean"},
 "shutdown": {"type": "boolean"},
-"backup_before": {"type": "string"},
+"before": {"type": "string"},
-"backup_after": {"type": "string"},
+"after": {"type": "string"},
-"exclusions": {"type": "array"},
+"excluded_dirs": {"type": "array"},
 },
 "additionalProperties": False,
 "minProperties": 1,
@@ -25,12 +27,13 @@ backup_patch_schema = {
 basic_backup_patch_schema = {
 "type": "object",
 "properties": {
+"backup_name": {"type": "string", "minLength": 3},
 "max_backups": {"type": "integer"},
 "compress": {"type": "boolean"},
 "shutdown": {"type": "boolean"},
-"backup_before": {"type": "string"},
+"before": {"type": "string"},
-"backup_after": {"type": "string"},
+"after": {"type": "string"},
-"exclusions": {"type": "array"},
+"excluded_dirs": {"type": "array"},
 },
 "additionalProperties": False,
 "minProperties": 1,
@@ -42,17 +45,21 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
-if (
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-EnumPermissionsServer.BACKUP
+self.controller.server_perms.get_user_permissions_mask(
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
 # if the user doesn't have Schedule permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-self.finish_json(200, self.controller.management.get_backup_config(server_id))
+self.finish_json(
+200, self.controller.management.get_backups_by_server(server_id)
+)

-def patch(self, server_id: str):
+def post(self, server_id: str):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -78,46 +85,25 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
 "error_data": str(e),
 },
 )

 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.BACKUP
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.BACKUP not in server_permissions:
 # if the user doesn't have Schedule permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+# Set the backup location automatically for non-super users. We should probably
-self.controller.management.set_backup_config(
+# make the default location configurable for SU eventually
-server_id,
+if not auth_data[4]["superuser"]:
-data.get(
+data["backup_location"] = os.path.join(self.helper.backup_path, server_id)
-"backup_path",
+data["server_id"] = server_id
-self.controller.management.get_backup_config(server_id)["backup_path"],
+if not data.get("excluded_dirs", None):
-),
+data["excluded_dirs"] = []
-data.get(
+self.controller.management.add_backup_config(data)
-"max_backups",
-self.controller.management.get_backup_config(server_id)["max_backups"],
-),
-data.get("exclusions"),
-data.get(
-"compress",
-self.controller.management.get_backup_config(server_id)["compress"],
-),
-data.get(
-"shutdown",
-self.controller.management.get_backup_config(server_id)["shutdown"],
-),
-data.get(
-"backup_before",
-self.controller.management.get_backup_config(server_id)["before"],
-),
-data.get(
-"backup_after",
-self.controller.management.get_backup_config(server_id)["after"],
-),
-)
 return self.finish_json(200, {"status": "ok"})
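The two patch schemas above differ only in whether `backup_location` may be changed, and the PATCH handler earlier in the diff picks between them based on superuser status. A trimmed sketch (not the full schemas) of that selection, showing how `additionalProperties: False` makes the basic schema reject a location change outright:

```python
from jsonschema import ValidationError, validate

# Trimmed copies of the two patch schemas, for illustration only.
FULL = {
    "type": "object",
    "properties": {
        "backup_name": {"type": "string"},
        "backup_location": {"type": "string"},
    },
    "additionalProperties": False,
}
BASIC = {
    "type": "object",
    "properties": {"backup_name": {"type": "string"}},
    "additionalProperties": False,
}


def validate_patch(data: dict, superuser: bool) -> None:
    # Only superusers may move a backup's location; everyone else is held to
    # the basic schema, so "backup_location" is rejected as an extra property.
    validate(data, FULL if superuser else BASIC)


validate_patch({"backup_name": "nightly"}, superuser=False)  # ok
try:
    validate_patch({"backup_location": "/mnt/bk"}, superuser=False)
except ValidationError as e:
    print("rejected:", e.message)
```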
@@ -72,7 +72,7 @@ file_delete_schema = {


 class ApiServersServerFilesIndexHandler(BaseApiHandler):
-def post(self, server_id: str):
+def post(self, server_id: str, backup_id=None):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -80,16 +80,16 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
+self.controller.server_perms.get_user_permissions_mask(
+auth_data[4]["user_id"], server_id
+),
+auth_data[5],
+)
+server_permissions = self.controller.server_perms.get_permissions(mask)
 if (
-EnumPermissionsServer.FILES
+EnumPermissionsServer.FILES not in server_permissions
-not in self.controller.server_perms.get_user_id_permissions_list(
+and EnumPermissionsServer.BACKUP not in server_permissions
-auth_data[4]["user_id"], server_id
-)
-and EnumPermissionsServer.BACKUP
-not in self.controller.server_perms.get_user_id_permissions_list(
-auth_data[4]["user_id"], server_id
-)
 ):
 # if the user doesn't have Files or Backup permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@@ -149,9 +149,10 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 filename = html.escape(raw_filename)
 rel = os.path.join(folder, raw_filename)
 dpath = os.path.join(folder, filename)
-if str(dpath) in self.controller.management.get_excluded_backup_dirs(
+if backup_id:
-server_id
+if str(
-):
+dpath
+) in self.controller.management.get_excluded_backup_dirs(backup_id):
 if os.path.isdir(rel):
 return_json[filename] = {
 "path": dpath,
@@ -177,6 +178,19 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 "dir": False,
 "excluded": False,
 }
+else:
+if os.path.isdir(rel):
+return_json[filename] = {
+"path": dpath,
+"dir": True,
+"excluded": False,
+}
+else:
+return_json[filename] = {
+"path": dpath,
+"dir": False,
+"excluded": False,
+}
 self.finish_json(200, {"status": "ok", "data": return_json})
 else:
 try:
@@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 )
 self.finish_json(200, {"status": "ok", "data": file_contents})

-def delete(self, server_id: str):
+def delete(self, server_id: str, _backup_id=None):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -197,13 +211,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@@ -246,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 return self.finish_json(200, {"status": "ok"})
 return self.finish_json(500, {"status": "error", "error": str(proc)})

-def patch(self, server_id: str):
+def patch(self, server_id: str, _backup_id):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -254,13 +269,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@@ -299,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 file_object.write(file_contents)
 return self.finish_json(200, {"status": "ok"})

-def put(self, server_id: str):
+def put(self, server_id: str, _backup_id):
 auth_data = self.authenticate_user()
 if not auth_data:
 return
@@ -307,13 +323,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@@ -373,13 +390,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@@ -438,13 +456,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@@ -504,13 +523,14 @@ class ApiServersServerFilesZipHandler(BaseApiHandler):
 if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
 # if the user doesn't have access to the server, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
+mask = self.controller.server_perms.get_lowest_api_perm_mask(
-if (
+self.controller.server_perms.get_user_permissions_mask(
-EnumPermissionsServer.FILES
-not in self.controller.server_perms.get_user_id_permissions_list(
 auth_data[4]["user_id"], server_id
+),
+auth_data[5],
 )
-):
+server_permissions = self.controller.server_perms.get_permissions(mask)
+if EnumPermissionsServer.FILES not in server_permissions:
 # if the user doesn't have Files permission, return an error
 return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
 try:
@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
|
|||||||
server_patch_schema = {
|
server_patch_schema = {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"server_name": {"type": "string", "minLength": 1},
|
"server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"},
|
||||||
"backup_path": {"type": "string"},
|
"backup_path": {"type": "string"},
|
||||||
"executable": {"type": "string"},
|
"executable": {"type": "string"},
|
||||||
"log_path": {"type": "string", "minLength": 1},
|
"log_path": {"type": "string", "minLength": 1},
|
||||||
@ -102,13 +102,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
|
|||||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||||
# if the user doesn't have access to the server, return an error
|
# if the user doesn't have access to the server, return an error
|
||||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||||
|
mask = self.controller.server_perms.get_lowest_api_perm_mask(
|
||||||
if (
|
self.controller.server_perms.get_user_permissions_mask(
|
||||||
EnumPermissionsServer.CONFIG
|
|
||||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
|
||||||
auth_data[4]["user_id"], server_id
|
auth_data[4]["user_id"], server_id
|
||||||
|
),
|
||||||
|
auth_data[5],
|
||||||
)
|
)
|
||||||
):
|
server_permissions = self.controller.server_perms.get_permissions(mask)
|
||||||
|
if EnumPermissionsServer.CONFIG not in server_permissions:
|
||||||
# if the user doesn't have Config permission, return an error
|
# if the user doesn't have Config permission, return an error
|
||||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||||
|
|
||||||
@ -154,13 +155,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
|
|||||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||||
# if the user doesn't have access to the server, return an error
|
# if the user doesn't have access to the server, return an error
|
||||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||||
|
mask = self.controller.server_perms.get_lowest_api_perm_mask(
|
||||||
if (
|
self.controller.server_perms.get_user_permissions_mask(
|
||||||
EnumPermissionsServer.CONFIG
|
|
||||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
|
||||||
auth_data[4]["user_id"], server_id
|
auth_data[4]["user_id"], server_id
|
||||||
|
),
|
||||||
|
auth_data[5],
|
||||||
)
|
)
|
||||||
):
|
server_permissions = self.controller.server_perms.get_permissions(mask)
|
||||||
|
if EnumPermissionsServer.CONFIG not in server_permissions:
|
||||||
# if the user doesn't have Config permission, return an error
|
# if the user doesn't have Config permission, return an error
|
||||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||||
|
|
||||||
@ -176,7 +178,7 @@ class ApiServersServerIndexHandler(BaseApiHandler):
|
|||||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||||
failed = False
|
failed = False
|
||||||
for item in self.controller.servers.failed_servers[:]:
|
for item in self.controller.servers.failed_servers[:]:
|
||||||
if item["server_id"] == int(server_id):
|
if item["server_id"] == server_id:
|
||||||
self.controller.servers.failed_servers.remove(item)
|
self.controller.servers.failed_servers.remove(item)
|
||||||
failed = True
|
failed = True
|
||||||
|
|
||||||
@@ -30,13 +30,14 @@ class ApiServersServerLogsHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.LOGS
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.LOGS not in server_permissions:
             # if the user doesn't have Logs permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -16,13 +16,14 @@ class ApiServersServerStdinHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.COMMANDS
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.COMMANDS not in server_permissions:
             # if the user doesn't have Commands permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -21,6 +21,9 @@ new_task_schema = {
         "action": {
             "type": "string",
         },
+        "action_id": {
+            "type": "string",
+        },
         "interval": {"type": "integer"},
         "interval_type": {
             "type": "string",
@@ -78,13 +81,14 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.SCHEDULE
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.SCHEDULE not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         data["server_id"] = server_id
@@ -109,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
         )
         if "parent" not in data:
             data["parent"] = None
+        if data.get("action_id"):
+            backup_config = self.controller.management.get_backup_config(
+                data["action_id"]
+            )
+            if backup_config["server_id"]["server_id"] != server_id:
+                return self.finish_json(
+                    405,
+                    {
+                        "status": "error",
+                        "error": "Server ID Mismatch",
+                    },
+                )
         task_id = self.tasks_manager.schedule_job(data)

         self.controller.management.add_to_audit_log(
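Editorial note: the schedule hunks add an optional action_id to the task schema and, when it is supplied, require the referenced backup config to belong to the same server before the job is scheduled; otherwise the handler answers 405 "Server ID Mismatch". A hedged example of a payload that would pass the new validation; the field values are invented:

```python
# Hypothetical task payload; only keys shown in the schema hunks are used,
# and every value below is an example, not a documented default.
new_task = {
    "action": "backup",                # example value
    "action_id": "example-backup-id",  # must resolve to a backup config owned by this server
    "interval": 1,
    "interval_type": "days",           # example value
}
```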
@@ -22,6 +22,9 @@ task_patch_schema = {
         "action": {
             "type": "string",
         },
+        "action_id": {
+            "type": "string",
+        },
         "interval": {"type": "integer"},
         "interval_type": {
             "type": "string",
@@ -54,12 +57,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
         auth_data = self.authenticate_user()
         if not auth_data:
             return
-        if (
-            EnumPermissionsServer.SCHEDULE
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.SCHEDULE not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         self.finish_json(200, self.controller.management.get_scheduled_task(task_id))
@@ -68,12 +73,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
         auth_data = self.authenticate_user()
         if not auth_data:
             return
-        if (
-            EnumPermissionsServer.SCHEDULE
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.SCHEDULE not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -120,13 +127,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.SCHEDULE
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.SCHEDULE not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -38,12 +38,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
         auth_data = self.authenticate_user()
         if not auth_data:
             return
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         self.finish_json(
@@ -81,13 +83,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         data["server_id"] = server_id
@@ -39,12 +39,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
         auth_data = self.authenticate_user()
         if not auth_data:
             return
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         if (
@@ -66,12 +68,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
         auth_data = self.authenticate_user()
         if not auth_data:
             return
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -117,13 +121,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

@@ -159,13 +164,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
         if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
             # if the user doesn't have access to the server, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
-        if (
-            EnumPermissionsServer.CONFIG
-            not in self.controller.server_perms.get_user_id_permissions_list(
+        mask = self.controller.server_perms.get_lowest_api_perm_mask(
+            self.controller.server_perms.get_user_permissions_mask(
                 auth_data[4]["user_id"], server_id
+            ),
+            auth_data[5],
         )
-        ):
+        server_permissions = self.controller.server_perms.get_permissions(mask)
+        if EnumPermissionsServer.CONFIG not in server_permissions:
             # if the user doesn't have Schedule permission, return an error
             return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
         webhook = self.controller.management.get_webhook_by_id(webhook_id)
@@ -2,6 +2,7 @@ import logging
 import json
 from jsonschema import validate
 from jsonschema.exceptions import ValidationError
+from app.classes.shared.translation import Translation
 from app.classes.models.crafty_permissions import EnumPermissionsCrafty
 from app.classes.models.roles import Roles, HelperRoles
 from app.classes.models.users import PUBLIC_USER_ATTRS
@@ -21,6 +22,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
             _,
             _,
             user,
+            _,
         ) = auth_data

         # GET /api/v2/users?ids=true
@@ -53,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
         )

     def post(self):
+        self.translator = Translation(self.helper)
         new_user_schema = {
             "type": "object",
             "properties": {
@@ -70,6 +73,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
             _,
             superuser,
             user,
+            _,
         ) = auth_data

         if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
@@ -85,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler):
         try:
             validate(data, new_user_schema)
         except ValidationError as e:
+            err = self.translator.translate(
+                "validators",
+                e.schema["error"],
+                self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]),
+            )
             return self.finish_json(
                 400,
                 {
                     "status": "error",
                     "error": "INVALID_JSON_SCHEMA",
-                    "error_data": str(e),
+                    "error_data": f"{str(err)}",
                 },
             )
         username = data["username"]
@@ -149,8 +158,13 @@ class ApiUsersIndexHandler(BaseApiHandler):
                 400, {"status": "error", "error": "INVALID_SUPERUSER_CREATE"}
             )

-        if len(roles) != 0 and not superuser:
-            # HACK: This should check if the user has the roles or something
+        for role in roles:
+            role = self.controller.roles.get_role(role)
+            if (
+                str(role.get("manager", "no manager found"))
+                != str(auth_data[4]["user_id"])
+                and not superuser
+            ):
                 return self.finish_json(
                     400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
                 )
@@ -177,7 +191,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
         self.controller.management.add_to_audit_log(
             user["user_id"],
             f"added user {username} (UID:{user_id}) with roles {roles}",
-            server_id=0,
+            server_id=None,
             source_ip=self.get_remote_ip(),
         )

@@ -43,7 +43,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
                 auth_data[4]["user_id"],
                 f"Generated a new API token for the key {key.name} "
                 f"from user with UID: {key.user_id}",
-                server_id=0,
+                server_id=None,
                 source_ip=self.get_remote_ip(),
             )
             data_key = self.controller.authentication.generate(
@@ -75,7 +75,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
                     "name": key.name,
                     "server_permissions": key.server_permissions,
                     "crafty_permissions": key.crafty_permissions,
-                    "superuser": key.superuser,
+                    "full_access": key.full_access,
                 }
             )
         self.finish_json(
@@ -99,7 +99,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
                     "type": "string",
                     "pattern": "^[01]{3}$",  # 8 bits, see EnumPermissionsCrafty
                 },
-                "superuser": {"type": "boolean"},
+                "full_access": {"type": "boolean"},
             },
             "additionalProperties": False,
             "minProperties": 1,
@@ -113,6 +113,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
             _,
             _superuser,
             user,
+            _,
         ) = auth_data

         try:
@@ -163,7 +164,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
         key_id = self.controller.users.add_user_api_key(
             data["name"],
             user_id,
-            data["superuser"],
+            data["full_access"],
             data["server_permissions_mask"],
             data["crafty_permissions_mask"],
         )
@@ -173,7 +174,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
             f"Added API key {data['name']} with crafty permissions "
             f"{data['crafty_permissions_mask']}"
             f" and {data['server_permissions_mask']} for user with UID: {user_id}",
-            server_id=0,
+            server_id=None,
             source_ip=self.get_remote_ip(),
         )
         self.finish_json(200, {"status": "ok", "data": {"id": key_id}})
@@ -188,6 +189,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
             _,
             _,
             _user,
+            _,
         ) = auth_data
         if key_id:
             key = self.controller.users.get_user_api_key(key_id)
@@ -215,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
             )

         if (
-            target_key.user_id != auth_data[4]["user_id"]
+            str(target_key.user_id) != str(auth_data[4]["user_id"])
             and not auth_data[4]["superuser"]
         ):
             return self.finish_json(
@@ -233,7 +235,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
             auth_data[4]["user_id"],
             f"Removed API key {target_key} "
             f"(ID: {key_id}) from user {auth_data[4]['user_id']}",
-            server_id=0,
+            server_id=None,
             source_ip=self.get_remote_ip(),
         )

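Editorial note: the key handler hunks rename the per-key superuser flag to full_access in the JSON schema, the serialized key data, and the add_user_api_key() call. A hedged sketch of a key-creation body under the renamed field; the mask strings are placeholders, not Crafty's documented defaults:

```python
# Hypothetical request body for creating an API key after this merge.
new_key = {
    "name": "ci-token",                     # example value
    "full_access": False,                   # previously "superuser"
    "server_permissions_mask": "00000000",  # placeholder bit string
    "crafty_permissions_mask": "000",       # matches the "^[01]{3}$" pattern above
}
```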
@@ -24,6 +24,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
             _,
             _,
             user,
+            _,
         ) = auth_data

         if user_id in ["@me", user["user_id"]]:
@@ -72,6 +73,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
             _,
             _,
             user,
+            _,
         ) = auth_data

         if (user_id in ["@me", user["user_id"]]) and self.helper.get_setting(
@@ -94,7 +96,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
         self.controller.management.add_to_audit_log(
             user["user_id"],
             f"deleted the user {user_id}",
-            server_id=0,
+            server_id=None,
             source_ip=self.get_remote_ip(),
         )

@@ -121,6 +123,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
             _,
             superuser,
             user,
+            _,
         ) = auth_data

         try:
@@ -129,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
             return self.finish_json(
                 400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
             )
-
         try:
             validate(data, user_patch_schema)
         except ValidationError as e:
@@ -141,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
                     "error_data": str(e),
                 },
             )
-
         if user_id == "@me":
             user_id = user["user_id"]
-
         if (
             EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
             and str(user["user_id"]) != str(user_id)
@@ -212,6 +212,25 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
                 return self.finish_json(
                     400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
                 )
+            user_modify = self.controller.users.get_user_roles_id(user_id)
+
+            for role in data["roles"]:
+                # Check if user is not a super user and that the exec user is the role
+                # manager or that the role already exists in the user's list
+                if not superuser and (
+                    str(
+                        self.controller.roles.get_role(role).get(
+                            "manager", "no manager found"
+                        )
+                    )
+                    != str(auth_data[4]["user_id"])
+                    and role not in user_modify
+                ):
+                    for item in user_modify:
+                        print(type(role), type(item))
+                    return self.finish_json(
+                        400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
+                    )

         user_obj = HelperUsers.get_user_model(user_id)
         if "password" in data and str(user["user_id"]) != str(user_id):
@@ -283,7 +302,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
                 f"edited user {user_obj.username} (UID: {user_id})"
                 f"with roles {user_obj.roles}"
             ),
-            server_id=0,
+            server_id=None,
             source_ip=self.get_remote_ip(),
         )

@@ -27,6 +27,7 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
             _,
             _,
             user,
+            _,
         ) = auth_data

         if user_id in ["@me", user["user_id"]]:
@@ -52,6 +53,8 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
                 },
             )

+        counter_data = PermissionsCrafty.get_created_quantity_list(user_id)
+
         self.finish_json(
             200,
             {
@@ -59,9 +62,9 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
                 "data": {
                     "permissions": res_data.permissions,
                     "counters": {
-                        SERVER_CREATION: res_data.created_server,
-                        USER_CONFIG: res_data.created_user,
-                        ROLES_CONFIG: res_data.created_role,
+                        SERVER_CREATION: counter_data["SERVER_CREATION"],
+                        USER_CONFIG: counter_data["USER_CONFIG"],
+                        ROLES_CONFIG: counter_data["ROLES_CONFIG"],
                     },
                     "limits": {
                         SERVER_CREATION: res_data.limit_server_creation,
@@ -17,6 +17,7 @@ class ApiUsersUserPublicHandler(BaseApiHandler):
             _,
             _,
             user,
+            _,
         ) = auth_data

         if user_id == "@me":
@@ -17,7 +17,7 @@ def metrics_handlers(handler_args):
             handler_args,
         ),
         (
-            r"/metrics/servers/([0-9]+)/?",
+            r"/metrics/servers/([a-z0-9-]+)/?",
             ApiOpenMetricsServersHandler,
             handler_args,
         ),
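Editorial note: the metrics route switches from a purely numeric capture group to one that accepts lowercase letters, digits, and dashes, which lines up with server IDs no longer being cast to int elsewhere in this merge. A quick hedged check of the new pattern; the sample IDs are made up:

```python
import re

# Route fragment taken from the diff. Both a legacy numeric ID and a
# UUID-style ID should now match, while the old "[0-9]+" group only
# accepted the first.
SERVER_METRICS_ROUTE = r"/metrics/servers/([a-z0-9-]+)/?"

assert re.fullmatch(SERVER_METRICS_ROUTE, "/metrics/servers/42/")
assert re.fullmatch(
    SERVER_METRICS_ROUTE, "/metrics/servers/1f0a2b3c-4d5e-6789-abcd-ef0123456789/"
)
```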
@@ -30,7 +30,7 @@ class ServerHandler(BaseHandler):
         ) = self.current_user
         superuser = exec_user["superuser"]
         if api_key is not None:
-            superuser = superuser and api_key.superuser
+            superuser = superuser and api_key.full_access

         if superuser:
             defined_servers = self.controller.servers.list_defined_servers()
@@ -126,7 +126,7 @@ class ServerHandler(BaseHandler):
                 "created": api_key.created,
                 "server_permissions": api_key.server_permissions,
                 "crafty_permissions": api_key.crafty_permissions,
-                "superuser": api_key.superuser,
+                "full_access": api_key.full_access,
             }
             if api_key is not None
             else None
@@ -148,12 +148,12 @@ class ServerHandler(BaseHandler):
             return
         page_data["server_api"] = False
         if page_data["online"]:
-            page_data["server_api"] = self.helper.check_address_status(
-                "https://serverjars.com/api/fetchTypes"
+            page_data["server_api"] = (
+                self.controller.big_bucket._check_bucket_alive()
             )
-        page_data["server_types"] = self.controller.server_jars.get_serverjar_data()
+        page_data["server_types"] = self.controller.big_bucket.get_bucket_data()
         page_data["js_server_types"] = json.dumps(
-            self.controller.server_jars.get_serverjar_data()
+            self.controller.big_bucket.get_bucket_data()
        )
        if page_data["server_types"] is None:
            page_data["server_types"] = []
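Editorial note: the dashboard handler stops probing https://serverjars.com directly and instead asks the controller's big_bucket client whether its index is reachable and for the cached bucket data, apparently replacing the ServerJars API with Crafty's own "big bucket" index. A sketch that only restates the calls visible in the hunk above:

```python
# Sketch only: mirrors the big_bucket calls introduced by this hunk.
page_data["server_api"] = self.controller.big_bucket._check_bucket_alive()
page_data["server_types"] = self.controller.big_bucket.get_bucket_data()
if page_data["server_types"] is None:
    page_data["server_types"] = []
```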
@@ -24,8 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers
 from app.classes.web.server_handler import ServerHandler
 from app.classes.web.websocket_handler import WebSocketHandler
 from app.classes.web.static_handler import CustomStaticHandler
-from app.classes.web.upload_handler import UploadHandler
-from app.classes.web.http_handler import HTTPHandler, HTTPHandlerPage
 from app.classes.web.status_handler import StatusHandler


@@ -44,7 +42,6 @@ class Webserver:
         file_helper: FileHelpers,
     ):
         self.ioloop = None
-        self.http_server = None
         self.https_server = None
         self.helper = helper
         self.controller = controller
@@ -100,7 +97,6 @@ class Webserver:
         # let's verify we have an SSL cert
         self.helper.create_self_signed_cert()

-        http_port = self.helper.get_setting("http_port")
         https_port = self.helper.get_setting("https_port")

         debug_errors = self.helper.get_setting("show_errors")
@@ -112,9 +108,6 @@ class Webserver:
             cookie_secret = self.helper.random_string_generator(32)
             HelpersManagement.set_cookie_secret(cookie_secret)

-        if not http_port and http_port != 0:
-            http_port = 8000
-
         if not https_port:
             https_port = 8443

@@ -127,7 +120,7 @@ class Webserver:
             ),
         }

-        logger.info(f"Starting Web Server on ports http:{http_port} https:{https_port}")
+        logger.info(f"Starting Web Server on ports https:{https_port}")

         asyncio.set_event_loop(asyncio.new_event_loop())

@@ -148,7 +141,6 @@ class Webserver:
             (r"/panel/(.*)", PanelHandler, handler_args),
             (r"/server/(.*)", ServerHandler, handler_args),
             (r"/ws", WebSocketHandler, handler_args),
-            (r"/upload", UploadHandler, handler_args),
             (r"/status", StatusHandler, handler_args),
             # API Routes V2
             *api_handlers(handler_args),
@@ -173,30 +165,6 @@ class Webserver:
            static_handler_class=CustomStaticHandler,
            serve_traceback=debug_errors,
        )
-        http_handers = [
-            (r"/", HTTPHandler, handler_args),
-            (r"/(.+)", HTTPHandlerPage, handler_args),
-        ]
-        http_app = tornado.web.Application(
-            http_handers,
-            template_path=os.path.join(self.helper.webroot, "templates"),
-            static_path=os.path.join(self.helper.webroot, "static"),
-            debug=debug_errors,
-            cookie_secret=cookie_secret,
-            xsrf_cookies=True,
-            autoreload=False,
-            log_function=self.log_function,
-            default_handler_class=HTTPHandler,
-            login_url="/login",
-            serve_traceback=debug_errors,
-        )
-
-        if http_port != 0:
-            self.http_server = tornado.httpserver.HTTPServer(http_app)
-            self.http_server.listen(http_port)
-        else:
-            logger.info("http port disabled by config")
-
        self.https_server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
        self.https_server.listen(https_port)

@@ -218,7 +186,6 @@ class Webserver:
        logger.info("Shutting Down Web Server")
        Console.info("Shutting Down Web Server")
        self.ioloop.stop()
-        self.http_server.stop()
        self.https_server.stop()
        logger.info("Web Server Stopped")
        Console.info("Web Server Stopped")
@@ -1,331 +0,0 @@
-import logging
-import os
-import time
-import urllib.parse
-import tornado.web
-import tornado.options
-import tornado.httpserver
-from app.classes.models.crafty_permissions import EnumPermissionsCrafty
-
-from app.classes.models.server_permissions import EnumPermissionsServer
-from app.classes.shared.console import Console
-from app.classes.shared.helpers import Helpers
-from app.classes.shared.main_controller import Controller
-from app.classes.web.base_handler import BaseHandler
-from app.classes.shared.websocket_manager import WebSocketManager
-
-logger = logging.getLogger(__name__)
-
-
-@tornado.web.stream_request_body
-class UploadHandler(BaseHandler):
-    # noinspection PyAttributeOutsideInit
-    def initialize(
-        self,
-        helper: Helpers = None,
-        controller: Controller = None,
-        tasks_manager=None,
-        translator=None,
-        file_helper=None,
-    ):
-        self.helper = helper
-        self.controller = controller
-        self.tasks_manager = tasks_manager
-        self.translator = translator
-        self.file_helper = file_helper
-
-    def prepare(self):
-        # Class & Function Defination
-        api_key, _token_data, exec_user = self.current_user
-        self.upload_type = str(self.request.headers.get("X-Content-Upload-Type"))
-
-        if self.upload_type == "server_import":
-            superuser = exec_user["superuser"]
-            if api_key is not None:
-                superuser = superuser and api_key.superuser
-            user_id = exec_user["user_id"]
-            stream_size_value = self.helper.get_setting("stream_size_GB")
-
-            max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
-            self.content_len = int(self.request.headers.get("Content-Length"))
-            if self.content_len > max_streamed_size:
-                logger.error(
-                    f"User with ID {user_id} attempted to upload a file that"
-                    f" exceeded the max body size."
-                )
-
-                return self.finish_json(
-                    413,
-                    {
-                        "status": "error",
-                        "error": "TOO LARGE",
-                        "info": self.helper.translation.translate(
-                            "error",
-                            "fileTooLarge",
-                            self.controller.users.get_user_lang_by_id(user_id),
-                        ),
-                    },
-                )
-            self.do_upload = True
-
-            if superuser:
-                exec_user_server_permissions = (
-                    self.controller.server_perms.list_defined_permissions()
-                )
-            elif api_key is not None:
-                exec_user_server_permissions = (
-                    self.controller.crafty_perms.get_api_key_permissions_list(api_key)
-                )
-            else:
-                exec_user_server_permissions = (
-                    self.controller.crafty_perms.get_crafty_permissions_list(
-                        exec_user["user_id"]
-                    )
-                )
-
-            if user_id is None:
-                logger.warning("User ID not found in upload handler call")
-                Console.warning("User ID not found in upload handler call")
-                self.do_upload = False
-
-            if (
-                EnumPermissionsCrafty.SERVER_CREATION
-                not in exec_user_server_permissions
-                and not exec_user["superuser"]
-            ):
-                logger.warning(
-                    f"User {user_id} tried to upload a server" " without permissions!"
-                )
-                Console.warning(
-                    f"User {user_id} tried to upload a server" " without permissions!"
-                )
-                self.do_upload = False
-
-            path = os.path.join(self.controller.project_root, "import", "upload")
-            self.helper.ensure_dir_exists(path)
-            # Delete existing files
-            if len(os.listdir(path)) > 0:
-                for item in os.listdir():
-                    try:
-                        os.remove(os.path.join(path, item))
-                    except:
-                        logger.debug("Could not delete file on user server upload")
-
-            self.helper.ensure_dir_exists(path)
-            filename = urllib.parse.unquote(
-                self.request.headers.get("X-FileName", None)
-            )
-            if not str(filename).endswith(".zip"):
-                WebSocketManager().broadcast("close_upload_box", "error")
-                self.finish("error")
-            full_path = os.path.join(path, filename)
-
-            if self.do_upload:
-                try:
-                    self.f = open(full_path, "wb")
-                except Exception as e:
-                    logger.error(f"Upload failed with error: {e}")
-                    self.do_upload = False
-            # If max_body_size is not set, you cannot upload files > 100MB
-            self.request.connection.set_max_body_size(max_streamed_size)
-
-        elif self.upload_type == "background":
-            superuser = exec_user["superuser"]
-            if api_key is not None:
-                superuser = superuser and api_key.superuser
-            user_id = exec_user["user_id"]
-            stream_size_value = self.helper.get_setting("stream_size_GB")
-
-            max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
-            self.content_len = int(self.request.headers.get("Content-Length"))
-            if self.content_len > max_streamed_size:
-                logger.error(
-                    f"User with ID {user_id} attempted to upload a file that"
-                    f" exceeded the max body size."
-                )
-
-                return self.finish_json(
-                    413,
-                    {
-                        "status": "error",
-                        "error": "TOO LARGE",
-                        "info": self.helper.translation.translate(
-                            "error",
-                            "fileTooLarge",
-                            self.controller.users.get_user_lang_by_id(user_id),
-                        ),
-                    },
-                )
-            self.do_upload = True
-
-            if not superuser:
-                return self.finish_json(
-                    401,
-                    {
-                        "status": "error",
-                        "error": "UNAUTHORIZED ACCESS",
-                        "info": self.helper.translation.translate(
-                            "error",
-                            "superError",
-                            self.controller.users.get_user_lang_by_id(user_id),
-                        ),
-                    },
-                )
-            if not self.request.headers.get("X-Content-Type", None).startswith(
-                "image/"
-            ):
-                return self.finish_json(
-                    415,
-                    {
-                        "status": "error",
-                        "error": "TYPE ERROR",
-                        "info": self.helper.translation.translate(
-                            "error",
-                            "fileError",
-                            self.controller.users.get_user_lang_by_id(user_id),
-                        ),
-                    },
-                )
-            if user_id is None:
-                logger.warning("User ID not found in upload handler call")
-                Console.warning("User ID not found in upload handler call")
-                self.do_upload = False
-
-            path = os.path.join(
-                self.controller.project_root,
-                "app/frontend/static/assets/images/auth/custom",
-            )
-            filename = self.request.headers.get("X-FileName", None)
-            full_path = os.path.join(path, filename)
-
-            if self.do_upload:
-                try:
-                    self.f = open(full_path, "wb")
-                except Exception as e:
-                    logger.error(f"Upload failed with error: {e}")
-                    self.do_upload = False
-            # If max_body_size is not set, you cannot upload files > 100MB
-            self.request.connection.set_max_body_size(max_streamed_size)
-        else:
-            server_id = self.get_argument("server_id", None)
-            superuser = exec_user["superuser"]
-            if api_key is not None:
-                superuser = superuser and api_key.superuser
-            user_id = exec_user["user_id"]
-            stream_size_value = self.helper.get_setting("stream_size_GB")
-
-            max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
-
-            self.content_len = int(self.request.headers.get("Content-Length"))
-            if self.content_len > max_streamed_size:
-                logger.error(
-                    f"User with ID {user_id} attempted to upload a file that"
-                    f" exceeded the max body size."
-                )
-
-                return self.finish_json(
-                    413,
-                    {
-                        "status": "error",
-                        "error": "TOO LARGE",
-                        "info": self.helper.translation.translate(
-                            "error",
-                            "fileTooLarge",
-                            self.controller.users.get_user_lang_by_id(user_id),
-                        ),
-                    },
-                )
-            self.do_upload = True
-
-            if superuser:
-                exec_user_server_permissions = (
-                    self.controller.server_perms.list_defined_permissions()
-                )
-            elif api_key is not None:
-                exec_user_server_permissions = (
-                    self.controller.server_perms.get_api_key_permissions_list(
-                        api_key, server_id
-                    )
-                )
-            else:
-                exec_user_server_permissions = (
-                    self.controller.server_perms.get_user_id_permissions_list(
-                        exec_user["user_id"], server_id
-                    )
-                )
-
-            server_id = self.request.headers.get("X-ServerId", None)
-            if server_id is None:
-                logger.warning("Server ID not found in upload handler call")
-                Console.warning("Server ID not found in upload handler call")
-                self.do_upload = False
-
-            if user_id is None:
-                logger.warning("User ID not found in upload handler call")
-                Console.warning("User ID not found in upload handler call")
-                self.do_upload = False
-
-            if EnumPermissionsServer.FILES not in exec_user_server_permissions:
-                logger.warning(
-                    f"User {user_id} tried to upload a file to "
-                    f"{server_id} without permissions!"
-                )
-                Console.warning(
-                    f"User {user_id} tried to upload a file to "
-                    f"{server_id} without permissions!"
-                )
-                self.do_upload = False
-
-            path = self.request.headers.get("X-Path", None)
-            filename = self.request.headers.get("X-FileName", None)
-            full_path = os.path.join(path, filename)
-
-            if not self.helper.is_subdir(
-                full_path,
-                Helpers.get_os_understandable_path(
-                    self.controller.servers.get_server_data_by_id(server_id)["path"]
-                ),
-            ):
-                logger.warning(
-                    f"User {user_id} tried to upload a file to {server_id} "
-                    f"but the path is not inside of the server!"
-                )
-                Console.warning(
-                    f"User {user_id} tried to upload a file to {server_id} "
-                    f"but the path is not inside of the server!"
-                )
-                self.do_upload = False
-
-            if self.do_upload:
-                try:
-                    self.f = open(full_path, "wb")
-                except Exception as e:
-                    logger.error(f"Upload failed with error: {e}")
-                    self.do_upload = False
-            # If max_body_size is not set, you cannot upload files > 100MB
-            self.request.connection.set_max_body_size(max_streamed_size)
-
-    def post(self):
-        logger.info("Upload completed")
-        if self.upload_type == "server_files":
-            files_left = int(self.request.headers.get("X-Files-Left", None))
-        else:
-            files_left = 0
-
-        if self.do_upload:
-            time.sleep(5)
-            if files_left == 0:
-                WebSocketManager().broadcast("close_upload_box", "success")
-                self.finish("success")  # Nope, I'm sending "success"
-            self.f.close()
-        else:
-            time.sleep(5)
-            if files_left == 0:
-                WebSocketManager().broadcast("close_upload_box", "error")
-            self.finish("error")
-
-    def data_received(self, chunk):
-        if self.do_upload:
-            self.f.write(chunk)
@@ -55,7 +55,7 @@ class WebSocketHandler(tornado.websocket.WebSocketHandler):
             self.controller.management.add_to_audit_log_raw(
                 "unknown",
                 0,
-                0,
+                None,
                 "Someone tried to connect via WebSocket without proper authentication",
                 self.get_remote_ip(),
             )
@@ -14,6 +14,9 @@
         "auth": {
             "format": "%(asctime)s - [AUTH] - %(levelname)s - %(message)s"
         },
+        "audit": {
+            "()": "app.classes.logging.log_formatter.JsonFormatter"
+        },
         "cmd_queue": {
             "format": "%(asctime)s - [CMD_QUEUE] - %(levelname)s - %(message)s"
         }
@@ -70,6 +73,14 @@
             "maxBytes": 10485760,
             "backupCount": 20,
             "encoding": "utf8"
+        },
+        "audit_log_handler": {
+            "class": "logging.handlers.RotatingFileHandler",
+            "formatter": "audit",
+            "filename": "logs/audit.log",
+            "maxBytes": 10485760,
+            "backupCount": 20,
+            "encoding": "utf8"
         }
     },
     "loggers": {
@@ -108,6 +119,12 @@
                 "cmd_queue_file_handler"
             ],
             "propagate": false
+        },
+        "audit_log": {
+            "level": "INFO",
+            "handlers": [
+                "audit_log_handler"
+            ]
         }
     }
 }
@@ -1,5 +1,5 @@
 {
     "major": 4,
-    "minor": 2,
-    "sub": 4
+    "minor": 4,
+    "sub": 2
 }
@@ -12,6 +12,16 @@ nav.sidebar {
     position: fixed;
 }

+td {
+    -ms-overflow-style: none;
+    /* IE and Edge */
+    scrollbar-width: none;
+    /* Firefox */
+}
+
+td::-webkit-scrollbar {
+    display: none;
+}

 @media (min-width: 992px) {
     nav.sidebar {
@@ -268,3 +278,6 @@ div.warnings div.wssError a:hover {
 }

 /**************************************************************/
+.hidden-input {
+    margin-left: -40px;
+}
app/frontend/static/assets/css/vendors/bootstrap-select-1.13.18.css (new vendored file, 537 lines)
@@ -0,0 +1,537 @@
/*!
 * Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
 *
 * Copyright 2012-2020 SnapAppointments, LLC
 * Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
 */
(remaining vendored bootstrap-select 1.13.18 stylesheet rules omitted)
@@ -1,120 +0,0 @@
(deleted file: 120-line SVG logo asset containing the XML declaration, Adobe Illustrator metadata, inline styles, gradients, and path data)

Before Width: | Height: | Size: 10 KiB
@@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 142.71 128.36"><defs><style>.cls-1{fill:#fff;}.cls-2{fill-rule:evenodd;fill:url(#linear-gradient);}</style><linearGradient id="linear-gradient" x1="408.73" y1="431.59" x2="485.41" y2="495.68" gradientTransform="translate(-374.6 -381.38)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#feaf6f"/><stop offset="1" stop-color="#fd5e83"/></linearGradient></defs><path class="cls-1" d="M133.09,19.17H9.67A3.24,3.24,0,0,1,6.46,16V3.24A3.24,3.24,0,0,1,9.7,0H133.09a3.25,3.25,0,0,1,3.25,3.24V16a3.25,3.25,0,0,1-3.25,3.24"/><path class="cls-1" d="M23.61,36.67A11.41,11.41,0,0,0,14.8,41a15.79,15.79,0,0,0-3.25,12.8l9.18,51.92c1.17,6.62,6.25,11.42,12.06,11.42H110c5.82,0,10.89-4.8,12.06-11.42l9.18-51.91A15.86,15.86,0,0,0,128,41a11.5,11.5,0,0,0-8.82-4.33ZM110,128.35H32.8c-11.27,0-21-8.7-23.12-20.69L.46,55.75a26.72,26.72,0,0,1,5.71-22,22.77,22.77,0,0,1,17.41-8.34h95.56a22.8,22.8,0,0,1,17.41,8.34,26.79,26.79,0,0,1,5.71,22l-9.19,51.91c-2.12,12-11.84,20.7-23.12,20.7"/><path class="cls-1" d="M120.8,23.76V21.51A3.56,3.56,0,0,0,121,14.4H21.59a3.56,3.56,0,0,0,0,7.11v2.25a5.81,5.81,0,0,1,0-11.61H120.8a5.81,5.81,0,0,1,.48,11.61h-.48"/><path class="cls-1" d="M120.8,33.11H21.59a5.8,5.8,0,0,1,0-11.6v2.24a3.56,3.56,0,0,0,0,7.11H120.8a3.56,3.56,0,0,0,.52-7.1h-.52V21.51a5.81,5.81,0,0,1,.48,11.61,3.84,3.84,0,0,1-.48,0"/><path class="cls-1" d="M21.59,21.51l36.13,1.13L21.59,23.76Z"/><path class="cls-1" d="M125.46,23.76,80.35,22.64l45.11-1.13Z"/><path class="cls-2" d="M124.46,62c-12.72.93-27,5.55-35.7,12.34-38.69,30.34-69.25-6.6-69.28-6.58l6.75,36.83a9.16,9.16,0,0,0,9,7.52l73,.16a9.17,9.17,0,0,0,9.06-7.64Z"/></svg>
Before Width: | Height: | Size: 1.7 KiB |
9 app/frontend/static/assets/js/shared/bootstrap-select-1.13.18.js vendored Normal file
File diff suppressed because one or more lines are too long
@@ -41,7 +41,7 @@ async function getTreeView(path, unzip = false, upload = false) {
     let responseData = await res.json();
     if (responseData.status === "ok") {
         console.log(responseData);
-        process_tree_response(responseData);
+        process_tree_response(responseData, unzip);
         let x = document.querySelector('.bootbox');
         if (x) {
             x.remove()
@@ -61,7 +61,7 @@ async function getTreeView(path, unzip = false, upload = false) {
         }
     }
 
-function process_tree_response(response) {
+function process_tree_response(response, unzip) {
     const styles = window.getComputedStyle(document.getElementById("lower_half"));
     //If this value is still hidden we know the user is executing a zip import and not an upload
     if (styles.visibility === "hidden") {
@@ -70,7 +70,9 @@ function process_tree_response(response) {
         document.getElementById('upload_submit').disabled = false;
     }
     let path = response.data.root_path.path;
+    if (unzip) {
         $(".root-input").val(response.data.root_path.path);
+    }
     let text = `<ul class="tree-nested d-block" id="${path}ul">`;
     Object.entries(response.data).forEach(([key, value]) => {
         if (key === "root_path" || key === "db_stats") {
@@ -83,7 +85,7 @@ function process_tree_response(response) {
         if (value.dir) {
             text += `<li class="tree-item" id="${dpath}li" data-path="${dpath}">
                 <div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
-                <input type="radio" name="root_path" value="${dpath}">
+                <input type="radio" class="root-input" name="root_path" value="${dpath}">
                 <span id="${dpath}span" class="files-tree-title" data-path="${dpath}" data-name="${filename}" onclick="getDirView(event)">
                     <i style="color: var(--info);" class="far fa-folder"></i>
                     <i style="color: var(--info);" class="far fa-folder-open"></i>
208 app/frontend/static/assets/js/shared/upload.js Normal file
@@ -0,0 +1,208 @@
async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
    if (file == null) {
        try {
            file = $("#file")[0].files[0];
        } catch {
            bootbox.alert("Please select a file first.")
            return;
        }

    }
    const fileId = uuidv4();
    const token = getCookie("_xsrf");
    if (type !== "server_upload") {
        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"> <i class="fa-solid fa-spinner"></i></div></div>';
    }

    let url = ``
    if (type === "server_upload") {
        url = `/api/v2/servers/${serverId}/files/upload/`;
    } else if (type === "background") {
        url = `/api/v2/crafty/admin/upload/`
    } else if (type === "import") {
        url = `/api/v2/servers/import/upload/`
    }
    console.log(url)
    const chunkSize = 1024 * 1024 * 10; // 10MB
    const totalChunks = Math.ceil(file.size / chunkSize);

    const uploadPromises = [];
    let errors = []; // Array to store errors
    try {
        let res = await fetch(url, {
            method: 'POST',
            headers: {
                'X-XSRFToken': token,
                'chunked': true,
                'fileSize': file.size,
                'type': type,
                'totalChunks': totalChunks,
                'fileName': file.name,
                'location': path,
                'fileId': fileId,
            },
            body: null,
        });

        if (!res.ok) {
            let errorResponse = await res.json();
            throw new Error(JSON.stringify(errorResponse));
        }

        let responseData = await res.json();

        if (responseData.status !== "ok") {
            throw new Error(JSON.stringify(responseData));
        }

        for (let i = 0; i < totalChunks; i++) {
            const start = i * chunkSize;
            const end = Math.min(start + chunkSize, file.size);
            const chunk = file.slice(start, end);
            const chunk_hash = await calculateFileHash(chunk);

            const uploadPromise = fetch(url, {
                method: 'POST',
                body: chunk,
                headers: {
                    'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
                    'Content-Length': chunk.size,
                    'fileSize': file.size,
                    'chunkHash': chunk_hash,
                    'chunked': true,
                    'type': type,
                    'totalChunks': totalChunks,
                    'fileName': file.name,
                    'location': path,
                    'fileId': fileId,
                    'chunkId': i,
                },
            })
                .then(async response => {
                    if (!response.ok) {
                        const errorData = await response.json();
                        throw new Error(JSON.stringify(errorData) || 'Unknown error occurred');
                    }
                    return response.json(); // Return the JSON data
                })
                .then(data => {
                    if (data.status !== "completed" && data.status !== "partial") {
                        throw new Error(data.message || 'Unknown error occurred');
                    }
                    // Update progress bar
                    const progress = (i + 1) / totalChunks * 100;
                    updateProgressBar(Math.round(progress), type, file_num);
                })
                .catch(error => {
                    errors.push(error); // Store the error
                });

            uploadPromises.push(uploadPromise);
        }

        await Promise.all(uploadPromises);
    } catch (error) {
        errors.push(error); // Store the error
    }

    if (errors.length > 0) {
        const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('<br>');
        console.log(errorMessage)
        bootbox.alert({
            title: 'Error',
            message: errorMessage,
            callback: function () {
                window.location.reload();
            },
        });
    } else if (type !== "server_upload") {
        // All promises resolved successfully
        $("#upload_input").html(`<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%;"><input value="${file.name}" type="text" id="file-uploaded" disabled></input> 🔒</div>`);
        if (type === "import") {
            document.getElementById("lower_half").style.visibility = "visible";
            document.getElementById("lower_half").hidden = false;
        } else if (type === "background") {
            setTimeout(function () {
                location.href = `/panel/custom_login`;
            }, 2000);
        }
    } else {
        let caught = false;
        let expanded = false;
        try {
            expanded = document.getElementById(path).classList.contains("clicked");
        } catch { }

        let par_el;
        let items;
        try {
            par_el = document.getElementById(path + "ul");
            items = par_el.children;
        } catch (err) {
            console.log(err);
            caught = true;
            par_el = document.getElementById("files-tree");
            items = par_el.children;
        }

        let name = file.name;
        let full_path = path + '/' + name;
        let flag = false;

        for (let item of items) {
            if ($(item).attr("data-name") === name) {
                flag = true;
            }
        }

        if (!flag) {
            if (caught && !expanded) {
                $(par_el).append(`<li id="${full_path}li" class="d-block tree-ctx-item tree-file tree-item" data-path="${full_path}" data-name="${name}" onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>${name}</li>`);
            } else if (expanded) {
                $(par_el).append(`<li id="${full_path}li" class="tree-ctx-item tree-file tree-item" data-path="${full_path}" data-name="${name}" onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>${name}</li>`);
            }
            setTreeViewContext();
        }

        $(`#upload-progress-bar-${file_num + 1}`).removeClass("progress-bar-striped");
        $(`#upload-progress-bar-${file_num + 1}`).addClass("bg-success");
        $(`#upload-progress-bar-${file_num + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>');
    }
}

async function calculateFileHash(file) {
    const arrayBuffer = await file.arrayBuffer();
    const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');

    return hashHex;
}

function updateProgressBar(progress, type, i) {
    if (type !== "server_upload") {
        if (progress === 100) {
            $(`#upload-progress-bar`).removeClass("progress-bar-striped")
            $(`#upload-progress-bar`).removeClass("progress-bar-animated")
        }
        $(`#upload-progress-bar`).css('width', progress + '%');
        $(`#upload-progress-bar`).html(progress + '%');
    } else {
        if (progress === 100) {
            $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped")
            $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-animated")
        }
        $(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
        $(`#upload-progress-bar-${i + 1}`).html(progress + '%');
    }
}

function uuidv4() {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
        const r = Math.random() * 16 | 0,
            v = c === 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
}
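The new shared helper above splits a file into 10 MB slices, sends an initial metadata-only POST, then POSTs each slice with a Content-Range header and a SHA-256 chunkHash produced by calculateFileHash(). A minimal server-side sketch of verifying that hash follows; it is an illustration of the check the protocol implies, not Crafty's actual request handler:

# Sketch only: recompute the SHA-256 digest of a received chunk body and
# compare it with the client-supplied chunkHash header (hex-encoded, as
# produced by calculateFileHash() in upload.js).
import hashlib

def chunk_hash_matches(chunk: bytes, chunk_hash_header: str) -> bool:
    """Return True if the chunk body matches the client's SHA-256 hex digest."""
    return hashlib.sha256(chunk).hexdigest() == chunk_hash_header.lower()

# Example usage with a stand-in chunk.
example = b"example chunk"
assert chunk_hash_matches(example, hashlib.sha256(example).hexdigest())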
@@ -63,9 +63,6 @@
 <nav class="sidebar sidebar-offcanvas" id="sidebar">
     <ul class="nav">
-
-        <li class="nav-item nav-category" style="margin-top:10px;">{{ translate('sidebar', 'navigation', data['lang']) }}
-        </li>
 
         <li class="nav-item">
             <a class="nav-link" href="/panel/dashboard">
                 <i class="fa-solid fa-diagram-project"></i>
@@ -36,25 +36,21 @@
 <table class="table table-hover" id="audit_table" style="overflow: scroll;" width="100%">
     <thead>
         <tr class="rounded">
-            <td>Username</td>
-            <td>Time</td>
-            <td>Action</td>
-            <td>Server ID</td>
-            <td>IP</td>
+            <th>Time</th>
+            <th>Username</th>
+            <th>Action</th>
+            <th>Server ID</th>
+            <th>IP</th>
         </tr>
     </thead>
     <tbody>
-        {% for row in data['audit_logs'] %}
         <tr>
-            <td>{{ row['user_name'] }}</td>
-            <td>
-                {{ row['created'].strftime('%Y-%m-%d %H:%M:%S') }}
-            </td>
-            <td>{{ row['log_msg'] }}</td>
-            <td>{{ row['server_id'] }}</td>
-            <td>{{ row['source_ip'] }}</td>
+            <td colspan="5" id="image-div" class="text-center"> <!-- Center image within table -->
+                <img class="img-center" id="logo-animate" src="../static/assets/images/crafty-logo-square-1024.png"
+                    alt="Crafty Logo, Crafty is loading" width="20%"><br><br>{{ translate('datatables',
+                    'loadingRecords', data['lang'])}}
+            </td>
         </tr>
-        {% end %}
     </tbody>
 </table>
 
@@ -79,17 +75,6 @@
 {% end %}
 
 {% block js %}
-<script>
-
-    $(document).ready(function () {
-        console.log('ready for JS!')
-        $('#audit_table').DataTable({
-            'order': [1, 'desc']
-        }
-        );
-
-    });
-</script>
 <script>
     $(document).ready(function () {
         $('[data-toggle="popover"]').popover();
@@ -112,6 +97,74 @@
             $('.too_small').popover("hide");
         } // New width
     });
+
+    $(document).ready(function () {
+        console.log('ready for JS!')
+        // Initialize DataTables
+        // Load initial data
+        getActivity();
+    });
+
+    function updateActivity(data) {
+        let tbody = $('#audit_table tbody');
+        tbody.empty(); // Clear existing rows
+        $.each(data, function (index, value) {
+            let row = $('<tr>');
+            row.append(`<td>${value.time}</td>`);
+            if (value.user_name != "system" && value.user_id != "-1") {
+                row.append(`<td><a href="/panel/edit_user?id=${value.user_id}">${value.user_name}</a></td>`);
+            } else {
+                row.append(`<td>${value.user_name}</td>`);
+            }
+            row.append(`<td>${value.log_msg}</td>`);
+            row.append(`<td>${value.server_id}</td>`);
+            row.append(`<td>${value.source_ip}</td>`);
+            tbody.append(row);
+        });
+        $('#audit_table').DataTable({
+            'order': [[0, 'desc']], // Sort by the first column in descending order
+            filter: true,
+            "searching": true,
+        })
+    }
+
+    async function getActivity() {
+        var token = getCookie("_xsrf");
+        let res = await fetch(`/api/v2/crafty/logs/audit`, {
+            method: 'GET',
+            headers: {
+                'X-XSRFToken': token
+            },
+        });
+        let responseData = await res.json();
+        console.log(responseData);
+        if (responseData.status === "ok") {
+            updateActivity(responseData.data);
+            console.log("activity update")
+        } else {
+            bootbox.alert(responseData.error)
+        }
+    }
+
+    function rotateImage(degree) {
+        $('#logo-animate').animate({ transform: degree }, {
+            step: function (now, fx) {
+                $(this).css({
+                    '-webkit-transform': 'rotate(' + now + 'deg)',
+                    '-moz-transform': 'rotate(' + now + 'deg)',
+                    'transform': 'rotate(' + now + 'deg)'
+                });
+            }
+        });
+        setTimeout(function () {
+            rotateImage(360);
+        }, 2000);
+    }
+    $(document).ready(function () {
+        setTimeout(function () {
+            rotateImage(360);
+        }, 2000);
+    });
 </script>
 
 {% end %}
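The activity script above fetches /api/v2/crafty/logs/audit and renders entries carrying time, user_name, user_id, log_msg, server_id and source_ip fields. A small sketch of reading the same feed outside the browser; the base URL and the token header are assumptions, not taken from this diff:

# Sketch only: query the audit-log API endpoint added in this merge and print
# a few of the fields the panel table displays. Adjust the URL and credential
# to your own installation.
import requests

BASE_URL = "https://localhost:8443"                 # assumed panel address
HEADERS = {"Authorization": "Bearer <api token>"}   # placeholder credential

resp = requests.get(f"{BASE_URL}/api/v2/crafty/logs/audit", headers=HEADERS, verify=False)
payload = resp.json()
if payload.get("status") == "ok":
    for entry in payload["data"]:
        print(entry["time"], entry["user_name"], entry["log_msg"])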
@@ -69,7 +69,7 @@
 </div>
 <div class="input-group-append">
     <button type="button" class="btn btn-info upload-button" id="upload-button"
-        onclick="sendFile()" disabled>UPLOAD</button>
+        onclick="uploadFile('background')" disabled>UPLOAD</button>
 </div>
 </div>
 </div>
@@ -381,61 +381,6 @@
 }
     img.src = src_path;
 }
 
-    var file;
-    function sendFile() {
-        file = $("#file")[0].files[0]
-        document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%"><div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%"> <i class="fa-solid fa-spinner"></i></div></div>';
-        let xmlHttpRequest = new XMLHttpRequest();
-        let token = getCookie("_xsrf")
-        let fileName = file.name
-        let target = '/upload'
-        let mimeType = file.type
-        let size = file.size
-        let type = 'background'
-
-        xmlHttpRequest.upload.addEventListener('progress', function (e) {
-
-            if (e.loaded <= size) {
-                var percent = Math.round(e.loaded / size * 100);
-                $(`#upload-progress-bar`).css('width', percent + '%');
-                $(`#upload-progress-bar`).html(percent + '%');
-            }
-        });
-
-        xmlHttpRequest.open('POST', target, true);
-        xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
-        xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
-        xmlHttpRequest.setRequestHeader('X-Content-Length', size);
-        xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
-        xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', type);
-        xmlHttpRequest.setRequestHeader('X-FileName', fileName);
-        xmlHttpRequest.addEventListener('load', (event) => {
-            if (event.target.responseText == 'success') {
-                console.log('Upload for file', file.name, 'was successful!')
-                document.getElementById("upload_input").innerHTML = '<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%"><span id="file-uploaded" style="color: gray;">' + fileName + '</span> 🔒</div>';
-                setTimeout(function () {
-                    window.location.reload();
-                }, 2000);
-            }
-            else {
-                let response_text = JSON.parse(event.target.responseText);
-                var x = document.querySelector('.bootbox');
-                console.log(JSON.parse(event.target.responseText).info)
-                bootbox.alert({
-                    message: JSON.parse(event.target.responseText).info,
-                    callback: function () {
-                        window.location.reload();
-                    }
-                });
-                doUpload = false;
-            }
-        }, false);
-        xmlHttpRequest.addEventListener('error', (e) => {
-            console.error('Error while uploading file', file.name + '.', 'Event:', e)
-        }, false);
-        xmlHttpRequest.send(file);
-    }
 </script>
+<script src="../../static/assets/js/shared/upload.js"></script>
 {% end %}
@@ -20,7 +20,8 @@
     data-internet="{{ translate('startup', 'internet', data['lang']) }}"
     data-tasks="{{ translate('startup', 'tasks', data['lang']) }}"
     data-internals="{{ translate('startup', 'internals', data['lang']) }}"
-    data-almost="{{ translate('startup', 'almost', data['lang']) }}">
+    data-almost="{{ translate('startup', 'almost', data['lang']) }}"
+    data-cache="{{ translate('startup', 'cache', data['lang'])}}">
     {{ translate('startup', 'starting', data['lang']) }}</h2>
 </div>
 
@@ -428,10 +428,13 @@
 if (responseData.status === "ok") {
     window.location.href = "/panel/panel_config";
 } else {
+    let errordata = responseData.error;
+    if (responseData.error_data){
+        errordata = responseData.error
+    }
     bootbox.alert({
         title: responseData.error,
-        message: responseData.error_data
+        message: errordata
     });
 }
 });
@@ -122,7 +122,7 @@ data['lang']) }}{% end %}
     name="lang" form="user_form">
     {% for lang in data['languages'] %}
     {% if not 'incomplete' in lang %}
-    <option value="{{lang}}">{{lang}}</option>
+    <option value="{{lang}}" >{{translate('language', lang, 'humanized_index')}}</option>
     {% else %}
     <option value="{{lang}}" disabled>{{lang}}</option>
     {% end %}
@@ -393,6 +393,7 @@ data['lang']) }}{% end %}
 }
 function replacer(key, value) {
     if (typeof value == "boolean" || key === "email" || key === "permissions" || key === "roles") {
+        console.log(key)
         return value
     } else {
         console.log(key, value)
@@ -433,6 +434,7 @@ data['lang']) }}{% end %}
     let disabled_flag = false;
     let roles = null;
     if (superuser || userId != edit_id){
+        console.log("ROLES")
         roles = $('.role_check').map(function() {
             if ($(this).attr("disabled")){
                 disabled_flag = true;
@@ -457,9 +459,7 @@ data['lang']) }}{% end %}
         delete formDataObject.username
     }
     if (superuser || userId != edit_id){
-        if (!disabled_flag){
             formDataObject.roles = roles;
-        }
         if ($("#permissions").length){
             formDataObject.permissions = permissions;
         }
@@ -58,7 +58,7 @@
 <!--<th>ID</th>-->
 <th>{{ translate('apiKeys', 'name', data['lang']) }}</th>
 <th>{{ translate('apiKeys', 'created', data['lang']) }}</th>
-<th>{{ translate('apiKeys', 'superUser', data['lang']) }}</th>
+<th>{{ translate('apiKeys', 'fullAccess', data['lang']) }}</th>
 <th>{{ translate('apiKeys', 'perms', data['lang']) }}</th>
 <th>{{ translate('apiKeys', 'buttons', data['lang']) }}</th>
 </tr>
@@ -70,7 +70,7 @@
 <td>{{ apikey.name }}</td>
 <td>{{ apikey.created.strftime('%d/%m/%Y %H:%M:%S') }}</td>
 <td>
-    {% if apikey.superuser %}
+    {% if apikey.full_access %}
     <span class="text-success">
         <i class="fas fa-check-square"></i> {{
         translate('apiKeys', 'yes', data['lang']) }}
@@ -148,9 +148,15 @@
     }}</label>
 </td>
 <td>
+    {% if permission in data['user_crafty_permissions'] %}
     <input type="checkbox" class="crafty_perm"
         id="permission_{{ permission.name }}"
         name="permission_{{ permission.name }}" value="1">
+    {% else %}
+    <input type="checkbox" class="crafty_perm"
+        id="permission_{{ permission.name }}"
+        name="permission_{{ permission.name }}" value="1" disabled>
+    {% end %}
 </td>
 </tr>
 {% end %}
@@ -158,8 +164,8 @@
|
|||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
<label for="superuser">Superuser</label>
|
<label for="full_access">{{translate('apiKeys', 'fullAccess', data['lang'])}}</label>
|
||||||
<input type="checkbox" class="" id="superuser" name="superuser" value="1">
|
<input type="checkbox" class="" id="full_access" name="full_access" value="1">
|
||||||
|
|
||||||
<br />
|
<br />
|
||||||
|
|
||||||
@@ -240,7 +246,7 @@
|
|||||||
"name": formDataObject.name,
|
"name": formDataObject.name,
|
||||||
"server_permissions_mask": server_permissions,
|
"server_permissions_mask": server_permissions,
|
||||||
"crafty_permissions_mask": crafty_permissions,
|
"crafty_permissions_mask": crafty_permissions,
|
||||||
"superuser": $("#superuser").prop('checked'),
|
"full_access": $("#full_access").prop('checked'),
|
||||||
});
|
});
|
||||||
console.log(formDataJsonString);
|
console.log(formDataJsonString);
|
||||||
|
|
||||||
|
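For context on the API-key hunks above: the create-key form now submits a full_access flag where it previously sent superuser, alongside the unchanged permission masks. A minimal sketch of how that request body is assembled, assuming jQuery is available as elsewhere on the page and reusing the field names shown in the diff (the buildApiKeyBody helper itself is illustrative, not part of the codebase):

    // Sketch only: mirrors the JSON.stringify payload in the edited template.
    // name, server_permissions_mask, crafty_permissions_mask and full_access are
    // the field names from the diff; buildApiKeyBody is a hypothetical helper.
    function buildApiKeyBody(name, serverPermissions, craftyPermissions) {
      return JSON.stringify({
        "name": name,
        "server_permissions_mask": serverPermissions,
        "crafty_permissions_mask": craftyPermissions,
        "full_access": $("#full_access").prop('checked') // previously $("#superuser")
      });
    }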
@@ -17,7 +17,7 @@
|
|||||||
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
|
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
|
||||||
data['server_stats']['server_id']['server_name'] }}
|
data['server_stats']['server_id']['server_name'] }}
|
||||||
<br />
|
<br />
|
||||||
<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
|
<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
|
||||||
</h4>
|
</h4>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@@ -17,7 +17,7 @@
|
|||||||
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
|
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
|
||||||
data['server_stats']['server_id']['server_name'] }}
|
data['server_stats']['server_id']['server_name'] }}
|
||||||
<br />
|
<br />
|
||||||
<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
|
<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
|
||||||
</h4>
|
</h4>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -39,207 +39,150 @@
|
|||||||
<span class="d-block d-sm-none">
|
<span class="d-block d-sm-none">
|
||||||
{% include "parts/m_server_controls_list.html %}
|
{% include "parts/m_server_controls_list.html %}
|
||||||
</span>
|
</span>
|
||||||
|
|
||||||
<div class="row">
|
<div class="row">
|
||||||
<div class="col-md-6 col-sm-12">
|
<div class="col-md-12 col-sm-12" style="overflow-x:auto;">
|
||||||
<br>
|
<div class="card">
|
||||||
<br>
|
<div class="card-header header-sm d-flex justify-content-between align-items-center">
|
||||||
{% if data['backing_up'] %}
|
<h4 class="card-title"><i class="fa-regular fa-bell"></i> {{ translate('serverBackups', 'backups',
|
||||||
<div class="progress" style="height: 15px;">
|
data['lang']) }} </h4>
|
||||||
<div class="progress-bar progress-bar-striped progress-bar-animated" id="backup_progress_bar"
|
{% if data['user_data']['hints'] %}
|
||||||
role="progressbar" style="width:{{data['backup_stats']['percent']}}%;"
|
<span class="too_small" title="{{ translate('serverSchedules', 'cannotSee', data['lang']) }}" ,
|
||||||
aria-valuenow="{{data['backup_stats']['percent']}}" aria-valuemin="0" aria-valuemax="100">{{
|
data-content="{{ translate('serverSchedules', 'cannotSeeOnMobile', data['lang']) }}" ,
|
||||||
data['backup_stats']['percent'] }}%</div>
|
data-placement="bottom"></span>
|
||||||
</div>
|
|
||||||
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
|
|
||||||
id="total_files">{{data['server_stats']['world_size']}}</span></p>
|
|
||||||
{% end %}
|
{% end %}
|
||||||
|
<div><a class="btn btn-info"
|
||||||
<br>
|
href="/panel/add_backup?id={{ data['server_stats']['server_id']['server_id'] }}"><i
|
||||||
{% if not data['backing_up'] %}
|
class="fas fa-plus-circle"></i> {{ translate('serverBackups', 'newBackup', data['lang']) }}</a>
|
||||||
<div id="backup_button" class="form-group">
|
</div>
|
||||||
<button class="btn btn-primary" id="backup_now_button">{{ translate('serverBackups', 'backupNow',
|
</div>
|
||||||
data['lang']) }}</button>
|
<div class="card-body">
|
||||||
|
{% if len(data['backups']) == 0 %}
|
||||||
|
<div style="text-align: center; color: grey;">
|
||||||
|
<h7>{{ translate('serverBackups', 'no-backup', data['lang']) }}.</h7>
|
||||||
</div>
|
</div>
|
||||||
{% end %}
|
{% end %}
|
||||||
<form id="backup-form" class="forms-sample">
|
{% if len(data['backups']) > 0 %}
|
||||||
<div class="form-group">
|
<div class="d-none d-lg-block">
|
||||||
{% if data['super_user'] %}
|
<table class="table table-hover responsive-table" aria-label="backups list" id="backup_table"
|
||||||
<label for="server_name">{{ translate('serverBackups', 'storageLocation', data['lang']) }} <small
|
style="table-layout:fixed;">
|
||||||
class="text-muted ml-1"> - {{ translate('serverBackups', 'storageLocationDesc', data['lang'])
|
|
||||||
}}</small> </label>
|
|
||||||
<input type="text" class="form-control" name="backup_path" id="backup_path"
|
|
||||||
value="{{ data['server_stats']['server_id']['backup_path'] }}"
|
|
||||||
placeholder="{{ translate('serverBackups', 'storageLocation', data['lang']) }}">
|
|
||||||
{% end %}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="server_path">{{ translate('serverBackups', 'maxBackups', data['lang']) }} <small
|
|
||||||
class="text-muted ml-1"> - {{ translate('serverBackups', 'maxBackupsDesc', data['lang'])
|
|
||||||
}}</small> </label>
|
|
||||||
<input type="text" class="form-control" name="max_backups" id="max_backups"
|
|
||||||
value="{{ data['backup_config']['max_backups'] }}"
|
|
||||||
placeholder="{{ translate('serverBackups', 'maxBackups', data['lang']) }}">
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="compress" class="form-check-label ml-4 mb-4"></label>
|
|
||||||
{% if data['backup_config']['compress'] %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="compress" name="compress" checked=""
|
|
||||||
value="True">{{ translate('serverBackups', 'compress', data['lang']) }}
|
|
||||||
{% else %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="compress" name="compress" value="True">{{
|
|
||||||
translate('serverBackups', 'compress', data['lang']) }}
|
|
||||||
{% end %}
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="shutdown" class="form-check-label ml-4 mb-4"></label>
|
|
||||||
{% if data['backup_config']['shutdown'] %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="shutdown" name="shutdown" checked=""
|
|
||||||
value="True">{{ translate('serverBackups', 'shutdown', data['lang']) }}
|
|
||||||
{% else %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="shutdown" name="shutdown" value="True">{{
|
|
||||||
translate('serverBackups', 'shutdown', data['lang']) }}
|
|
||||||
{% end %}
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="command-check" class="form-check-label ml-4 mb-4"></label>
|
|
||||||
{% if data['backup_config']['before'] %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="before-check" name="before-check" checked>{{
|
|
||||||
translate('serverBackups', 'before', data['lang']) }}
|
|
||||||
<br>
|
|
||||||
<input type="text" class="form-control" name="backup_before" id="backup_before"
|
|
||||||
value="{{ data['backup_config']['before'] }}" placeholder="We enter the / for you"
|
|
||||||
style="display: inline-block;">
|
|
||||||
{% else %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="before-check" name="before-check">{{
|
|
||||||
translate('serverBackups', 'before', data['lang']) }}
|
|
||||||
<br>
|
|
||||||
<input type="text" class="form-control" name="backup_before" id="backup_before" value=""
|
|
||||||
placeholder="We enter the / for you." style="display: none;">
|
|
||||||
{% end %}
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="command-check" class="form-check-label ml-4 mb-4"></label>
|
|
||||||
{% if data['backup_config']['after'] %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="after-check" name="after-check" checked>{{
|
|
||||||
translate('serverBackups', 'after', data['lang']) }}
|
|
||||||
<br>
|
|
||||||
<input type="text" class="form-control" name="backup_after" id="backup_after"
|
|
||||||
value="{{ data['backup_config']['after'] }}" placeholder="We enter the / for you"
|
|
||||||
style="display: inline-block;">
|
|
||||||
{% else %}
|
|
||||||
<input type="checkbox" class="form-check-input" id="after-check" name="after-check">{{
|
|
||||||
translate('serverBackups', 'after', data['lang']) }}
|
|
||||||
<br>
|
|
||||||
<input type="text" class="form-control" name="backup_after" id="backup_after" value=""
|
|
||||||
placeholder="We enter the / for you." style="display: none;">
|
|
||||||
{% end %}
|
|
||||||
</div>
|
|
||||||
<div class="form-group">
|
|
||||||
<label for="server">{{ translate('serverBackups', 'exclusionsTitle', data['lang']) }} <small> - {{
|
|
||||||
translate('serverBackups', 'excludedChoose', data['lang']) }}</small></label>
|
|
||||||
<br>
|
|
||||||
<button class="btn btn-primary mr-2" id="root_files_button"
|
|
||||||
data-server_path="{{ data['server_stats']['server_id']['path']}}" type="button">{{
|
|
||||||
translate('serverBackups', 'clickExclude', data['lang']) }}</button>
|
|
||||||
</div>
|
|
||||||
<div class="modal fade" id="dir_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
|
|
||||||
aria-hidden="true">
|
|
||||||
<div class="modal-dialog" role="document">
|
|
||||||
<div class="modal-content">
|
|
||||||
<div class="modal-header">
|
|
||||||
<h5 class="modal-title" id="exampleModalLongTitle">{{ translate('serverBackups',
|
|
||||||
'excludedChoose', data['lang']) }}</h5>
|
|
||||||
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
|
|
||||||
<span aria-hidden="true">×</span>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<div class="modal-body">
|
|
||||||
<div class="tree-ctx-item" id="main-tree-div" data-path=""
|
|
||||||
style="overflow: scroll; max-height:75%;">
|
|
||||||
<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled>
|
|
||||||
<span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path="">
|
|
||||||
<i class="far fa-folder"></i>
|
|
||||||
<i class="far fa-folder-open"></i>
|
|
||||||
{{ translate('serverFiles', 'files', data['lang']) }}
|
|
||||||
</span>
|
|
||||||
</input>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="modal-footer">
|
|
||||||
<button type="button" id="modal-cancel" class="btn btn-secondary" data-dismiss="modal"><i class="fa-solid fa-xmark"></i></button>
|
|
||||||
<button type="button" id="modal-okay" data-dismiss="modal" class="btn btn-primary"><i class="fa-solid fa-thumbs-up"></i></button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<button type="submit" class="btn btn-success mr-2">{{ translate('serverBackups', 'save', data['lang'])
|
|
||||||
}}</button>
|
|
||||||
<button type="reset" class="btn btn-light">{{ translate('serverBackups', 'cancel', data['lang'])
|
|
||||||
}}</button>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="col-md-6 col-sm-12">
|
|
||||||
<div class="text-center">
|
|
||||||
|
|
||||||
<table class="table table-responsive dataTable" id="backup_table">
|
|
||||||
<h4 class="card-title">{{ translate('serverBackups', 'currentBackups', data['lang']) }}</h4>
|
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr class="rounded">
|
||||||
<th width="10%">{{ translate('serverBackups', 'options', data['lang']) }}</th>
|
<th scope="col" style="width: 15%; min-width: 10px;">{{ translate('serverBackups', 'name',
|
||||||
<th>{{ translate('serverBackups', 'path', data['lang']) }}</th>
|
data['lang']) }} </th>
|
||||||
<th width="20%">{{ translate('serverBackups', 'size', data['lang']) }}</th>
|
<th scope="col" style="width: 10%; min-width: 10px;">{{ translate('serverBackups', 'status',
|
||||||
|
data['lang']) }} </th>
|
||||||
|
<th scope="col" style="width: 50%; min-width: 50px;">{{ translate('serverBackups',
|
||||||
|
'storageLocation', data['lang']) }}</th>
|
||||||
|
<th scope="col" style="width: 10%; min-width: 50px;">{{ translate('serverBackups',
|
||||||
|
'maxBackups', data['lang']) }}</th>
|
||||||
|
<th scope="col" style="width: 10%; min-width: 50px;">{{ translate('serverBackups', 'actions',
|
||||||
|
data['lang']) }}</th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
{% for backup in data['backup_list'] %}
|
{% for backup in data['backups'] %}
|
||||||
<tr>
|
<tr>
|
||||||
|
<td id="{{backup.backup_name}}" class="id">
|
||||||
|
<p>{{backup.backup_name}}</p>
|
||||||
|
<br>
|
||||||
|
{% if backup.default %}
|
||||||
|
<span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
|
||||||
|
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
|
||||||
|
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
|
||||||
|
class="fa-solid fa-question"></i></button></small>
|
||||||
|
{% end %}
|
||||||
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<a href="/panel/download_backup?file={{ backup['path'] }}&id={{ data['server_stats']['server_id']['server_id'] }}"
|
<div id="{{backup.backup_id}}_status">
|
||||||
class="btn btn-primary">
|
<button class="btn btn-outline-success backup-status" data-status="{{ backup.status }}"
|
||||||
<i class="fas fa-download" aria-hidden="true"></i>
|
data-Standby="{{ translate('serverBackups', 'standby', data['lang'])}}"
|
||||||
{{ translate('serverBackups', 'download', data['lang']) }}
|
data-Failed="{{ translate('serverBackups', 'failed', data['lang'])}}"></button>
|
||||||
</a>
|
</div>
|
||||||
<br>
|
</td>
|
||||||
<br>
|
<td id="{{backup.backup_location}}" class="type">
|
||||||
<button data-file="{{ backup['path'] }}" data-backup_path="{{ data['backup_path'] }}"
|
<p style="overflow: scroll;" class="no-scroll">{{backup.backup_location}}</p>
|
||||||
class="btn btn-danger del_button">
|
</td>
|
||||||
<i class="fas fa-trash" aria-hidden="true"></i>
|
<td id="{{backup.max_backups}}" class="trigger" style="overflow: scroll; max-width: 30px;">
|
||||||
{{ translate('serverBackups', 'delete', data['lang']) }}
|
<p>{{backup.max_backups}}</p>
|
||||||
|
</td>
|
||||||
|
<td id="backup_edit" class="action">
|
||||||
|
<button
|
||||||
|
onclick="window.location.href=`/panel/edit_backup?id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{backup.backup_id}}`"
|
||||||
|
class="btn btn-info">
|
||||||
|
<i class="fas fa-pencil-alt"></i>
|
||||||
</button>
|
</button>
|
||||||
<button data-file="{{ backup['path'] }}" class="btn btn-warning restore_button">
|
{% if not backup.default %}
|
||||||
<i class="fas fa-undo-alt" aria-hidden="true"></i>
|
<button data-backup={{ backup.backup_id }} class="btn btn-danger del_button">
|
||||||
{{ translate('serverBackups', 'restore', data['lang']) }}
|
<i class="fas fa-trash" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
{% end %}
|
||||||
|
<button data-backup={{ backup.backup_id }} data-toggle="tooltip"
|
||||||
|
title="{{ translate('serverBackups', 'run', data['lang']) }}"
|
||||||
|
class="btn btn-outline-warning run-backup backup_now_button">
|
||||||
|
<i class="fa-solid fa-forward"></i>
|
||||||
</button>
|
</button>
|
||||||
</td>
|
</td>
|
||||||
<td>{{ backup['path'] }}</td>
|
|
||||||
<td>{{ backup['size'] }}</td>
|
|
||||||
</tr>
|
</tr>
|
||||||
{% end %}
|
{% end %}
|
||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
<div class="d-block d-lg-none">
|
||||||
</div>
|
<table aria-label="backups list" class="table table-hover responsive-table" id="backup_table_mini"
|
||||||
<div class="col-md-12 col-sm-12">
|
style="table-layout:fixed;">
|
||||||
|
<thead>
|
||||||
|
<tr class="rounded">
|
||||||
|
<th style="width: 40%; min-width: 10px;">Name
|
||||||
|
</th>
|
||||||
|
<th style="width: 40%; min-width: 50px;">{{ translate('serverBackups', 'edit', data['lang'])
|
||||||
|
}}</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for backup in data['backups'] %}
|
||||||
|
<tr>
|
||||||
|
<td id="{{backup.backup_name}}" class="id">
|
||||||
|
<p>{{backup.backup_name}}</p>
|
||||||
<br>
|
<br>
|
||||||
<br>
|
<div id="{{backup.backup_id}}_status">
|
||||||
<div class="card-header header-sm d-flex justify-content-between align-items-center">
|
<button class="btn btn-outline-success backup-status" data-status="{{ backup.status }}"
|
||||||
<h4 class="card-title"><i class="fas fa-server"></i> {{ translate('serverBackups', 'excludedBackups',
|
data-Standby="{{ translate('serverBackups', 'standby', data['lang'])}}"
|
||||||
data['lang']) }} <small class="text-muted ml-1"></small> </h4>
|
data-Failed="{{ translate('serverBackups', 'failed', data['lang'])}}"></button>
|
||||||
</div>
|
</div>
|
||||||
<br>
|
<br>
|
||||||
<ul>
|
{% if backup.default %}
|
||||||
{% for item in data['exclusions'] %}
|
<span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
|
||||||
<li>{{item}}</li>
|
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
|
||||||
<br>
|
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
|
||||||
|
class="fa-solid fa-question"></i></button></small>
|
||||||
{% end %}
|
{% end %}
|
||||||
</ul>
|
</td>
|
||||||
|
<td id="backup_edit" class="action">
|
||||||
|
<button
|
||||||
|
onclick="window.location.href=`/panel/edit_backup?id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{backup.backup_id}}`"
|
||||||
|
class="btn btn-info">
|
||||||
|
<i class="fas fa-pencil-alt"></i>
|
||||||
|
</button>
|
||||||
|
{% if not backup.default %}
|
||||||
|
<button data-backup={{ backup.backup_id }} class="btn btn-danger del_button">
|
||||||
|
<i class="fas fa-trash" aria-hidden="true"></i>
|
||||||
|
</button>
|
||||||
|
{% end %}
|
||||||
|
<button data-backup={{ backup.backup_id }} data-toggle="tooltip"
|
||||||
|
title="{{ translate('serverBackups', 'run', data['lang']) }}"
|
||||||
|
class="btn btn-outline-warning test-socket backup_now_button">
|
||||||
|
<i class="fa-solid fa-forward"></i>
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% end %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{% end %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
@@ -298,7 +241,7 @@
|
|||||||
{% block js %}
|
{% block js %}
|
||||||
<script>
|
<script>
|
||||||
|
|
||||||
const server_id = new URLSearchParams(document.location.search).get('id')
|
const serverId = new URLSearchParams(document.location.search).get('id')
|
||||||
|
|
||||||
|
|
||||||
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
|
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
|
||||||
@@ -307,9 +250,10 @@
|
|||||||
return r ? r[1] : undefined;
|
return r ? r[1] : undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function backup_started() {
|
async function backup_started(backup_id) {
|
||||||
const token = getCookie("_xsrf")
|
const token = getCookie("_xsrf")
|
||||||
let res = await fetch(`/api/v2/servers/${server_id}/action/backup_server`, {
|
console.log(backup_id)
|
||||||
|
let res = await fetch(`/api/v2/servers/${serverId}/action/backup_server/${backup_id}/`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
'X-XSRFToken': token
|
'X-XSRFToken': token
|
||||||
@@ -318,14 +262,7 @@
|
|||||||
let responseData = await res.json();
|
let responseData = await res.json();
|
||||||
if (responseData.status === "ok") {
|
if (responseData.status === "ok") {
|
||||||
console.log(responseData);
|
console.log(responseData);
|
||||||
$("#backup_button").html(`<div class="progress" style="height: 15px;">
|
$("#backup_button").prop('disabled', true)
|
||||||
<div class="progress-bar progress-bar-striped progress-bar-animated" id="backup_progress_bar"
|
|
||||||
role="progressbar" style="width:{{data['backup_stats']['percent']}}%;"
|
|
||||||
aria-valuenow="{{data['backup_stats']['percent']}}" aria-valuemin="0" aria-valuemax="100">{{
|
|
||||||
data['backup_stats']['percent'] }}%</div>
|
|
||||||
</div>
|
|
||||||
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
|
|
||||||
id="total_files">{{data['server_stats']['world_size']}}</span></p>`);
|
|
||||||
} else {
|
} else {
|
||||||
|
|
||||||
bootbox.alert({
|
bootbox.alert({
|
||||||
@@ -335,155 +272,83 @@
|
|||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
async function del_backup(filename, id) {
|
async function del_backup(backup_id) {
|
||||||
const token = getCookie("_xsrf")
|
const token = getCookie("_xsrf")
|
||||||
let contents = JSON.stringify({"filename": filename})
|
let res = await fetch(`/api/v2/servers/${serverId}/backups/backup/${backup_id}`, {
|
||||||
let res = await fetch(`/api/v2/servers/${id}/backups/backup/`, {
|
|
||||||
method: 'DELETE',
|
method: 'DELETE',
|
||||||
headers: {
|
headers: {
|
||||||
'token': token,
|
'token': token,
|
||||||
},
|
},
|
||||||
body: contents
|
body: {}
|
||||||
});
|
});
|
||||||
let responseData = await res.json();
|
let responseData = await res.json();
|
||||||
if (responseData.status === "ok") {
|
if (responseData.status === "ok") {
|
||||||
window.location.reload();
|
window.location.reload();
|
||||||
}else{
|
|
||||||
bootbox.alert({"title": responseData.status,
|
|
||||||
"message": responseData.error})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function restore_backup(filename, id) {
|
|
||||||
const token = getCookie("_xsrf")
|
|
||||||
let contents = JSON.stringify({"filename": filename})
|
|
||||||
var dialog = bootbox.dialog({
|
|
||||||
message: "<i class='fa fa-spin fa-spinner'></i> {{ translate('serverBackups', 'restoring', data['lang']) }}",
|
|
||||||
closeButton: false
|
|
||||||
});
|
|
||||||
let res = await fetch(`/api/v2/servers/${id}/backups/backup/`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'token': token,
|
|
||||||
},
|
|
||||||
body: contents
|
|
||||||
});
|
|
||||||
let responseData = await res.json();
|
|
||||||
if (responseData.status === "ok") {
|
|
||||||
window.location.href = "/panel/dashboard";
|
|
||||||
}else{
|
|
||||||
bootbox.alert({"title": responseData.status,
|
|
||||||
"message": responseData.error})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
$("#before-check").on("click", function () {
|
|
||||||
if ($("#before-check:checked").val()) {
|
|
||||||
$("#backup_before").css("display", "inline-block");
|
|
||||||
} else {
|
} else {
|
||||||
$("#backup_before").css("display", "none");
|
bootbox.alert({
|
||||||
$("#backup_before").val("");
|
"title": responseData.status,
|
||||||
}
|
"message": responseData.error
|
||||||
});
|
})
|
||||||
$("#after-check").on("click", function () {
|
|
||||||
if ($("#after-check:checked").val()) {
|
|
||||||
$("#backup_after").css("display", "inline-block");
|
|
||||||
} else {
|
|
||||||
$("#backup_after").css("display", "none");
|
|
||||||
$("#backup_after").val("");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
function replacer(key, value) {
|
|
||||||
if (key != "backup_before" && key != "backup_after") {
|
|
||||||
if (typeof value == "boolean" || key === "executable_update_url") {
|
|
||||||
return value
|
|
||||||
} else {
|
|
||||||
return (isNaN(value) ? value : +value);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return value;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
$(document).ready(function () {
|
$(document).ready(function () {
|
||||||
$("#backup-form").on("submit", async function (e) {
|
|
||||||
e.preventDefault();
|
|
||||||
const token = getCookie("_xsrf")
|
|
||||||
let backupForm = document.getElementById("backup-form");
|
|
||||||
|
|
||||||
let formData = new FormData(backupForm);
|
|
||||||
//Remove checks that we don't need in form data.
|
|
||||||
formData.delete("after-check");
|
|
||||||
formData.delete("before-check");
|
|
||||||
//Create an object from the form data entries
|
|
||||||
let formDataObject = Object.fromEntries(formData.entries());
|
|
||||||
//We need to make sure these are sent regardless of whether or not they're checked
|
|
||||||
formDataObject.compress = $("#compress").prop('checked');
|
|
||||||
formDataObject.shutdown = $("#shutdown").prop('checked');
|
|
||||||
let excluded = [];
|
|
||||||
$('input.excluded:checkbox:checked').each(function () {
|
|
||||||
excluded.push($(this).val());
|
|
||||||
});
|
|
||||||
if ($("#root_files_button").hasClass("clicked")){
|
|
||||||
formDataObject.exclusions = excluded;
|
|
||||||
}
|
|
||||||
delete formDataObject.root_path
|
|
||||||
console.log(excluded);
|
|
||||||
console.log(formDataObject);
|
|
||||||
// Format the plain form data as JSON
|
|
||||||
let formDataJsonString = JSON.stringify(formDataObject, replacer);
|
|
||||||
|
|
||||||
console.log(formDataJsonString);
|
|
||||||
|
|
||||||
let res = await fetch(`/api/v2/servers/${server_id}/backups/`, {
|
|
||||||
method: 'PATCH',
|
|
||||||
headers: {
|
|
||||||
'X-XSRFToken': token
|
|
||||||
},
|
|
||||||
body: formDataJsonString,
|
|
||||||
});
|
|
||||||
let responseData = await res.json();
|
|
||||||
if (responseData.status === "ok") {
|
|
||||||
window.location.reload();
|
|
||||||
} else {
|
|
||||||
|
|
||||||
bootbox.alert({
|
|
||||||
title: responseData.error,
|
|
||||||
message: responseData.error_data
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
if ($('#backup_path').val() == '') {
|
|
||||||
console.log('true')
|
|
||||||
try {
|
|
||||||
document.getElementById('backup_now_button').disabled = true;
|
|
||||||
} catch {
|
|
||||||
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
document.getElementById('backup_now_button').disabled = false;
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
try {
|
|
||||||
document.getElementById('backup_now_button').disabled = false;
|
|
||||||
} catch {
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
console.log("ready!");
|
console.log("ready!");
|
||||||
$("#backup_config_box").hide();
|
$(".backup-explain").on("click", function () {
|
||||||
$("#backup_save_note").hide();
|
bootbox.alert($(this).data("explain"));
|
||||||
|
|
||||||
$("#show_config").click(function () {
|
|
||||||
$("#backup_config_box").toggle();
|
|
||||||
$('#backup_button').hide();
|
|
||||||
$('#backup_save_note').show();
|
|
||||||
$('#backup_data').hide();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
$(".backup-status").on("click", function () {
|
||||||
|
if ($(this).data('message') != "") {
|
||||||
|
bootbox.alert($(this).data('message'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
$('.backup-status').each(function () {
|
||||||
|
// Get the JSON string from the element's text
|
||||||
|
var data = $(this).data('status');
|
||||||
|
|
||||||
|
try {
|
||||||
|
|
||||||
|
// Update the element's text with the status value
|
||||||
|
$(this).text($(this).data(data["status"].toLowerCase()));
|
||||||
|
|
||||||
|
// Optionally, add classes based on status to style the element
|
||||||
|
$(this).attr('data-message', data["message"]);
|
||||||
|
if (data.status === 'Active') {
|
||||||
|
$(this).removeClass();
|
||||||
|
$(this).addClass('badge-pill badge-outline-success btn');
|
||||||
|
} else if (data.status === 'Failed') {
|
||||||
|
$(this).removeClass();
|
||||||
|
$(this).addClass('badge-pill badge-outline-danger btn');
|
||||||
|
} else if (data.status === 'Standby') {
|
||||||
|
$(this).removeClass();
|
||||||
|
$(this).addClass('badge-pill badge-outline-secondary btn');
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Invalid JSON string:', e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (webSocket) {
|
||||||
|
webSocket.on('backup_status', function (backup) {
|
||||||
|
text = ``;
|
||||||
|
console.log(backup)
|
||||||
|
if (backup.percent >= 100) {
|
||||||
|
$(`#${backup.backup_id}_status`).html(`<span class="badge-pill badge-outline-success backup-status"
|
||||||
|
>Completed</span>`);
|
||||||
|
setTimeout(function () {
|
||||||
|
window.location.reload(1);
|
||||||
|
}, 5000);
|
||||||
|
} else {
|
||||||
|
text = `<div class="progress-bar progress-bar-striped progress-bar-animated"
|
||||||
|
role="progressbar" style="width:${backup.percent}%;"
|
||||||
|
aria-valuenow="${backup.percent}" aria-valuemin="0" aria-valuemax="100">${backup.percent}%</div>`
|
||||||
|
|
||||||
|
$(`#${backup.backup_id}_status`).html(text);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
$('#backup_table').DataTable({
|
$('#backup_table').DataTable({
|
||||||
"order": [[1, "desc"]],
|
"order": [[1, "desc"]],
|
||||||
"paging": false,
|
"paging": false,
|
||||||
@@ -491,11 +356,12 @@
|
|||||||
"searching": true,
|
"searching": true,
|
||||||
"ordering": true,
|
"ordering": true,
|
||||||
"info": true,
|
"info": true,
|
||||||
"autoWidth": false,
|
"autoWidth": true,
|
||||||
"responsive": true,
|
"responsive": false,
|
||||||
});
|
});
|
||||||
|
|
||||||
$(".del_button").click(function () {
|
$(".del_button").click(function () {
|
||||||
|
let backup = $(this).data('backup');
|
||||||
var file_to_del = $(this).data("file");
|
var file_to_del = $(this).data("file");
|
||||||
var backup_path = $(this).data('backup_path');
|
var backup_path = $(this).data('backup_path');
|
||||||
|
|
||||||
@@ -515,8 +381,8 @@
|
|||||||
callback: function (result) {
|
callback: function (result) {
|
||||||
console.log(result);
|
console.log(result);
|
||||||
if (result == true) {
|
if (result == true) {
|
||||||
var full_path = backup_path + '/' + file_to_del;
|
|
||||||
del_backup(file_to_del, server_id);
|
del_backup(backup);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -541,13 +407,13 @@
|
|||||||
callback: function (result) {
|
callback: function (result) {
|
||||||
console.log(result);
|
console.log(result);
|
||||||
if (result == true) {
|
if (result == true) {
|
||||||
restore_backup(file_to_restore, server_id);
|
restore_backup(file_to_restore, serverId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
$("#backup_now_button").click(function () {
|
$(".backup_now_button").click(function () {
|
||||||
backup_started();
|
backup_started($(this).data('backup'));
|
||||||
});
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
@@ -591,40 +457,25 @@
|
|||||||
bootbox.alert("You must input a path before selecting this button");
|
bootbox.alert("You must input a path before selecting this button");
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
if (webSocket) {
|
|
||||||
webSocket.on('backup_status', function (backup) {
|
|
||||||
if (backup.percent >= 100) {
|
|
||||||
document.getElementById('backup_progress_bar').innerHTML = '100%';
|
|
||||||
document.getElementById('backup_progress_bar').style.width = '100%';
|
|
||||||
setTimeout(function () {
|
|
||||||
window.location.reload(1);
|
|
||||||
}, 5000);
|
|
||||||
} else {
|
|
||||||
document.getElementById('backup_progress_bar').innerHTML = backup.percent + '%';
|
|
||||||
document.getElementById('backup_progress_bar').style.width = backup.percent + '%';
|
|
||||||
document.getElementById('total_files').innerHTML = backup.total_files;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDirView(event){
|
function getDirView(event) {
|
||||||
let path = event.target.parentElement.getAttribute("data-path");
|
let path = event.target.parentElement.getAttribute("data-path");
|
||||||
if (document.getElementById(path).classList.contains('clicked')) {
|
if (document.getElementById(path).classList.contains('clicked')) {
|
||||||
return;
|
return;
|
||||||
}else{
|
} else {
|
||||||
getTreeView(path);
|
getTreeView(path);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
async function getTreeView(path){
|
async function getTreeView(path) {
|
||||||
console.log(path)
|
console.log(path)
|
||||||
const token = getCookie("_xsrf");
|
const token = getCookie("_xsrf");
|
||||||
let res = await fetch(`/api/v2/servers/${server_id}/files`, {
|
let res = await fetch(`/api/v2/servers/${serverId}/files`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
'X-XSRFToken': token
|
'X-XSRFToken': token
|
||||||
},
|
},
|
||||||
body: JSON.stringify({"page": "backups", "path": path}),
|
body: JSON.stringify({ "page": "backups", "path": path }),
|
||||||
});
|
});
|
||||||
let responseData = await res.json();
|
let responseData = await res.json();
|
||||||
if (responseData.status === "ok") {
|
if (responseData.status === "ok") {
|
||||||
@@ -644,17 +495,17 @@
|
|||||||
let path = response.data.root_path.path;
|
let path = response.data.root_path.path;
|
||||||
let text = `<ul class="tree-nested d-block" id="${path}ul">`;
|
let text = `<ul class="tree-nested d-block" id="${path}ul">`;
|
||||||
Object.entries(response.data).forEach(([key, value]) => {
|
Object.entries(response.data).forEach(([key, value]) => {
|
||||||
if (key === "root_path" || key === "db_stats"){
|
if (key === "root_path" || key === "db_stats") {
|
||||||
//continue is not valid in for each. Return acts as a continue.
|
//continue is not valid in for each. Return acts as a continue.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let checked = ""
|
let checked = ""
|
||||||
let dpath = value.path;
|
let dpath = value.path;
|
||||||
let filename = key;
|
let filename = key;
|
||||||
if (value.excluded){
|
if (value.excluded) {
|
||||||
checked = "checked"
|
checked = "checked"
|
||||||
}
|
}
|
||||||
if (value.dir){
|
if (value.dir) {
|
||||||
text += `<li class="tree-item" data-path="${dpath}">
|
text += `<li class="tree-item" data-path="${dpath}">
|
||||||
\n<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
|
\n<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||||
<input type="checkbox" class="checkBoxClass excluded" value="${dpath}" ${checked}>
|
<input type="checkbox" class="checkBoxClass excluded" value="${dpath}" ${checked}>
|
||||||
@@ -664,7 +515,7 @@
|
|||||||
<strong>${filename}</strong>
|
<strong>${filename}</strong>
|
||||||
</span>
|
</span>
|
||||||
</input></div><li>`
|
</input></div><li>`
|
||||||
}else{
|
} else {
|
||||||
text += `<li
|
text += `<li
|
||||||
class="d-block tree-ctx-item tree-file"
|
class="d-block tree-ctx-item tree-file"
|
||||||
data-path="${dpath}"
|
data-path="${dpath}"
|
||||||
@@ -674,14 +525,14 @@
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
text += `</ul>`;
|
text += `</ul>`;
|
||||||
if(response.data.root_path.top){
|
if (response.data.root_path.top) {
|
||||||
try {
|
try {
|
||||||
document.getElementById('main-tree-div').innerHTML += text;
|
document.getElementById('main-tree-div').innerHTML += text;
|
||||||
document.getElementById('main-tree').parentElement.classList.add("clicked");
|
document.getElementById('main-tree').parentElement.classList.add("clicked");
|
||||||
} catch {
|
} catch {
|
||||||
document.getElementById('files-tree').innerHTML = text;
|
document.getElementById('files-tree').innerHTML = text;
|
||||||
}
|
}
|
||||||
}else{
|
} else {
|
||||||
try {
|
try {
|
||||||
document.getElementById(path + "span").classList.add('tree-caret-down');
|
document.getElementById(path + "span").classList.add('tree-caret-down');
|
||||||
document.getElementById(path).innerHTML += text;
|
document.getElementById(path).innerHTML += text;
|
||||||
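The hunks above replace the old filename-based backup calls with backup_id-addressed endpoints: running a backup now POSTs to /api/v2/servers/<id>/action/backup_server/<backup_id>/ and deleting one sends DELETE to /api/v2/servers/<id>/backups/backup/<backup_id>. A minimal consolidated sketch of the two calls, assuming the getCookie("_xsrf") helper defined earlier in the template script:

    // Sketch only: condenses the id-based fetch calls introduced in the diff above.
    async function runBackup(serverId, backupId) {
      const token = getCookie("_xsrf"); // tornado xsrf cookie, as in the template
      let res = await fetch(`/api/v2/servers/${serverId}/action/backup_server/${backupId}/`, {
        method: 'POST',
        headers: { 'X-XSRFToken': token }
      });
      return await res.json(); // responseData.status === "ok" on success
    }

    async function deleteBackup(serverId, backupId) {
      const token = getCookie("_xsrf");
      let res = await fetch(`/api/v2/servers/${serverId}/backups/backup/${backupId}`, {
        method: 'DELETE',
        headers: { 'token': token }
      });
      return await res.json();
    }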
|
app/frontend/templates/panel/server_backup_edit.html (new file, 758 lines)
@@ -0,0 +1,758 @@
|
|||||||
|
{% extends ../base.html %}
|
||||||
|
|
||||||
|
{% block meta %}
|
||||||
|
{% end %}
|
||||||
|
|
||||||
|
{% block title %}Crafty Controller - {{ translate('serverDetails', 'serverDetails', data['lang']) }}{% end %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<div class="content-wrapper">
|
||||||
|
|
||||||
|
<!-- Page Title Header Starts-->
|
||||||
|
<div class="row page-title-header">
|
||||||
|
<div class="col-12">
|
||||||
|
<div class="page-header">
|
||||||
|
<h4 class="page-title">
|
||||||
|
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
|
||||||
|
data['server_stats']['server_id']['server_name'] }}
|
||||||
|
<br />
|
||||||
|
<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
|
||||||
|
</h4>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<!-- Page Title Header Ends-->
|
||||||
|
|
||||||
|
{% include "parts/details_stats.html %}
|
||||||
|
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col-sm-12 grid-margin">
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body pt-0">
|
||||||
|
|
||||||
|
<span class="d-none d-sm-block">
|
||||||
|
{% include "parts/server_controls_list.html %}
|
||||||
|
</span>
|
||||||
|
<span class="d-block d-sm-none">
|
||||||
|
{% include "parts/m_server_controls_list.html %}
|
||||||
|
</span>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-md-6 col-sm-12">
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
<div id="{{data['backup_config'].get('backup_id', None)}}_status" class="progress"
|
||||||
|
style="height: 15px; display: none;">
|
||||||
|
</div>
|
||||||
|
{% if data['backing_up'] %}
|
||||||
|
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
|
||||||
|
id="total_files">{{data['server_stats']['world_size']}}</span></p>
|
||||||
|
{% end %}
|
||||||
|
|
||||||
|
<br>
|
||||||
|
{% if not data['backing_up'] %}
|
||||||
|
<div id="backup_button" class="form-group">
|
||||||
|
<button class="btn btn-primary" id="backup_now_button">{{ translate('serverBackups', 'backupNow',
|
||||||
|
data['lang']) }}</button>
|
||||||
|
</div>
|
||||||
|
{% end %}
|
||||||
|
<form id="backup-form" class="forms-sample">
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="backup_name">{{ translate('serverBackups', 'name', data['lang']) }}
|
||||||
|
{% if data["backup_config"].get("default", None) %}
|
||||||
|
<span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
|
||||||
|
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
|
||||||
|
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
|
||||||
|
class="fa-solid fa-question"></i></button></small>
|
||||||
|
{% end %}
|
||||||
|
</label>
|
||||||
|
{% if data["backup_config"].get("backup_id", None) %}
|
||||||
|
<input type="text" class="form-control" name="backup_name" id="backup_name"
|
||||||
|
value="{{ data['backup_config']['backup_name'] }}">
|
||||||
|
{% else %}
|
||||||
|
<input type="text" class="form-control" name="backup_name" id="backup_name"
|
||||||
|
placeholder="{{ translate('serverBackups', 'myBackup', data['lang']) }}">
|
||||||
|
{% end %}
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
{% if data['super_user'] %}
|
||||||
|
<label for="server_name">{{ translate('serverBackups', 'storageLocation', data['lang']) }} <small
|
||||||
|
class="text-muted ml-1"> - {{ translate('serverBackups', 'storageLocationDesc', data['lang'])
|
||||||
|
}}</small> </label>
|
||||||
|
<input type="text" class="form-control" name="backup_location" id="backup_location"
|
||||||
|
value="{{ data['backup_config']['backup_location'] }}"
|
||||||
|
placeholder="{{ translate('serverBackups', 'storageLocation', data['lang']) }}">
|
||||||
|
{% end %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="server_path">{{ translate('serverBackups', 'maxBackups', data['lang']) }} <small
|
||||||
|
class="text-muted ml-1"> - {{ translate('serverBackups', 'maxBackupsDesc', data['lang'])
|
||||||
|
}}</small> </label>
|
||||||
|
<input type="text" class="form-control" name="max_backups" id="max_backups"
|
||||||
|
value="{{ data['backup_config']['max_backups'] }}"
|
||||||
|
placeholder="{{ translate('serverBackups', 'maxBackups', data['lang']) }}">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<div class="custom-control custom-switch">
|
||||||
|
{% if data['backup_config']['compress'] %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="compress" name="compress" checked=""
|
||||||
|
value="True">
|
||||||
|
{% else %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="compress" name="compress" value="True">
|
||||||
|
{% end %}
|
||||||
|
<label for="compress" class="custom-control-label">{{ translate('serverBackups', 'compress',
|
||||||
|
data['lang']) }}</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<div class="custom-control custom-switch">
|
||||||
|
{% if data['backup_config']['shutdown']%}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="shutdown" name="shutdown" checked=""
|
||||||
|
value="True">
|
||||||
|
{% else %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="shutdown" name="shutdown" value="True">
|
||||||
|
{% end %}
|
||||||
|
<label for="shutdown" class="custom-control-label">{{ translate('serverBackups', 'shutdown',
|
||||||
|
data['lang']) }}</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<div class="custom-control custom-switch">
|
||||||
|
{% if data['backup_config']['before'] %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="before-check" name="before-check" checked>
|
||||||
|
<input type="text" class="form-control hidden-input" name="before" id="backup_before"
|
||||||
|
value="{{ data['backup_config']['before'] }}" placeholder="We enter the / for you"
|
||||||
|
style="display: inline-block;">
|
||||||
|
{% else %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="before-check" name="before-check">
|
||||||
|
<input type="text" class="form-control hidden-input" name="before" id="backup_before" value=""
|
||||||
|
placeholder="We enter the / for you." style="display: none;">
|
||||||
|
{% end %}
|
||||||
|
<label for="before-check" class="custom-control-label">{{
|
||||||
|
translate('serverBackups', 'before', data['lang']) }}</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<div class="custom-control custom-switch">
|
||||||
|
{% if data['backup_config']['after'] %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="after-check" name="after-check" checked>
|
||||||
|
<input type="text" class="form-control hidden-input" name="after" id="backup_after"
|
||||||
|
value="{{ data['backup_config']['after'] }}" placeholder="We enter the / for you"
|
||||||
|
style="display: inline-block;">
|
||||||
|
<br>
|
||||||
|
{% else %}
|
||||||
|
<input type="checkbox" class="custom-control-input" id="after-check" name="after-check">
|
||||||
|
<input type="text" class="form-control hidden-input" name="after" id="backup_after" value=""
|
||||||
|
placeholder="We enter the / for you." style="display: none;">
|
||||||
|
{% end %}
|
||||||
|
<label for="after-check" class="custom-control-label">{{
|
||||||
|
translate('serverBackups', 'after', data['lang']) }}</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="server">{{ translate('serverBackups', 'exclusionsTitle', data['lang']) }} <small> - {{
|
||||||
|
translate('serverBackups', 'excludedChoose', data['lang']) }}</small></label>
|
||||||
|
<br>
|
||||||
|
<button class="btn btn-primary mr-2" id="root_files_button"
|
||||||
|
data-server_path="{{ data['server_stats']['server_id']['path']}}" type="button">{{
|
||||||
|
translate('serverBackups', 'clickExclude', data['lang']) }}</button>
|
||||||
|
</div>
|
||||||
|
<div class="modal fade" id="dir_select" tabindex="-1" aria-labelledby="dir_select" aria-hidden="true">
|
||||||
|
<div class="modal-dialog">
|
||||||
|
<div class="modal-content">
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title" id="exampleModalLongTitle">{{ translate('serverBackups',
|
||||||
|
'excludedChoose', data['lang']) }}</h5>
|
||||||
|
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
|
||||||
|
<span aria-hidden="true">×</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="modal-body">
|
||||||
|
<div class="tree-ctx-item" id="main-tree-div" data-path=""
|
||||||
|
style="overflow: scroll; max-height:75%;">
|
||||||
|
<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled>
|
||||||
|
<span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path="">
|
||||||
|
<i class="far fa-folder"></i>
|
||||||
|
<i class="far fa-folder-open"></i>
|
||||||
|
{{ translate('serverFiles', 'files', data['lang']) }}
|
||||||
|
</span>
|
||||||
|
</input>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="modal-footer">
|
||||||
|
<button type="button" id="modal-cancel" class="btn btn-secondary" data-dismiss="modal"><i
|
||||||
|
class="fa-solid fa-xmark"></i></button>
|
||||||
|
<button type="button" id="modal-okay" data-dismiss="modal" class="btn btn-primary"><i
|
||||||
|
class="fa-solid fa-thumbs-up"></i></button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<button type="submit" class="btn btn-success mr-2">{{ translate('serverBackups', 'save', data['lang'])
|
||||||
|
}}</button>
|
||||||
|
<button type="reset" class="btn btn-light cancel-button">{{ translate('serverBackups', 'cancel',
|
||||||
|
data['lang'])
|
||||||
|
}}</button>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-6 col-sm-12">
|
||||||
|
<div class="text-center">
|
||||||
|
|
||||||
|
<table class="table table-responsive dataTable" id="backup_table">
|
||||||
|
<h4 class="card-title">{{ translate('serverBackups', 'currentBackups', data['lang']) }}</h4>
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>{{ translate('serverBackups', 'options', data['lang']) }}</th>
|
||||||
|
<th>{{ translate('serverBackups', 'path', data['lang']) }}</th>
|
||||||
|
<th>{{ translate('serverBackups', 'size', data['lang']) }}</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for backup in data['backup_list'] %}
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
<a href="/panel/download_backup?file={{ backup['path'] }}&id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{ data['backup_config']['backup_id']}}"
|
||||||
|
class="btn btn-primary">
|
||||||
|
<i class="fas fa-download" aria-hidden="true"></i>
|
||||||
|
{{ translate('serverBackups', 'download', data['lang']) }}
|
||||||
|
</a>
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
<button data-file="{{ backup['path'] }}"
|
||||||
|
data-backup_location="{{ data['backup_config']['backup_location'] }}"
|
||||||
|
class="btn btn-danger del_button">
|
||||||
|
<i class="fas fa-trash" aria-hidden="true"></i>
|
||||||
|
{{ translate('serverBackups', 'delete', data['lang']) }}
|
||||||
|
</button>
|
||||||
|
<button data-file="{{ backup['path'] }}" class="btn btn-warning restore_button">
|
||||||
|
<i class="fas fa-undo-alt" aria-hidden="true"></i>
|
||||||
|
{{ translate('serverBackups', 'restore', data['lang']) }}
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
<td>{{ backup['path'] }}</td>
|
||||||
|
<td>{{ backup['size'] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% end %}
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="col-md-12 col-sm-12">
|
||||||
|
<br>
|
||||||
|
<br>
|
||||||
|
<div class="card-header header-sm d-flex justify-content-between align-items-center">
|
||||||
|
<h4 class="card-title"><i class="fas fa-server"></i> {{ translate('serverBackups', 'excludedBackups',
|
||||||
|
data['lang']) }} <small class="text-muted ml-1"></small> </h4>
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
<ul>
|
||||||
|
{% for item in data['exclusions'] %}
|
||||||
|
<li>{{item}}</li>
|
||||||
|
<br>
|
||||||
|
{% end %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<style>
|
||||||
|
/* Remove default bullets */
|
||||||
|
.tree-view,
|
||||||
|
.tree-nested {
|
||||||
|
list-style-type: none;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
margin-left: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Style the items */
|
||||||
|
.tree-item,
|
||||||
|
.files-tree-title {
|
||||||
|
cursor: pointer;
|
||||||
|
user-select: none;
|
||||||
|
/* Prevent text selection */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Create the caret/arrow with a unicode, and style it */
|
||||||
|
.tree-caret .fa-folder {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tree-caret .fa-folder-open {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Rotate the caret/arrow icon when clicked on (using JavaScript) */
|
||||||
|
.tree-caret-down .fa-folder {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.tree-caret-down .fa-folder-open {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide the nested list */
|
||||||
|
.tree-nested {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
<!-- content-wrapper ends -->
|
||||||
|
|
||||||
|
{% end %}
|
||||||
|
|
||||||
|
{% block js %}
|
||||||
|
<script>
|
||||||
|
|
||||||
|
const server_id = new URLSearchParams(document.location.search).get('id')
|
||||||
|
const backup_id = new URLSearchParams(document.location.search).get('backup_id')
|
||||||
|
|
||||||
|
|
||||||
|
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
|
||||||
|
function getCookie(name) {
|
||||||
|
var r = document.cookie.match("\\b" + name + "=([^;]*)\\b");
|
||||||
|
return r ? r[1] : undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function backup_started() {
|
||||||
|
const token = getCookie("_xsrf")
|
||||||
|
let res = await fetch(`/api/v2/servers/${server_id}/action/backup_server/${backup_id}`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'X-XSRFToken': token
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let responseData = await res.json();
|
||||||
|
if (responseData.status === "ok") {
|
||||||
|
console.log(responseData);
|
||||||
|
$("#backup_button").prop('disabled', true)
|
||||||
|
} else {
|
||||||
|
|
||||||
|
bootbox.alert({
|
||||||
|
title: responseData.status,
|
||||||
|
message: responseData.error
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
async function del_backup(filename, id) {
|
||||||
|
const token = getCookie("_xsrf")
|
||||||
|
let contents = JSON.stringify({ "filename": filename })
|
||||||
|
let res = await fetch(`/api/v2/servers/${server_id}/backups/backup/${backup_id}/files/`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
headers: {
|
||||||
|
'token': token,
|
||||||
|
},
|
||||||
|
body: contents
|
||||||
|
});
|
||||||
|
let responseData = await res.json();
|
||||||
|
if (responseData.status === "ok") {
|
||||||
|
window.location.reload();
|
||||||
|
} else {
|
||||||
|
bootbox.alert({
|
||||||
|
"title": responseData.status,
|
||||||
|
"message": responseData.error
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function restore_backup(filename, id) {
|
||||||
|
const token = getCookie("_xsrf")
|
||||||
|
let contents = JSON.stringify({ "filename": filename })
|
||||||
|
var dialog = bootbox.dialog({
|
||||||
|
message: "<i class='fa fa-spin fa-spinner'></i> {{ translate('serverBackups', 'restoring', data['lang']) }}",
|
||||||
|
closeButton: false
|
||||||
|
});
|
||||||
|
let res = await fetch(`/api/v2/servers/${server_id}/backups/backup/${backup_id}/`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'token': token,
|
||||||
|
},
|
||||||
|
body: contents
|
||||||
|
});
|
||||||
|
let responseData = await res.json();
|
||||||
|
if (responseData.status === "ok") {
|
||||||
|
window.location.href = "/panel/dashboard";
|
||||||
|
} else {
|
||||||
|
bootbox.alert({
|
||||||
|
"title": responseData.status,
|
||||||
|
"message": responseData.error
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$("#before-check").on("click", function () {
|
||||||
|
if ($("#before-check:checked").val()) {
|
||||||
|
$("#backup_before").css("display", "inline-block");
|
||||||
|
} else {
|
||||||
|
$("#backup_before").css("display", "none");
|
||||||
|
$("#backup_before").val("");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
$("#after-check").on("click", function () {
|
||||||
|
if ($("#after-check:checked").val()) {
|
||||||
|
$("#backup_after").css("display", "inline-block");
|
||||||
|
} else {
|
||||||
|
$("#backup_after").css("display", "none");
|
||||||
|
$("#backup_after").val("");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
function replacer(key, value) {
|
||||||
|
if (key === "excluded_dirs") {
|
||||||
|
if (value == 0) {
|
||||||
|
return []
|
||||||
|
} else {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (key != "before" && key != "after") {
|
||||||
|
if (typeof value == "boolean" || key === "executable_update_url") {
|
||||||
|
return value
|
||||||
|
} else {
|
||||||
|
return (isNaN(value) ? value : +value);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$(document).ready(function () {
|
||||||
|
$(".backup-explain").on("click", function (e) {
|
||||||
|
e.preventDefault();
|
||||||
|
bootbox.alert($(this).data("explain"));
|
||||||
|
});
|
||||||
|
$(".cancel-button").on("click", function () {
|
||||||
|
location.href = `/panel/server_detail?id=${server_id}&subpage=backup`
|
||||||
|
});
|
||||||
|
webSocket.on('backup_status', function (backup) {
|
||||||
|
text = ``;
|
||||||
|
$(`#${backup.backup_id}_status`).show();
|
||||||
|
if (backup.percent >= 100) {
|
||||||
|
$(`#${backup.backup_id}_status`).hide()
|
||||||
|
setTimeout(function () {
|
||||||
|
window.location.reload(1);
|
||||||
|
}, 5000);
|
||||||
|
} else {
|
||||||
|
text = `<div class="progress-bar progress-bar-striped progress-bar-animated"
|
||||||
|
role="progressbar" style="width:${backup.percent}%;"
|
||||||
|
aria-valuenow="${backup.percent}" aria-valuemin="0" aria-valuemax="100">${backup.percent}%</div>`
|
||||||
|
|
||||||
|
$(`#${backup.backup_id}_status`).html(text);
|
||||||
|
}
|
||||||
|
});

    $("#backup-form").on("submit", async function (e) {
        e.preventDefault();
        const token = getCookie("_xsrf");
        let backupForm = document.getElementById("backup-form");

        let formData = new FormData(backupForm);
        // Remove checks that we don't need in form data.
        formData.delete("after-check");
        formData.delete("before-check");
        // Create an object from the form data entries
        let formDataObject = Object.fromEntries(formData.entries());
        // We need to make sure these are sent regardless of whether or not they're checked
        formDataObject.compress = $("#compress").prop('checked');
        formDataObject.shutdown = $("#shutdown").prop('checked');
        if ($("#root_files_button").hasClass("clicked")) {
            excluded = [];
            $('input.excluded:checkbox:checked').each(function () {
                excluded.push($(this).val());
            });
            formDataObject.excluded_dirs = excluded;
        }
        delete formDataObject.root_path;
        console.log(formDataObject);
        // Format the plain form data as JSON
        let formDataJsonString = JSON.stringify(formDataObject, replacer);

        console.log(formDataJsonString);
        let url = `/api/v2/servers/${server_id}/backups/backup/${backup_id}/`;
        let method = "PATCH";
        if (!backup_id) {
            url = `/api/v2/servers/${server_id}/backups/`;
            method = "POST";
        }
        let res = await fetch(url, {
            method: method,
            headers: {
                'X-XSRFToken': token
            },
            body: formDataJsonString,
        });
        let responseData = await res.json();
        if (responseData.status === "ok") {
            window.location.href = `/panel/server_detail?id=${server_id}&subpage=backup`;
        } else {
            bootbox.alert({
                title: responseData.error,
                message: responseData.error_data
            });
        }
    });
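
    // Illustrative only: an existing backup configuration is edited with
    //   PATCH /api/v2/servers/<server_id>/backups/backup/<backup_id>/
    // while a brand new configuration is created with
    //   POST  /api/v2/servers/<server_id>/backups/
    // Both receive the serialized form, e.g. (hypothetical field values):
    //   {"backup_name":"nightly","max_backups":5,"compress":true,"shutdown":false,"excluded_dirs":[]}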

    try {
        if ($('#backup_location').val() == '') {
            console.log('true');
            try {
                document.getElementById('backup_now_button').disabled = true;
            } catch {

            }
        } else {
            document.getElementById('backup_now_button').disabled = false;
        }
    } catch {
        try {
            document.getElementById('backup_now_button').disabled = false;
        } catch {

        }
    }
    console.log("ready!");
    $("#backup_config_box").hide();
    $("#backup_save_note").hide();

    $("#show_config").click(function () {
        $("#backup_config_box").toggle();
        $('#backup_button').hide();
        $('#backup_save_note').show();
        $('#backup_data').hide();
    });

    $('#backup_table').DataTable({
        "order": [[1, "desc"]],
        "paging": false,
        "lengthChange": false,
        "searching": true,
        "ordering": true,
        "info": true,
        "autoWidth": false,
        "responsive": true,
    });

    $(".del_button").click(function () {
        var file_to_del = $(this).data("file");
        var backup_location = $(this).data('backup_location');

        console.log("file to delete is " + file_to_del);

        bootbox.confirm({
            title: "{% raw translate('serverBackups', 'destroyBackup', data['lang']) %}",
            message: "{{ translate('serverBackups', 'confirmDelete', data['lang']) }}",
            buttons: {
                cancel: {
                    label: '<i class="fas fa-times"></i> {{ translate("serverBackups", "cancel", data['lang']) }}'
                },
                confirm: {
                    label: '<i class="fas fa-check"></i> {{ translate("serverBackups", "confirm", data['lang']) }}'
                }
            },
            callback: function (result) {
                console.log(result);
                if (result == true) {
                    var full_path = backup_location + '/' + file_to_del;
                    del_backup(file_to_del, server_id);
                }
            }
        });
    });

    $(".restore_button").click(function () {
        var file_to_restore = $(this).data("file");

        bootbox.confirm({
            title: "{{ translate('serverBackups', 'restore', data['lang']) }} " + file_to_restore,
            message: "{{ translate('serverBackups', 'confirmRestore', data['lang']) }}",
            buttons: {
                cancel: {
                    label: '<i class="fas fa-times"></i> {{ translate("serverBackups", "cancel", data['lang']) }}'
                },
                confirm: {
                    label: '<i class="fas fa-check"></i> {{ translate("serverBackups", "restore", data['lang']) }}',
                    className: 'btn-outline-danger'
                }
            },
            callback: function (result) {
                console.log(result);
                if (result == true) {
                    restore_backup(file_to_restore, server_id);
                }
            }
        });
    });

    $("#backup_now_button").click(function () {
        backup_started();
    });

});

document.getElementById("modal-cancel").addEventListener("click", function () {
    document.getElementById("root_files_button").classList.remove('clicked');
    document.getElementById("main-tree-div").innerHTML = '<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled><span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path=""><i class="far fa-folder"></i><i class="far fa-folder-open"></i>{{ translate("serverFiles", "files", data["lang"]) }}</span></input>';
});

document.getElementById("root_files_button").addEventListener("click", function () {
    if ($("#root_files_button").data('server_path') != "") {
        if (document.getElementById('root_files_button').classList.contains('clicked')) {
            show_file_tree();
            return;
        } else {
            document.getElementById('root_files_button').classList.add('clicked');
        }
        path = $("#root_files_button").data('server_path');
        console.log($("#root_files_button").data('server_path'));
        const token = getCookie("_xsrf");
        var dialog = bootbox.dialog({
            message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
            closeButton: false
        });

        setTimeout(function () {
            var x = document.querySelector('.bootbox');
            if (x) {
                x.remove();
            }
            var x = document.querySelector('.modal-backdrop');
            if (x) {
                x.remove();
            }
            document.getElementById('main-tree-input').setAttribute('value', path);
            getTreeView(path);
            show_file_tree();

        }, 5000);
    } else {
        bootbox.alert("You must input a path before selecting this button");
    }
});

function getDirView(event) {
    let path = event.target.parentElement.getAttribute("data-path");
    if (document.getElementById(path).classList.contains('clicked')) {
        return;
    } else {
        getTreeView(path);
    }
}

async function getTreeView(path) {
    console.log(path);
    const token = getCookie("_xsrf");
    let url = `/api/v2/servers/${server_id}/files/${backup_id}`;
    if (!backup_id) {
        url = `/api/v2/servers/${server_id}/files/`;
        console.log("NEW URL");
    }
    console.log(url);
    let res = await fetch(url, {
        method: 'POST',
        headers: {
            'X-XSRFToken': token
        },
        body: JSON.stringify({ "page": "backups", "path": path }),
    });
    let responseData = await res.json();
    if (responseData.status === "ok") {
        console.log(responseData);
        process_tree_response(responseData);
    } else {
        bootbox.alert({
            title: responseData.status,
            message: responseData.error
        });
    }
}
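
// Illustrative only: the file listing endpoint is POSTed a body like
//   { "page": "backups", "path": "/home/crafty/servers/my-server" }   // path value here is made up
// and an "ok" response is handed to process_tree_response() below, which expects
// response.data.root_path.{path, top} plus one entry per file or folder carrying
// { path, dir, excluded } (shape inferred from the code below, not from API docs).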

function process_tree_response(response) {
    let path = response.data.root_path.path;
    let text = `<ul class="tree-nested d-block" id="${path}ul">`;
    Object.entries(response.data).forEach(([key, value]) => {
        if (key === "root_path" || key === "db_stats") {
            // continue is not valid in forEach. Return acts as a continue.
            return;
        }
        let checked = "";
        let dpath = value.path;
        let filename = key;
        if (value.excluded) {
            checked = "checked";
        }
        if (value.dir) {
            text += `<li class="tree-item" data-path="${dpath}">
                \n<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
                <input type="checkbox" class="checkBoxClass excluded" value="${dpath}" ${checked}>
                <span id="${dpath}span" class="files-tree-title" data-path="${dpath}" data-name="${filename}" onclick="getDirView(event)">
                <i style="color: var(--info);" class="far fa-folder"></i>
                <i style="color: var(--info);" class="far fa-folder-open"></i>
                <strong>${filename}</strong>
                </span>
                </input></div></li>`;
        } else {
            text += `<li
                class="d-block tree-ctx-item tree-file"
                data-path="${dpath}"
                data-name="${filename}"
                onclick=""><input type='checkbox' class="checkBoxClass excluded" name='root_path' value="${dpath}" ${checked}><span style="margin-right: 6px;">
                <i class="far fa-file"></i></span></input>${filename}</li>`;
        }
    });
    text += `</ul>`;
    if (response.data.root_path.top) {
        try {
            document.getElementById('main-tree-div').innerHTML += text;
            document.getElementById('main-tree').parentElement.classList.add("clicked");
        } catch {
            document.getElementById('files-tree').innerHTML = text;
        }
    } else {
        try {
            document.getElementById(path + "span").classList.add('tree-caret-down');
            document.getElementById(path).innerHTML += text;
            document.getElementById(path).classList.add("clicked");
        } catch {
            console.log("Bad");
        }

        var toggler = document.getElementById(path + "span");

        if (toggler.classList.contains('files-tree-title')) {
            document.getElementById(path + "span").addEventListener("click", function caretListener() {
                document.getElementById(path + "ul").classList.toggle("d-block");
                document.getElementById(path + "span").classList.toggle("tree-caret-down");
            });
        }
    }
}

function getToggleMain(event) {
    path = event.target.parentElement.getAttribute('data-path');
    document.getElementById("files-tree").classList.toggle("d-block");
    document.getElementById(path + "span").classList.toggle("tree-caret-down");
    document.getElementById(path + "span").classList.toggle("tree-caret");
}

function show_file_tree() {
    $("#dir_select").modal();
}

</script>

{% end %}

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

@@ -67,7 +67,8 @@
 translate('serverFiles', 'download', data['lang']) }}</a>
 <a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteFile" href="#"
 style="color: red">{{ translate('serverFiles', 'delete', data['lang']) }}</a>
-<a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteDir" href="#" style="color: red">{{
+<a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteDir" href="#"
+style="color: red">{{
 translate('serverFiles', 'delete', data['lang']) }}</a>
 <a href="javascript:void(0)" class="closebtn" style="color: var(--info);"
 onclick="document.getElementById('files-tree-nav').style.display = 'none';">{{

@@ -156,7 +157,8 @@
 right: 35px;
 }
 }
-.tree-file:hover{
+
+.tree-file:hover {
 cursor: pointer;
 }
 </style>

@@ -721,105 +723,7 @@
 }
 }
-
-async function sendFile(file, path, serverId, left, i, onProgress) {
-    let xmlHttpRequest = new XMLHttpRequest();
-    let token = getCookie("_xsrf")
-    let fileName = file.name
-    let target = '/upload?server_id=' + serverId
-    let mimeType = file.type
-    let size = file.size
-
-    xmlHttpRequest.upload.addEventListener('progress', function (e) {
-
-        if (e.loaded <= size) {
-            var percent = Math.round(e.loaded / size * 100);
-            $(`#upload-progress-bar-${i + 1}`).css('width', percent + '%');
-            $(`#upload-progress-bar-${i + 1}`).html(percent + '%');
-        }
-    });
-
-    xmlHttpRequest.open('POST', target, true);
-    xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
-    xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
-    xmlHttpRequest.setRequestHeader('X-Content-Length', size);
-    xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
-    xmlHttpRequest.setRequestHeader('X-Path', path);
-    xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', 'server_files')
-    xmlHttpRequest.setRequestHeader('X-Files-Left', left);
-    xmlHttpRequest.setRequestHeader('X-FileName', fileName);
-    xmlHttpRequest.setRequestHeader('X-ServerId', serverId);
-    xmlHttpRequest.upload.addEventListener('progress', (event) =>
-        onProgress(Math.floor(event.loaded / event.total * 100)), false);
-    xmlHttpRequest.addEventListener('load', (event) => {
-        if (event.target.responseText == 'success') {
-            console.log('Upload for file', file.name, 'was successful!');
-            let caught = false;
-            try {
-                if (document.getElementById(path).classList.contains("clicked")) {
-                    var expanded = true;
-                }
-            } catch {
-                var expanded = false;
-            }
-
-            try {
-                var par_el = document.getElementById(path + "ul");
-                var items = par_el.children;
-            } catch (err) {
-                console.log(err)
-                caught = true;
-                var par_el = document.getElementById("files-tree");
-                var items = par_el.children;
-            }
-            let name = file.name;
-            console.log(par_el)
-            let full_path = path + '/' + name
-            let flag = false;
-            for (var k = 0; k < items.length; ++k) {
-                if ($(items[k]).attr("data-name") == name) {
-                    flag = true;
-                }
-            }
-            if (!flag) {
-                if (caught && expanded == false) {
-                    $(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="d-block tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
-                } else if (expanded == true) {
-                    $(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
-                }
-                setTreeViewContext();
-            }
-            $(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped");
-            $(`#upload-progress-bar-${i + 1}`).addClass("bg-success");
-            $(`#upload-progress-bar-${i + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>')
-        }
-        else {
-            let response_text = JSON.parse(event.target.responseText);
-            var x = document.querySelector('.bootbox');
-            if (x) {
-                x.remove()
-            }
-            var x = document.querySelector('.modal-content');
-            if (x) {
-                x.remove()
-            }
-            console.log(JSON.parse(event.target.responseText).info)
-            bootbox.alert({
-                message: JSON.parse(event.target.responseText).info,
-                callback: function () {
-                    window.location.reload();
-                }
-            });
-            doUpload = false;
-        }
-    }, false);
-    xmlHttpRequest.addEventListener('error', (e) => {
-        console.error('Error while uploading file', file.name + '.', 'Event:', e)
-    }, false);
-    xmlHttpRequest.send(file);
-}
-
 let uploadWaitDialog;
-let doUpload = true;
-
 async function uploadFilesE(event) {
 path = event.target.parentElement.getAttribute('data-path');

@@ -842,6 +746,9 @@
 label: "{{ translate('serverFiles', 'upload', data['lang']) }}",
 className: "btn-default",
 callback: async function () {
+    if ($("#files").get(0).files.length === 0) {
+        return hideUploadBox();
+    }
 var height = files.files.length * 50;

 var waitMessage = '<p class="text-center mb-0">' +

@@ -858,16 +765,13 @@
 });

 let nFiles = files.files.length;
-for (i = 0; i < nFiles; i++) {
-    if (!doUpload) {
-        doUpload = true;
-        hideUploadBox();
-        break;
-    }
-
+const uploadPromises = [];
+for (let i = 0; i < nFiles; i++) {
+    const file = files.files[i];
 const progressHtml = `
 <div style="width: 100%; min-width: 100%;">
-${files.files[i].name}:
+${file.name}:
 <br><div
 id="upload-progress-bar-${i + 1}"
 class="progress-bar progress-bar-striped progress-bar-animated"

@@ -879,33 +783,38 @@
 ></div>
 </div><br>
 `;

 $('#upload-progress-bar-parent').append(progressHtml);

-await sendFile(files.files[i], path, serverId, nFiles - i - 1, i, (progress) => {
+const uploadPromise = uploadFile("server_upload", file, path, i, (progress) => {
     $(`#upload-progress-bar-${i + 1}`).attr('aria-valuenow', progress)
     $(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
 });
+uploadPromises.push(uploadPromise);
 }

+await Promise.all(uploadPromises);
+setTimeout(() => {
 hideUploadBox();
-//$('#upload_file').submit(); //.trigger('submit');
+}, 2000);

 }
 }
 }
 });

-var fileList = document.getElementById("files");
-fileList.addEventListener("change", function (e) {
-    var list = "";
-    let files = Array.from(this.files)
-    files.forEach(file => {
-        list += "<li class='col-xs-12 file-list'>" + file.name + "</li>"
-    })
-
-    document.getElementById("fileList").innerHTML = list;
-}, false);
 });
 }

+async function calculateFileHash(file) {
+    const arrayBuffer = await file.arrayBuffer();
+    const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
+    const hashArray = Array.from(new Uint8Array(hashBuffer));
+    const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
+
+    return hashHex;
+}
+
 function getDirView(event) {
 let path = event.target.parentElement.getAttribute("data-path");
 if (document.getElementById(path).classList.contains('clicked')) {

@@ -1211,5 +1120,5 @@


 </script>
+<script src="../../static/assets/js/shared/upload.js"></script>
 {% end %}
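
For reference, the calculateFileHash helper added above can be called like this (a minimal sketch; the element lookup and logging are illustrative and not part of the diff):

    // Hash the first file selected in the upload dialog's #files input with the Web Crypto API
    const picked = document.getElementById("files").files[0];
    calculateFileHash(picked).then((sha256Hex) => {
        console.log("SHA-256 of", picked.name, "is", sha256Hex);
    });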

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

@@ -79,6 +79,24 @@
 <option id="command" value="command">{{ translate('serverScheduleConfig', 'custom' , data['lang'])
 }}</option>
 </select>
+<div id="ifBackup" style="display: none;">
+    <br>
+    <label for="action_id">{{ translate('serverSchedules', 'actionId' , data['lang']) }}<small
+        class="text-muted ml-1"></small> </label><br>
+    <select id="action_id" name="action_id"
+        class="form-control form-control-lg select-css" value="{{ data['schedule']['action_id'] }}">
+        {% for backup in data["backups"] %}
+        {% if backup.backup_id == data["schedule"]["action_id"] %}
+        <option id="{{backup.backup_id}}" value="{{backup.backup_id}}">{{backup.backup_name}}</option>
+        {% end %}
+        {% end %}
+        {% for backup in data["backups"] %}
+        {% if backup.backup_id != data["schedule"]["action_id"] %}
+        <option id="{{backup.backup_id}}" value="{{backup.backup_id}}">{{backup.backup_name}}</option>
+        {% end %}
+        {% end %}
+    </select>
+</div>
 </div>
 <div id="ifBasic">
 <div class="form-group">

@@ -232,7 +250,7 @@
 }

 function replacer(key, value) {
-    if (key != "start_time" && key != "cron_string" && key != "interval_type") {
+    if (key != "start_time" && key != "cron_string" && key != "interval_type" && key != "action_id") {
 if (typeof value == "boolean") {
 return value
 }

@@ -247,7 +265,7 @@
 }
 } else if (value === "" && key == "start_time"){
 return "00:00";
-}else{
+}else {
 return value;
 }
 }

@@ -281,6 +299,11 @@
 // Format the plain form data as JSON
 let formDataJsonString = JSON.stringify(formDataObject, replacer);

+let data = JSON.parse(formDataJsonString)
+if (data["action"] === "backup" && !data["action_id"]){
+    return bootbox.alert("Validation Failed")
+}
+
 let res = await fetch(`/api/v2/servers/${serverId}/tasks/`, {
 method: 'POST',
 headers: {

@@ -358,6 +381,14 @@
 document.getElementById("ifYes").style.display = "none";
 document.getElementById("command_input").required = false;
 }
+if (document.getElementById('action').value == "backup"){
+    document.getElementById("ifBackup").style.display = "block";
+    document.getElementById("action_id").required = true;
+} else {
+    document.getElementById("ifBackup").style.display = "none";
+    document.getElementById("action_id").required = false;
+    $("#action_id").val(null);
+}
 }
 function basicAdvanced() {
 if (document.getElementById('difficulty').value == "advanced") {
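
With these changes, a schedule whose action is "backup" must also carry the id of the backup configuration to run, and the submit handler now rejects the form otherwise. A rough sketch of the serialized task payload under that rule (field values are illustrative, not taken from the diff):

    // Hypothetical example: passes the new check because action_id is set
    // {"action":"backup","action_id":"3","interval_type":"days","start_time":"02:00"}
    // Hypothetical example: rejected with the "Validation Failed" alert because action_id is empty
    // {"action":"backup","action_id":""}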

@@ -17,7 +17,7 @@
 {{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
 data['server_stats']['server_id']['server_name'] }}
 <br />
-<small>UUID: {{ data['server_stats']['server_id']['server_uuid'] }}</small>
+<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
 </h4>
 </div>
 </div>

Some files were not shown because too many files have changed in this diff.