Mirror of https://gitlab.com/crafty-controller/crafty-4.git (synced 2024-08-30 18:23:09 +00:00)

Merge branch 'dev' into 'master'

Crafty Controller 4.0 Beta 1 🎉

See merge request crafty-controller/crafty-4!237

Commit a0ed862a51
@@ -6,6 +6,7 @@ docker-compose.yml

# git & gitlab related
.git/
.gitlab/
.gitignore
.gitlab-ci.yml
@@ -2,15 +2,25 @@

root = true

[*.{js,py,html}]
[*]
charset = utf-8
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
# end_of_line = lf

[*.py]
indent_style = space
indent_size = 4
profile = black
# > Handled by Black
# indent_style = space
# indent_size = 4

[*.{js,html}]
[*.md]
trim_trailing_whitespace = false

[*.html]
indent_style = space
indent_size = 2

[*.js]
indent_style = tab
indent_size = 4
.gitignore (vendored): 7 changes
@@ -10,6 +10,7 @@ __pycache__/

.env
.venv
default.env
env/
venv/
ENV/
@@ -17,8 +18,10 @@ env.bak/
venv.bak/

.idea/
servers/
backups/
/servers/
/backups/
/docker/servers/
/docker/backups/
session.lock
.header
default.json
.pylintrc: 23 changes
@@ -78,7 +78,9 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=abstract-method,
disable=C0330,
C0326,
abstract-method,
attribute-defined-outside-init,
bad-inline-option,
bare-except,
@@ -92,7 +94,6 @@ disable=abstract-method,
fixme,
import-error,
inconsistent-return-statements,
invalid-name,
locally-disabled,
logging-format-interpolation,
logging-fstring-interpolation,
@@ -234,10 +235,20 @@ function-naming-style=snake_case
#function-rgx=

# Good variable names which should always be accepted, separated by a comma.
good-names=i,
good-names=e,
ex,
f,
i,
id,
ip,
j,
k,
ex,
p,
r,
rs,
s,
tz,
v,
Run,
_

@@ -306,7 +317,7 @@ indent-after-paren=4
indent-string='    '

# Maximum number of characters on a single line.
max-line-length=150
max-line-length=88

# Maximum number of lines in a module.
max-module-lines=2000
@@ -432,7 +443,7 @@ ignored-classes=optparse.Values,thread._local,_thread._local
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
ignored-modules=jsonschema,orjson

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
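The .pylintrc hunks above add C0330 and C0326 to the disable list (both clash with Black's formatting) and lower max-line-length from 150 to 88, Black's default line length. A minimal sketch of a matching local check follows; the `app` target path, the pip invocation, and the exact tool versions are assumptions for illustration, not taken from the project's CI configuration:

```sh
# Hypothetical local lint/format check; package set and paths are assumptions.
pip install black pylint
black --check .                  # Black enforces formatting (88-char lines by default)
pylint --rcfile=.pylintrc app    # pylint now allows at most 88 characters per line
```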
Dockerfile: 29 changes
@@ -2,15 +2,13 @@ FROM ubuntu:20.04

ENV DEBIAN_FRONTEND="noninteractive"

LABEL maintainer="Dockerfile created by Zedifus <https://gitlab.com/zedifus>"

# Security Patch for CVE-2021-44228
ENV LOG4J_FORMAT_MSG_NO_LOOKUPS=true

# Create non-root user & required dirs
RUN useradd -g root -M crafty \
&& mkdir /commander \
&& chown -R crafty:root /commander
&& mkdir /crafty \
&& chown -R crafty:root /crafty

# Install required system packages
RUN apt-get update \
@@ -32,7 +30,7 @@ RUN apt-get update \

# Switch to service user for installing crafty deps
USER crafty
WORKDIR /commander
WORKDIR /crafty
COPY --chown=crafty:root requirements.txt ./
RUN python3 -m venv ./.venv \
&& . .venv/bin/activate \
@@ -53,6 +51,23 @@ EXPOSE 8443
EXPOSE 19132
EXPOSE 25500-25600

# Start Crafty Commander through wrapper
ENTRYPOINT ["/commander/docker_launcher.sh"]
# Start Crafty through wrapper
ENTRYPOINT ["/crafty/docker_launcher.sh"]
CMD ["-v", "-d", "-i"]

# Add meta labels
ARG BUILD_DATE
ARG BUILD_REF
ARG CRAFTY_VER
LABEL \
maintainer="Zedifus <https://gitlab.com/zedifus>" \
org.opencontainers.image.created=${BUILD_DATE} \
org.opencontainers.image.revision=${BUILD_REF} \
org.opencontainers.image.version=${CRAFTY_VER} \
org.opencontainers.image.title="Crafty Controller" \
org.opencontainers.image.description="A Game Server Control Panel / Launcher" \
org.opencontainers.image.url="https://craftycontrol.com/" \
org.opencontainers.image.documentation="https://wiki.craftycontrol.com/" \
org.opencontainers.image.source="https://gitlab.com/crafty-controller/crafty-4" \
org.opencontainers.image.vendor="Arcadia Technology, LLC." \
org.opencontainers.image.licenses="GPL-3.0"
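The Dockerfile diff above renames the install path from /commander to /crafty and adds BUILD_DATE, BUILD_REF and CRAFTY_VER build arguments that feed the OCI image labels. A minimal sketch of supplying those arguments in a local build; the values shown are placeholders, not the project's CI variables:

```sh
# Illustrative only: the metadata values here are placeholders.
docker build . -t crafty \
  --build-arg BUILD_DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
  --build-arg BUILD_REF="$(git rev-parse --short HEAD)" \
  --build-arg CRAFTY_VER="4.0.0-beta"
```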
README.md: 88 changes
@@ -1,4 +1,12 @@
# Crafty Controller 4.0.0-alpha.3.5
[](https://craftycontrol.com)

[](https://github.com/psf/black)
[](https://www.python.org)
[](https://gitlab.com/crafty-controller/crafty-4)
[](https://gitlab.com/crafty-controller/crafty-4)
[](https://gitlab.com/crafty-controller/crafty-4/-/commits/master)

# Crafty Controller 4.0.0-beta
> Python based Control Panel for your Minecraft Server

## What is Crafty Controller?
@@ -15,7 +23,7 @@ Project Homepage - https://craftycontrol.com

Discord Server - https://discord.gg/9VJPhCE

Git Repository - https://gitlab.com/crafty-controller/crafty-web
Git Repository - https://gitlab.com/crafty-controller/crafty-4

<br>

@@ -31,7 +39,7 @@ With `Crafty Controller 4.0` we have focused on building our DevOps Principles,
> __**⚠ 🔻WARNING: [WSL/WSL2 | WINDOWS 11 | DOCKER DESKTOP]🔻**__ <br>
BE ADVISED! Upstream is currently broken for Minecraft running on **Docker under WSL/WSL2, Windows 11 / DOCKER DESKTOP!** <br>
On '**Stop**' or '**Restart**' of the MC Server, there is a 90% chance the World's Chunks will be shredded irreparably! <br>
Please only run Docker on Linux, If you are using Windows we have a portable installs found here: [Latest-Stable](https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/master/download?job=win-prod-build), [Latest-Development](https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/dev/download?job=win-dev-build)
Please only run Docker on Linux, If you are using Windows we have a portable installs found here: [Latest-Stable](https://gitlab.com/crafty-controller/crafty-4/-/jobs/artifacts/master/download?job=win-prod-build), [Latest-Development](https://gitlab.com/crafty-controller/crafty-4/-/jobs/artifacts/dev/download?job=win-dev-build)

----

@@ -47,35 +55,16 @@ As the Dockerfile uses the permission structure of `crafty:root` **internally**
### - Using the registry image 🌎
The provided image supports both `arm64` and `amd64` out the box, if you have issues though you can build it yourself with the `compose` file in `docker/`.

The image is located at: `registry.gitlab.com/crafty-controller/crafty-commander:latest`
The image is located at: `registry.gitlab.com/crafty-controller/crafty-4:latest`
| Branch | Status |
| ----------------- | ------------------------------------------------------------------ |
| :latest | [](https://gitlab.com/crafty-controller/crafty-commander/-/commits/master) |
| :dev | [](https://gitlab.com/crafty-controller/crafty-commander/-/commits/dev) |
| :latest | [](https://gitlab.com/crafty-controller/crafty-4/-/commits/master) |
| :dev | [](https://gitlab.com/crafty-controller/crafty-4/-/commits/dev)

While the repository is still **private / pre-release**,
Before you can pull the image you must authenticate docker with the Container Registry.
<br>

To authenticate you will need a [personal access token](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html)
with the minimum scope:
**Here are some example methods for getting started🚀:**

- For read (*pull*) access, `read_registry`.
- For write (*push*) access, `write_registry`.

When you have this just run:
```bash
$ docker login registry.gitlab.com -u <username> -p <token>
```
or
```bash
$ echo <token> | docker login registry.gitlab.com -u <username> --password-stdin
```
or
```bash
$ cat ~/my_password.txt | docker login registry.gitlab.com -u <username> --password-stdin
```

Then use one of the following methods:
### **docker-compose.yml:**
```sh
# Make your compose file
@@ -86,8 +75,9 @@ version: '3'

services:
crafty:
container_name: crafty_commander
image: registry.gitlab.com/crafty-controller/crafty-commander:latest
container_name: crafty_container
image: registry.gitlab.com/crafty-controller/crafty-4:latest
restart: always
environment:
- TZ=Etc/UTC
ports:
@@ -97,11 +87,11 @@ services:
- "19132:19132/udp" # BEDROCK
- "25500-25600:25500-25600" # MC SERV PORT RANGE
volumes:
- ./docker/backups:/commander/backups
- ./docker/logs:/commander/logs
- ./docker/servers:/commander/servers
- ./docker/config:/commander/app/config
- ./docker/import:/commander/import
- ./docker/backups:/crafty/backups
- ./docker/logs:/crafty/logs
- ./docker/servers:/crafty/servers
- ./docker/config:/crafty/app/config
- ./docker/import:/crafty/import
```
```sh
$ docker-compose up -d && docker-compose logs -f
@@ -111,19 +101,21 @@ $ docker-compose up -d && docker-compose logs -f
### **docker run:**
```sh
$ docker run \
--name crafty_commander \
--name crafty_container \
--detach \
--restart always \
-p 8000:8000 \
-p 8443:8443 \
-p 8123:8123 \
-p 19132:19132/udp \
-p 25500-25600:25500-25600 \
-e TZ=Etc/UTC \
-v "/$(pwd)/docker/backups:/commander/backups" \
-v "/$(pwd)/docker/logs:/commander/logs" \
-v "/$(pwd)/docker/servers:/commander/servers" \
-v "/$(pwd)/docker/config:/commander/app/config" \
-v "/$(pwd)/docker/import:/commander/import" \
registry.gitlab.com/crafty-controller/crafty-commander:latest
-v "/$(pwd)/docker/backups:/crafty/backups" \
-v "/$(pwd)/docker/logs:/crafty/logs" \
-v "/$(pwd)/docker/servers:/crafty/servers" \
-v "/$(pwd)/docker/config:/crafty/app/config" \
-v "/$(pwd)/docker/import:/crafty/import" \
registry.gitlab.com/crafty-controller/crafty-4:latest
```

### **Building from the cloned repository:**
@@ -136,18 +128,20 @@ If you'd rather not use `docker-compose` you can use the following `docker run`
$ docker build . -t crafty

$ docker run \
--name crafty_commander \
--name crafty_container \
--detach \
--restart always \
-p 8000:8000 \
-p 8443:8443 \
-p 8123:8123 \
-p 19132:19132/udp \
-p 25500-25600:25500-25600 \
-e TZ=Etc/UTC \
-v "/$(pwd)/docker/backups:/commander/backups" \
-v "/$(pwd)/docker/logs:/commander/logs" \
-v "/$(pwd)/docker/servers:/commander/servers" \
-v "/$(pwd)/docker/config:/commander/app/config" \
-v "/$(pwd)/docker/import:/commander/import" \
-v "/$(pwd)/docker/backups:/crafty/backups" \
-v "/$(pwd)/docker/logs:/crafty/logs" \
-v "/$(pwd)/docker/servers:/crafty/servers" \
-v "/$(pwd)/docker/config:/crafty/app/config" \
-v "/$(pwd)/docker/import:/crafty/import" \
crafty
```
A fresh build will take several minutes depending on your system, but will be rapid thereafter.
@@ -1,60 +1,85 @@
import logging

from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
from app.classes.models.crafty_permissions import (
PermissionsCrafty,
EnumPermissionsCrafty,
)
from app.classes.models.users import ApiKeys

logger = logging.getLogger(__name__)

class Crafty_Perms_Controller:

class CraftyPermsController:
@staticmethod
def list_defined_crafty_permissions():
permissions_list = crafty_permissions.get_permissions_list()
permissions_list = PermissionsCrafty.get_permissions_list()
return permissions_list

@staticmethod
def get_mask_crafty_permissions(user_id):
permissions_mask = crafty_permissions.get_crafty_permissions_mask(user_id)
permissions_mask = PermissionsCrafty.get_crafty_permissions_mask(user_id)
return permissions_mask

@staticmethod
def set_permission(permission_mask, permission_tested: Enum_Permissions_Crafty, value):
return crafty_permissions.set_permission(permission_mask, permission_tested, value)
def set_permission(
permission_mask, permission_tested: EnumPermissionsCrafty, value
):
return PermissionsCrafty.set_permission(
permission_mask, permission_tested, value
)

@staticmethod
def can_create_server(user_id):
return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Server_Creation)
return PermissionsCrafty.can_add_in_crafty(
user_id, EnumPermissionsCrafty.SERVER_CREATION
)

@staticmethod
def can_add_user(): # Add back argument 'user_id' when you work on this
#TODO: Complete if we need a User Addition limit
#return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.User_Config)
return True
def can_add_user(user_id):
return PermissionsCrafty.can_add_in_crafty(
user_id, EnumPermissionsCrafty.USER_CONFIG
)

@staticmethod
def can_add_role(): # Add back argument 'user_id' when you work on this
#TODO: Complete if we need a Role Addition limit
#return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Roles_Config)
return True
def can_add_role(user_id):
return PermissionsCrafty.can_add_in_crafty(
user_id, EnumPermissionsCrafty.ROLES_CONFIG
)

@staticmethod
def list_all_crafty_permissions_quantity_limits():
return crafty_permissions.get_all_permission_quantity_list()
return PermissionsCrafty.get_all_permission_quantity_list()

@staticmethod
def list_crafty_permissions_quantity_limits(user_id):
return crafty_permissions.get_permission_quantity_list(user_id)
return PermissionsCrafty.get_permission_quantity_list(user_id)

@staticmethod
def get_crafty_permissions_list(user_id):
permissions_mask = crafty_permissions.get_crafty_permissions_mask(user_id)
permissions_list = crafty_permissions.get_permissions(permissions_mask)
permissions_mask = PermissionsCrafty.get_crafty_permissions_mask(user_id)
permissions_list = PermissionsCrafty.get_permissions(permissions_mask)
return permissions_list

@staticmethod
def add_server_creation(user_id):
return crafty_permissions.add_server_creation(user_id)
"""Increase the "Server Creation" counter for this user

Args:
user_id (int): The modifiable user's ID

Returns:
int: The new count of servers created by this user
"""
return PermissionsCrafty.add_server_creation(user_id)

@staticmethod
def add_user_creation(user_id):
return PermissionsCrafty.add_user_creation(user_id)

@staticmethod
def add_role_creation(user_id):
return PermissionsCrafty.add_role_creation(user_id)

@staticmethod
def get_api_key_permissions_list(key: ApiKeys):
return crafty_permissions.get_api_key_permissions_list(key)
return PermissionsCrafty.get_api_key_permissions_list(key)
@@ -1,120 +1,144 @@
import logging

from app.classes.models.management import management_helper
from app.classes.models.servers import servers_helper
from app.classes.models.management import HelpersManagement
from app.classes.models.servers import HelperServers

logger = logging.getLogger(__name__)

class Management_Controller:

#************************************************************************************************
class ManagementController:
def __init__(self, management_helper):
self.management_helper = management_helper

# **********************************************************************************
# Host_Stats Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_latest_hosts_stats():
return management_helper.get_latest_hosts_stats()
return HelpersManagement.get_latest_hosts_stats()

#************************************************************************************************
# **********************************************************************************
# Commands Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_unactioned_commands():
return management_helper.get_unactioned_commands()
return HelpersManagement.get_unactioned_commands()

@staticmethod
def send_command(user_id, server_id, remote_ip, command):
server_name = servers_helper.get_server_friendly_name(server_id)
def send_command(self, user_id, server_id, remote_ip, command):
server_name = HelperServers.get_server_friendly_name(server_id)

# Example: Admin issued command start_server for server Survival
management_helper.add_to_audit_log(user_id, f"issued command {command} for server {server_name}", server_id, remote_ip)
management_helper.add_command(server_id, user_id, remote_ip, command)
self.management_helper.add_to_audit_log(
user_id,
f"issued command {command} for server {server_name}",
server_id,
remote_ip,
)
HelpersManagement.add_command(server_id, user_id, remote_ip, command)

@staticmethod
def mark_command_complete(command_id=None):
return management_helper.mark_command_complete(command_id)
return HelpersManagement.mark_command_complete(command_id)

#************************************************************************************************
# **********************************************************************************
# Audit_Log Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_actity_log():
return management_helper.get_actity_log()
return HelpersManagement.get_actity_log()

@staticmethod
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
return management_helper.add_to_audit_log(user_id, log_msg, server_id, source_ip)
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
return self.management_helper.add_to_audit_log(
user_id, log_msg, server_id, source_ip
)

@staticmethod
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
return management_helper.add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip)
def add_to_audit_log_raw(self, user_name, user_id, server_id, log_msg, source_ip):
return self.management_helper.add_to_audit_log_raw(
user_name, user_id, server_id, log_msg, source_ip
)

#************************************************************************************************
# **********************************************************************************
# Schedules Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def create_scheduled_task(server_id, action, interval, interval_type, start_time, command, comment=None, enabled=True):
return management_helper.create_scheduled_task(
server_id,
action,
interval,
interval_type,
start_time,
command,
comment,
enabled
)
def create_scheduled_task(
server_id,
action,
interval,
interval_type,
start_time,
command,
comment=None,
enabled=True,
):
return HelpersManagement.create_scheduled_task(
server_id,
action,
interval,
interval_type,
start_time,
command,
comment,
enabled,
)

@staticmethod
def delete_scheduled_task(schedule_id):
return management_helper.delete_scheduled_task(schedule_id)
return HelpersManagement.delete_scheduled_task(schedule_id)

@staticmethod
def update_scheduled_task(schedule_id, updates):
return management_helper.update_scheduled_task(schedule_id, updates)
return HelpersManagement.update_scheduled_task(schedule_id, updates)

@staticmethod
def get_scheduled_task(schedule_id):
return management_helper.get_scheduled_task(schedule_id)
return HelpersManagement.get_scheduled_task(schedule_id)

@staticmethod
def get_scheduled_task_model(schedule_id):
return management_helper.get_scheduled_task_model(schedule_id)
return HelpersManagement.get_scheduled_task_model(schedule_id)

@staticmethod
def get_child_schedules(sch_id):
return management_helper.get_child_schedules(sch_id)
return HelpersManagement.get_child_schedules(sch_id)

@staticmethod
def get_schedules_by_server(server_id):
return management_helper.get_schedules_by_server(server_id)
return HelpersManagement.get_schedules_by_server(server_id)

@staticmethod
def get_schedules_all():
return management_helper.get_schedules_all()
return HelpersManagement.get_schedules_all()

@staticmethod
def get_schedules_enabled():
return management_helper.get_schedules_enabled()
return HelpersManagement.get_schedules_enabled()

#************************************************************************************************
# **********************************************************************************
# Backups Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_backup_config(server_id):
return management_helper.get_backup_config(server_id)
return HelpersManagement.get_backup_config(server_id)

@staticmethod
def set_backup_config(server_id: int, backup_path: str = None, max_backups: int = None, excluded_dirs: list = None, compress: bool = False,):
return management_helper.set_backup_config(server_id, backup_path, max_backups, excluded_dirs, compress)
def set_backup_config(
self,
server_id: int,
backup_path: str = None,
max_backups: int = None,
excluded_dirs: list = None,
compress: bool = False,
):
return self.management_helper.set_backup_config(
server_id, backup_path, max_backups, excluded_dirs, compress
)

@staticmethod
def get_excluded_backup_dirs(server_id: int):
return management_helper.get_excluded_backup_dirs(server_id)
return HelpersManagement.get_excluded_backup_dirs(server_id)

@staticmethod
def add_excluded_backup_dir(server_id: int, dir_to_add: str):
management_helper.add_excluded_backup_dir(server_id, dir_to_add)
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)

@staticmethod
def del_excluded_backup_dir(server_id: int, dir_to_del: str):
management_helper.del_excluded_backup_dir(server_id, dir_to_del)
def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
self.management_helper.del_excluded_backup_dir(server_id, dir_to_del)
@@ -1,32 +1,39 @@
import logging
import typing as t

from app.classes.models.roles import roles_helper
from app.classes.models.server_permissions import server_permissions
from app.classes.models.users import users_helper
from app.classes.shared.helpers import helper
from app.classes.models.roles import HelperRoles
from app.classes.models.server_permissions import PermissionsServers, RoleServers
from app.classes.shared.helpers import Helpers

logger = logging.getLogger(__name__)

class Roles_Controller:

class RolesController:
def __init__(self, users_helper, roles_helper):
self.users_helper = users_helper
self.roles_helper = roles_helper

@staticmethod
def get_all_roles():
return roles_helper.get_all_roles()
return HelperRoles.get_all_roles()

@staticmethod
def get_all_role_ids():
return HelperRoles.get_all_role_ids()

@staticmethod
def get_roleid_by_name(role_name):
return roles_helper.get_roleid_by_name(role_name)
return HelperRoles.get_roleid_by_name(role_name)

@staticmethod
def get_role(role_id):
return roles_helper.get_role(role_id)

return HelperRoles.get_role(role_id)

@staticmethod
def update_role(role_id: str, role_data = None, permissions_mask: str = "00000000"):
def update_role(role_id: str, role_data=None, permissions_mask: str = "00000000"):
if role_data is None:
role_data = {}
base_data = Roles_Controller.get_role_with_servers(role_id)
base_data = RolesController.get_role_with_servers(role_id)
up_data = {}
added_servers = set()
removed_servers = set()
@@ -34,49 +41,140 @@ class Roles_Controller:
if key == "role_id":
continue
elif key == "servers":
added_servers = role_data['servers'].difference(base_data['servers'])
removed_servers = base_data['servers'].difference(role_data['servers'])
added_servers = set(role_data["servers"]).difference(
set(base_data["servers"])
)
removed_servers = set(base_data["servers"]).difference(
set(role_data["servers"])
)
elif base_data[key] != role_data[key]:
up_data[key] = role_data[key]
up_data['last_update'] = helper.get_time_as_string()
logger.debug(f"role: {role_data} +server:{added_servers} -server{removed_servers}")
up_data["last_update"] = Helpers.get_time_as_string()
logger.debug(
f"role: {role_data} +server:{added_servers} -server{removed_servers}"
)
for server in added_servers:
server_permissions.get_or_create(role_id, server, permissions_mask)
for server in base_data['servers']:
server_permissions.update_role_permission(role_id, server, permissions_mask)
# TODO: This is horribly inefficient and we should be using bulk queries but im going for functionality at this point
server_permissions.delete_roles_permissions(role_id, removed_servers)
PermissionsServers.get_or_create(role_id, server, permissions_mask)
for server in base_data["servers"]:
PermissionsServers.update_role_permission(role_id, server, permissions_mask)
# TODO: This is horribly inefficient and we should be using bulk queries
# but im going for functionality at this point
PermissionsServers.delete_roles_permissions(role_id, removed_servers)
if up_data:
roles_helper.update_role(role_id, up_data)
HelperRoles.update_role(role_id, up_data)

@staticmethod
def add_role(role_name):
return roles_helper.add_role(role_name)
return HelperRoles.add_role(role_name)

class RoleServerJsonType(t.TypedDict):
server_id: t.Union[str, int]
permissions: str

@staticmethod
def remove_role(role_id):
role_data = Roles_Controller.get_role_with_servers(role_id)
server_permissions.delete_roles_permissions(role_id, role_data['servers'])
users_helper.remove_roles_from_role_id(role_id)
return roles_helper.remove_role(role_id)
def get_server_ids_and_perms_from_role(
role_id: t.Union[str, int]
) -> t.List[RoleServerJsonType]:
# FIXME: somehow retrieve only the server ids, not the whole servers
return [
{
"server_id": role_servers.server_id.server_id,
"permissions": role_servers.permissions,
}
for role_servers in (
RoleServers.select(
RoleServers.server_id, RoleServers.permissions
).where(RoleServers.role_id == role_id)
)
]

@staticmethod
def add_role_advanced(
name: str,
servers: t.Iterable[RoleServerJsonType],
) -> int:
"""Add a role with a name and a list of servers

Args:
name (str): The new role's name
servers (t.List[RoleServerJsonType]): The new role's servers

Returns:
int: The new role's ID
"""
role_id: t.Final[int] = HelperRoles.add_role(name)
for server in servers:
PermissionsServers.get_or_create(
role_id, server["server_id"], server["permissions"]
)
return role_id

@staticmethod
def update_role_advanced(
role_id: t.Union[str, int],
role_name: t.Optional[str],
servers: t.Optional[t.Iterable[RoleServerJsonType]],
) -> None:
"""Update a role with a name and a list of servers

Args:
role_id (t.Union[str, int]): The ID of the role to be modified
role_name (t.Optional[str]): An optional new name for the role
servers (t.Optional[t.Iterable[RoleServerJsonType]]): An optional list of servers for the role
""" # pylint: disable=line-too-long
logger.debug(f"updating role {role_id} with advanced options")

if servers is not None:
base_data = RolesController.get_role_with_servers(role_id)

server_ids = {server["server_id"] for server in servers}
server_permissions_map = {
server["server_id"]: server["permissions"] for server in servers
}

added_servers = server_ids.difference(set(base_data["servers"]))
removed_servers = set(base_data["servers"]).difference(server_ids)
same_servers = server_ids.intersection(set(base_data["servers"]))
logger.debug(
f"role: {role_id} +server:{added_servers} -server{removed_servers}"
)
for server_id in added_servers:
PermissionsServers.get_or_create(
role_id, server_id, server_permissions_map[server_id]
)
if len(removed_servers) != 0:
PermissionsServers.delete_roles_permissions(role_id, removed_servers)
for server_id in same_servers:
PermissionsServers.update_role_permission(
role_id, server_id, server_permissions_map[server_id]
)
if role_name is not None:
up_data = {
"role_name": role_name,
"last_update": Helpers.get_time_as_string(),
}
# TODO: do the last_update on the db side
HelperRoles.update_role(role_id, up_data)

def remove_role(self, role_id):
role_data = RolesController.get_role_with_servers(role_id)
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
self.users_helper.remove_roles_from_role_id(role_id)
return self.roles_helper.remove_role(role_id)

@staticmethod
def role_id_exists(role_id):
return roles_helper.role_id_exists(role_id)
return HelperRoles.role_id_exists(role_id)

@staticmethod
def get_role_with_servers(role_id):
role = roles_helper.get_role(role_id)
role = HelperRoles.get_role(role_id)

if role:
servers_query = server_permissions.get_servers_from_role(role_id)
# TODO: this query needs to be narrower
servers = set()
for s in servers_query:
servers.add(s.server_id.server_id)
role['servers'] = servers
#logger.debug("role: ({}) {}".format(role_id, role))
server_ids = PermissionsServers.get_server_ids_from_role(role_id)
role["servers"] = server_ids
# logger.debug("role: ({}) {}".format(role_id, role))
return role
else:
#logger.debug("role: ({}) {}".format(role_id, {}))
# logger.debug("role: ({}) {}".format(role_id, {}))
return {}
@@ -1,99 +1,117 @@
import logging
from app.classes.controllers.servers_controller import ServersController

from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
from app.classes.models.users import users_helper, ApiKeys
from app.classes.models.roles import roles_helper
from app.classes.models.servers import servers_helper
from app.classes.shared.main_models import db_helper
from app.classes.models.server_permissions import (
PermissionsServers,
EnumPermissionsServer,
)
from app.classes.models.users import HelperUsers, ApiKeys
from app.classes.models.roles import HelperRoles
from app.classes.models.servers import HelperServers

logger = logging.getLogger(__name__)

class Server_Perms_Controller:

class ServerPermsController:
@staticmethod
def get_server_user_list(server_id):
return server_permissions.get_server_user_list(server_id)
return PermissionsServers.get_server_user_list(server_id)

@staticmethod
def list_defined_permissions():
permissions_list = server_permissions.get_permissions_list()
permissions_list = PermissionsServers.get_permissions_list()
return permissions_list

@staticmethod
def get_mask_permissions(role_id, server_id):
permissions_mask = server_permissions.get_permissions_mask(role_id, server_id)
permissions_mask = PermissionsServers.get_permissions_mask(role_id, server_id)
return permissions_mask

@staticmethod
def get_role_permissions(role_id):
permissions_list = server_permissions.get_role_permissions_list(role_id)
return permissions_list
def get_role_permissions_dict(role_id):
return PermissionsServers.get_role_permissions_dict(role_id)

@staticmethod
def add_role_server(server_id, role_id, rs_permissions="00000000"):
return server_permissions.add_role_server(server_id, role_id, rs_permissions)
return PermissionsServers.add_role_server(server_id, role_id, rs_permissions)

@staticmethod
def get_server_roles(server_id):
return server_permissions.get_server_roles(server_id)
return PermissionsServers.get_server_roles(server_id)

@staticmethod
def backup_role_swap(old_server_id, new_server_id):
role_list = server_permissions.get_server_roles(old_server_id)
role_list = PermissionsServers.get_server_roles(old_server_id)
for role in role_list:
server_permissions.add_role_server(
new_server_id, role.role_id,
server_permissions.get_permissions_mask(int(role.role_id), int(old_server_id)))
#server_permissions.add_role_server(new_server_id, role.role_id, '00001000')
PermissionsServers.add_role_server(
new_server_id,
role.role_id,
PermissionsServers.get_permissions_mask(
int(role.role_id), int(old_server_id)
),
)
# Permissions_Servers.add_role_server(
# new_server_id, role.role_id, "00001000"
# )

#************************************************************************************************
# **********************************************************************************
# Servers Permissions Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_permissions_mask(role_id, server_id):
return server_permissions.get_permissions_mask(role_id, server_id)
return PermissionsServers.get_permissions_mask(role_id, server_id)

@staticmethod
def set_permission(permission_mask, permission_tested: Enum_Permissions_Server, value):
return server_permissions.set_permission(permission_mask, permission_tested, value)

@staticmethod
def get_role_permissions_list(role_id):
return server_permissions.get_role_permissions_list(role_id)
def set_permission(
permission_mask, permission_tested: EnumPermissionsServer, value
):
return PermissionsServers.set_permission(
permission_mask, permission_tested, value
)

@staticmethod
def get_user_id_permissions_list(user_id: str, server_id: str):
return server_permissions.get_user_id_permissions_list(user_id, server_id)
return PermissionsServers.get_user_id_permissions_list(user_id, server_id)

@staticmethod
def get_api_key_id_permissions_list(key_id: str, server_id: str):
key = users_helper.get_user_api_key(key_id)
return server_permissions.get_api_key_permissions_list(key, server_id)
key = HelperUsers.get_user_api_key(key_id)
return PermissionsServers.get_api_key_permissions_list(key, server_id)

@staticmethod
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
return server_permissions.get_api_key_permissions_list(key, server_id)
return PermissionsServers.get_api_key_permissions_list(key, server_id)

@staticmethod
def get_authorized_servers_stats_from_roles(user_id):
user_roles = users_helper.get_user_roles_id(user_id)
user_roles = HelperUsers.get_user_roles_id(user_id)
roles_list = []
role_server = []
authorized_servers = []
server_data = []

for u in user_roles:
roles_list.append(roles_helper.get_role(u.role_id))
for user in user_roles:
roles_list.append(HelperRoles.get_role(user.role_id))

for r in roles_list:
role_test = server_permissions.get_role_servers_from_role_id(r.get('role_id'))
for t in role_test:
role_server.append(t)
for role in roles_list:
role_test = PermissionsServers.get_role_servers_from_role_id(
role.get("role_id")
)
for test in role_test:
role_server.append(test)

for s in role_server:
authorized_servers.append(servers_helper.get_server_data_by_id(s.server_id))
for server in role_server:
authorized_servers.append(
HelperServers.get_server_data_by_id(server.server_id)
)

for s in authorized_servers:
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0]})
for server in authorized_servers:
srv = ServersController().get_server_instance_by_id(server.get("server_id"))
latest = srv.stats_helper.get_latest_server_stats()
server_data.append(
{
"server_data": server,
"stats": latest,
}
)
return server_data
@@ -1,23 +1,45 @@
import os
import logging
import time
import json
import typing as t

from app.classes.controllers.roles_controller import Roles_Controller
from app.classes.models.servers import servers_helper
from app.classes.models.users import users_helper, ApiKeys
from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
from app.classes.shared.helpers import helper
from app.classes.shared.main_models import db_helper
from app.classes.controllers.roles_controller import RolesController

from app.classes.shared.singleton import Singleton
from app.classes.shared.server import ServerInstance
from app.classes.shared.console import Console
from app.classes.shared.helpers import Helpers
from app.classes.shared.main_models import DatabaseShortcuts

from app.classes.minecraft.server_props import ServerProps
from app.classes.minecraft.stats import Stats

from app.classes.models.servers import HelperServers
from app.classes.models.users import HelperUsers, ApiKeys
from app.classes.models.server_permissions import (
PermissionsServers,
EnumPermissionsServer,
)

logger = logging.getLogger(__name__)

class Servers_Controller:

#************************************************************************************************
class ServersController(metaclass=Singleton):
servers_list: ServerInstance

def __init__(self, helper, servers_helper, management_helper):
self.helper: Helpers = helper
self.servers_helper: HelperServers = servers_helper
self.management_helper = management_helper
self.servers_list = []
self.stats = Stats(self.helper, self)

# **********************************************************************************
# Generic Servers Methods
#************************************************************************************************
@staticmethod
# **********************************************************************************
def create_server(
self,
name: str,
server_uuid: str,
server_dir: str,
@@ -27,8 +49,31 @@ class Servers_Controller:
server_log_file: str,
server_stop: str,
server_type: str,
server_port=25565):
return servers_helper.create_server(
server_port: int = 25565,
server_host: str = "127.0.0.1",
) -> int:
"""Create a server in the database

Args:
name: The name of the server
server_uuid: This is the UUID of the server
server_dir: The directory where the server is located
backup_path: The path to the backup folder
server_command: The command to start the server
server_file: The name of the server file
server_log_file: The path to the server log file
server_stop: This is the command to stop the server
server_type: This is the type of server you're creating.
server_port: The port the server will be monitored on, defaults to 25565
server_host: The host the server will be monitored on, defaults to 127.0.0.1

Returns:
int: The new server's id

Raises:
PeeweeException: If the server already exists
"""
return HelperServers.create_server(
name,
server_uuid,
server_dir,
@@ -38,199 +83,459 @@ class Servers_Controller:
server_log_file,
server_stop,
server_type,
server_port)
server_port,
server_host,
)

@staticmethod
def get_server_obj(server_id):
return servers_helper.get_server_obj(server_id)
return HelperServers.get_server_obj(server_id)

@staticmethod
def update_server(server_obj):
return servers_helper.update_server(server_obj)
return HelperServers.update_server(server_obj)

@staticmethod
def set_download(server_id):
return servers_helper.set_download(server_id)
srv = ServersController().get_server_instance_by_id(server_id)
return srv.stats_helper.set_download()

@staticmethod
def finish_download(server_id):
return servers_helper.finish_download(server_id)
srv = ServersController().get_server_instance_by_id(server_id)
return srv.stats_helper.finish_download()

@staticmethod
def get_download_status(server_id):
return servers_helper.get_download_status(server_id)
server = ServersController().get_server_instance_by_id(server_id)
return server.stats_helper.get_download_status()

@staticmethod
def remove_server(server_id):
roles_list = server_permissions.get_roles_from_server(server_id)
def remove_server(self, server_id):
roles_list = PermissionsServers.get_roles_from_server(server_id)
for role in roles_list:
role_id = role.role_id
role_data = Roles_Controller.get_role_with_servers(role_id)
role_data['servers'] = {server_id}
server_permissions.delete_roles_permissions(role_id, role_data['servers'])
server_permissions.remove_roles_of_server(server_id)
servers_helper.remove_server(server_id)
role_data = RolesController.get_role_with_servers(role_id)
role_data["servers"] = {server_id}
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
PermissionsServers.remove_roles_of_server(server_id)
self.servers_helper.remove_server(server_id)

@staticmethod
def get_server_data_by_id(server_id):
return servers_helper.get_server_data_by_id(server_id)
return HelperServers.get_server_data_by_id(server_id)

#************************************************************************************************
# **********************************************************************************
# Servers Methods
#************************************************************************************************
# **********************************************************************************

def get_server_instance_by_id(self, server_id: t.Union[str, int]) -> ServerInstance:
for server in self.servers_list:
if int(server["server_id"]) == int(server_id):
return server["server_obj"]

logger.warning(f"Unable to find server object for server id {server_id}")
raise Exception(f"Unable to find server object for server id {server_id}")

def init_all_servers(self):

servers = self.get_all_defined_servers()

for server in servers:
server_id = server.get("server_id")

# if we have already initialized this server, let's skip it.
if self.check_server_loaded(server_id):
continue

# if this server path no longer exists - let's warn and bomb out
if not Helpers.check_path_exists(
Helpers.get_os_understandable_path(server["path"])
):
logger.warning(
f"Unable to find server "
f"{server['server_name']} at path {server['path']}. "
f"Skipping this server"
)

Console.warning(
f"Unable to find server "
f"{server['server_name']} at path {server['path']}. "
f"Skipping this server"
)
continue

settings_file = os.path.join(
Helpers.get_os_understandable_path(server["path"]), "server.properties"
)

# if the properties file isn't there, let's warn
if not Helpers.check_file_exists(settings_file):
logger.error(f"Unable to find {settings_file}. Skipping this server.")
Console.error(f"Unable to find {settings_file}. Skipping this server.")
continue

settings = ServerProps(settings_file)

temp_server_dict = {
"server_id": server.get("server_id"),
"server_data_obj": server,
"server_obj": ServerInstance(
server.get("server_id"),
self.helper,
self.management_helper,
self.stats,
),
"server_settings": settings.props,
}

# setup the server, do the auto start and all that jazz
temp_server_dict["server_obj"].do_server_setup(server)

# add this temp object to the list of init servers
self.servers_list.append(temp_server_dict)

if server["auto_start"]:
self.set_waiting_start(server["server_id"], True)

self.refresh_server_settings(server["server_id"])

Console.info(
f"Loaded Server: ID {server['server_id']}"
f" | Name: {server['server_name']}"
f" | Autostart: {server['auto_start']}"
f" | Delay: {server['auto_start_delay']}"
)

def check_server_loaded(self, server_id_to_check: int):

logger.info(f"Checking to see if we already registered {server_id_to_check}")

for server in self.servers_list:
known_server = server.get("server_id")
if known_server is None:
return False

if known_server == server_id_to_check:
logger.info(
f"skipping initialization of server {server_id_to_check} "
f"because it is already loaded"
)
return True

return False

def refresh_server_settings(self, server_id: int):
server_obj = self.get_server_instance_by_id(server_id)
server_obj.reload_server_settings()

@staticmethod
def get_all_defined_servers():
return servers_helper.get_all_defined_servers()
return HelperServers.get_all_defined_servers()

@staticmethod
def get_authorized_servers(user_id):
server_data = []
user_roles = users_helper.user_role_query(user_id)
for us in user_roles:
role_servers = server_permissions.get_role_servers_from_role_id(us.role_id)
server_data: t.List[t.Dict[str, t.Any]] = []
user_roles = HelperUsers.user_role_query(user_id)
for user in user_roles:
role_servers = PermissionsServers.get_role_servers_from_role_id(
user.role_id
)
for role in role_servers:
server_data.append(servers_helper.get_server_data_by_id(role.server_id))
server_data.append(
ServersController().get_server_instance_by_id(
role.server_id.server_id
)
)

return server_data

@staticmethod
def get_all_servers_stats():
return servers_helper.get_all_servers_stats()
def get_authorized_users(server_id: str):
user_ids: t.Set[int] = set()
roles_list = PermissionsServers.get_roles_from_server(server_id)
for role in roles_list:
role_users = HelperUsers.get_users_from_role(role.role_id)
for user_role in role_users:
user_ids.add(user_role.user_id)

for user_id in HelperUsers.get_super_user_list():
user_ids.add(user_id)

return user_ids

def get_all_servers_stats(self):
server_data = []
try:
for server in self.servers_list:
srv = ServersController().get_server_instance_by_id(
server.get("server_id")
)
latest = srv.stats_helper.get_latest_server_stats()
server_data.append(
{
"server_data": server["server_data_obj"],
"stats": latest,
"user_command_permission": True,
}
)
except IndexError as ex:
logger.error(
f"Stats collection failed with error: {ex}. Was a server just created?"
)
return server_data

@staticmethod
def get_authorized_servers_stats_api_key(api_key: ApiKeys):
server_data = []
authorized_servers = Servers_Controller.get_authorized_servers(api_key.user.user_id)
authorized_servers = ServersController().get_authorized_servers(
api_key.user_id # TODO: API key authorized servers?
)

for s in authorized_servers:
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
key_permissions = server_permissions.get_api_key_permissions_list(api_key, s.get('server_id'))
if Enum_Permissions_Server.Commands in key_permissions:
for server in authorized_servers:
srv: ServerInstance = server
latest = srv.stats_helper.get_latest_server_stats()
key_permissions = PermissionsServers.get_api_key_permissions_list(
api_key, server.server_id
)
if EnumPermissionsServer.COMMANDS in key_permissions:
user_command_permission = True
else:
user_command_permission = False
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0],
"user_command_permission": user_command_permission})
server_data.append(
{
"server_data": DatabaseShortcuts.get_data_obj(server.server_object),
"stats": latest,
"user_command_permission": user_command_permission,
}
)
return server_data

@staticmethod
def get_authorized_servers_stats(user_id):
server_data = []
authorized_servers = Servers_Controller.get_authorized_servers(user_id)
authorized_servers = ServersController.get_authorized_servers(user_id)

for s in authorized_servers:
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
for server in authorized_servers:
srv: ServerInstance = server
latest = srv.stats_helper.get_latest_server_stats()
# TODO
user_permissions = server_permissions.get_user_id_permissions_list(user_id, s.get('server_id'))
if Enum_Permissions_Server.Commands in user_permissions:
user_permissions = PermissionsServers.get_user_id_permissions_list(
user_id, server.server_id
)
if EnumPermissionsServer.COMMANDS in user_permissions:
user_command_permission = True
else:
user_command_permission = False
server_data.append({
'server_data': s,
'stats': db_helper.return_rows(latest)[0],
'user_command_permission': user_command_permission
})
server_data.append(
{
"server_data": DatabaseShortcuts.get_data_obj(server.server_object),
"stats": latest,
"user_command_permission": user_command_permission,
}
)

return server_data

@staticmethod
def get_server_friendly_name(server_id):
return servers_helper.get_server_friendly_name(server_id)
return HelperServers.get_server_friendly_name(server_id)

#************************************************************************************************
def get_server_settings(self, server_id):
for server in self.servers_list:
if int(server["server_id"]) == int(server_id):
return server["server_settings"]

logger.warning(f"Unable to find server object for server id {server_id}")
return False

def crash_detection(self, server_obj):
svr = self.get_server_instance_by_id(server_obj.server_id)
# start or stop crash detection depending upon user preference
# The below functions check to see if the server is running.
# They only execute if it's running.
if server_obj.crash_detection == 1:
svr.start_crash_detection()
else:
svr.stop_crash_detection()

def get_server_obj_optional(
self, server_id: t.Union[str, int]
) -> t.Optional[ServerInstance]:
for server in self.servers_list:
if str(server["server_id"]) == str(server_id):
return server["server_obj"]

logger.warning(f"Unable to find server object for server id {server_id}")
return None

def get_server_data(self, server_id: str):
for server in self.servers_list:
if str(server["server_id"]) == str(server_id):
return server["server_data_obj"]

logger.warning(f"Unable to find server object for server id {server_id}")
return False

def list_defined_servers(self):
defined_servers = []
for server in self.servers_list:
defined_servers.append(
self.get_server_instance_by_id(server.get("server_id"))
)
return defined_servers

@staticmethod
def get_all_server_ids() -> t.List[int]:
return HelperServers.get_all_server_ids()

def list_running_servers(self):
running_servers = []

# for each server
for server in self.servers_list:
# is the server running?
srv_obj: ServerInstance = server["server_obj"]
running = srv_obj.check_running()
# if so, let's add a dictionary to the list of running servers
if running:
running_servers.append({"id": srv_obj.server_id, "name": srv_obj.name})

return running_servers

def stop_all_servers(self):
servers = self.list_running_servers()
logger.info(f"Found {len(servers)} running server(s)")
Console.info(f"Found {len(servers)} running server(s)")

logger.info("Stopping All Servers")
Console.info("Stopping All Servers")

for server in servers:
logger.info(f"Stopping Server ID {server['id']} - {server['name']}")
Console.info(f"Stopping Server ID {server['id']} - {server['name']}")

self.stop_server(server["id"])

# let's wait 2 seconds to let everything flush out
time.sleep(2)

logger.info("All Servers Stopped")
Console.info("All Servers Stopped")

def stop_server(self, server_id):
# issue the stop command
svr_obj = self.get_server_instance_by_id(server_id)
svr_obj.stop_threaded_server()

# **********************************************************************************
# Servers_Stats Methods
#************************************************************************************************
# **********************************************************************************
@staticmethod
def get_server_stats_by_id(server_id):
return servers_helper.get_server_stats_by_id(server_id)
srv = ServersController().get_server_instance_by_id(server_id)
return srv.stats_helper.get_latest_server_stats()

@staticmethod
def server_id_exists(server_id):
|
||||
return servers_helper.server_id_exists(server_id)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.server_id_exists()
|
||||
|
||||
@staticmethod
|
||||
def get_server_type_by_id(server_id):
|
||||
return servers_helper.get_server_type_by_id(server_id)
|
||||
return HelperServers.get_server_type_by_id(server_id)
|
||||
|
||||
@staticmethod
|
||||
def server_id_authorized(server_id_a, user_id):
|
||||
user_roles = users_helper.user_role_query(user_id)
|
||||
user_roles = HelperUsers.user_role_query(user_id)
|
||||
for role in user_roles:
|
||||
for server_id_b in server_permissions.get_role_servers_from_role_id(role.role_id):
|
||||
for server_id_b in PermissionsServers.get_role_servers_from_role_id(
|
||||
role.role_id
|
||||
):
|
||||
if str(server_id_a) == str(server_id_b.server_id):
|
||||
return True
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def is_crashed(server_id):
|
||||
return servers_helper.is_crashed(server_id)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.is_crashed()
|
||||
|
||||
@staticmethod
|
||||
def server_id_authorized_api_key(server_id: str, api_key: ApiKeys) -> bool:
|
||||
# TODO
|
||||
return Servers_Controller.server_id_authorized(server_id, api_key.user.user_id)
|
||||
return ServersController.server_id_authorized(server_id, api_key.user.user_id)
|
||||
# There is no view server permission
|
||||
# permission_helper.both_have_perm(api_key)
|
||||
|
||||
@staticmethod
|
||||
def set_update(server_id, value):
|
||||
return servers_helper.set_update(server_id, value)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.set_update(value)
|
||||
|
||||
@staticmethod
|
||||
def get_TTL_without_player(server_id):
|
||||
return servers_helper.get_TTL_without_player(server_id)
|
||||
def get_ttl_without_player(server_id):
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.get_ttl_without_player()
|
||||
|
||||
@staticmethod
|
||||
def can_stop_no_players(server_id, time_limit):
|
||||
return servers_helper.can_stop_no_players(server_id, time_limit)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.can_stop_no_players(time_limit)
|
||||
|
||||
@staticmethod
|
||||
def set_waiting_start(server_id, value):
|
||||
servers_helper.set_waiting_start(server_id, value)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
srv.stats_helper.set_waiting_start(value)
|
||||
|
||||
@staticmethod
|
||||
def get_waiting_start(server_id):
|
||||
return servers_helper.get_waiting_start(server_id)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.get_waiting_start()
|
||||
|
||||
@staticmethod
|
||||
def get_update_status(server_id):
|
||||
return servers_helper.get_update_status(server_id)
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
return srv.stats_helper.get_update_status()
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Servers Helpers Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_banned_players(server_id):
|
||||
stats = servers_helper.get_server_stats_by_id(server_id)
|
||||
server_path = stats['server_id']['path']
|
||||
path = os.path.join(server_path, 'banned-players.json')
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
stats = srv.stats_helper.get_server_stats()
|
||||
server_path = stats["server_id"]["path"]
|
||||
path = os.path.join(server_path, "banned-players.json")
|
||||
|
||||
try:
|
||||
with open(helper.get_os_understandable_path(path), encoding='utf-8') as file:
|
||||
with open(
|
||||
Helpers.get_os_understandable_path(path), encoding="utf-8"
|
||||
) as file:
|
||||
content = file.read()
|
||||
file.close()
|
||||
except Exception as ex:
|
||||
print (ex)
|
||||
print(ex)
|
||||
return None
|
||||
|
||||
return json.loads(content)
|
||||
|
||||
def check_for_old_logs(self):
|
||||
servers = servers_helper.get_all_defined_servers()
|
||||
servers = HelperServers.get_all_defined_servers()
|
||||
for server in servers:
|
||||
logs_path = os.path.split(server['log_path'])[0]
|
||||
latest_log_file = os.path.split(server['log_path'])[1]
|
||||
logs_delete_after = int(server['logs_delete_after'])
|
||||
logs_path = os.path.split(server["log_path"])[0]
|
||||
latest_log_file = os.path.split(server["log_path"])[1]
|
||||
logs_delete_after = int(server["logs_delete_after"])
|
||||
if logs_delete_after == 0:
|
||||
continue
|
||||
|
||||
log_files = list(filter(
|
||||
lambda val: val != latest_log_file,
|
||||
os.listdir(logs_path)
|
||||
))
|
||||
log_files = list(
|
||||
filter(lambda val: val != latest_log_file, os.listdir(logs_path))
|
||||
)
|
||||
for log_file in log_files:
|
||||
log_file_path = os.path.join(logs_path, log_file)
|
||||
if helper.check_file_exists(log_file_path) and \
|
||||
helper.is_file_older_than_x_days(log_file_path, logs_delete_after):
|
||||
if Helpers.check_file_exists(
|
||||
log_file_path
|
||||
) and Helpers.is_file_older_than_x_days(
|
||||
log_file_path, logs_delete_after
|
||||
):
|
||||
os.remove(log_file_path)
|
||||
|
@ -1,57 +1,139 @@
|
||||
import logging
|
||||
from typing import Optional
|
||||
import typing as t
|
||||
|
||||
from app.classes.models.users import users_helper
|
||||
from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.authentication import authentication
|
||||
from app.classes.models.users import HelperUsers
|
||||
from app.classes.models.crafty_permissions import (
|
||||
PermissionsCrafty,
|
||||
EnumPermissionsCrafty,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Users_Controller:
|
||||
|
||||
#************************************************************************************************
|
||||
class UsersController:
|
||||
class ApiPermissionDict(t.TypedDict):
|
||||
name: str
|
||||
quantity: int
|
||||
enabled: bool
|
||||
|
||||
def __init__(self, helper, users_helper, authentication):
|
||||
self.helper = helper
|
||||
self.users_helper = users_helper
|
||||
self.authentication = authentication
|
||||
|
||||
_permissions_props = {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
permission.name
|
||||
for permission in PermissionsCrafty.get_permissions_list()
|
||||
],
|
||||
},
|
||||
"quantity": {"type": "number", "minimum": 0},
|
||||
"enabled": {"type": "boolean"},
|
||||
}
|
||||
self.user_jsonschema_props: t.Final = {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"minLength": 4,
|
||||
"pattern": "^[a-z0-9_]+$",
|
||||
"examples": ["admin"],
|
||||
"title": "Username",
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"minLength": 4,
|
||||
"examples": ["crafty"],
|
||||
"title": "Password",
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"format": "email",
|
||||
"examples": ["default@example.com"],
|
||||
"title": "E-Mail",
|
||||
},
|
||||
"enabled": {
|
||||
"type": "boolean",
|
||||
"examples": [True],
|
||||
"title": "Enabled",
|
||||
},
|
||||
"lang": {
|
||||
"type": "string",
|
||||
"maxLength": 10,
|
||||
"minLength": 2,
|
||||
"examples": ["en"],
|
||||
"title": "Language",
|
||||
},
|
||||
"superuser": {
|
||||
"type": "boolean",
|
||||
"examples": [False],
|
||||
"title": "Superuser",
|
||||
},
|
||||
"permissions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": _permissions_props,
|
||||
"required": ["name", "quantity", "enabled"],
|
||||
},
|
||||
},
|
||||
"roles": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
},
|
||||
},
|
||||
"hints": {"type": "boolean"},
|
||||
}
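# Illustrative sketch, not from the Crafty codebase: the property map above is shaped
# for the jsonschema package, which validates a user payload against these rules.
# Where the controller actually calls the validator is outside this hunk, so the
# wrapper object below is an assumption for demonstration only.
import jsonschema

demo_props = {
    "username": {"type": "string", "minLength": 4, "pattern": "^[a-z0-9_]+$"},
    "enabled": {"type": "boolean"},
}
demo_schema = {"type": "object", "properties": demo_props, "additionalProperties": False}

jsonschema.validate({"username": "admin", "enabled": True}, demo_schema)  # passes
try:
    jsonschema.validate({"username": "A!"}, demo_schema)  # violates pattern/minLength
except jsonschema.ValidationError as err:
    print(err.message)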
|
||||
|
||||
# **********************************************************************************
|
||||
# Users Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_all_users():
|
||||
return users_helper.get_all_users()
|
||||
return HelperUsers.get_all_users()
|
||||
|
||||
@staticmethod
|
||||
def get_all_user_ids() -> t.List[int]:
|
||||
return HelperUsers.get_all_user_ids()
|
||||
|
||||
@staticmethod
|
||||
def get_id_by_name(username):
|
||||
return users_helper.get_user_id_by_name(username)
|
||||
return HelperUsers.get_user_id_by_name(username)
|
||||
|
||||
@staticmethod
|
||||
def get_user_lang_by_id(user_id):
|
||||
return users_helper.get_user_lang_by_id(user_id)
|
||||
return HelperUsers.get_user_lang_by_id(user_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_by_id(user_id):
|
||||
return users_helper.get_user(user_id)
|
||||
return HelperUsers.get_user(user_id)
|
||||
|
||||
@staticmethod
|
||||
def update_server_order(user_id, user_server_order):
|
||||
users_helper.update_server_order(user_id, user_server_order)
|
||||
HelperUsers.update_server_order(user_id, user_server_order)
|
||||
|
||||
@staticmethod
|
||||
def get_server_order(user_id):
|
||||
return users_helper.get_server_order(user_id)
|
||||
return HelperUsers.get_server_order(user_id)
|
||||
|
||||
@staticmethod
|
||||
def user_query(user_id):
|
||||
return users_helper.user_query(user_id)
|
||||
return HelperUsers.user_query(user_id)
|
||||
|
||||
@staticmethod
|
||||
def set_support_path(user_id, support_path):
|
||||
users_helper.set_support_path(user_id, support_path)
|
||||
HelperUsers.set_support_path(user_id, support_path)
|
||||
|
||||
@staticmethod
|
||||
def update_user(user_id: str, user_data=None, user_crafty_data=None):
|
||||
def update_user(self, user_id: str, user_data=None, user_crafty_data=None):
|
||||
if user_crafty_data is None:
|
||||
user_crafty_data = {}
|
||||
if user_data is None:
|
||||
user_data = {}
|
||||
base_data = users_helper.get_user(user_id)
|
||||
base_data = HelperUsers.get_user(user_id)
|
||||
up_data = {}
|
||||
added_roles = set()
|
||||
removed_roles = set()
|
||||
@ -59,110 +141,171 @@ class Users_Controller:
|
||||
if key == "user_id":
|
||||
continue
|
||||
elif key == "roles":
|
||||
added_roles = user_data['roles'].difference(base_data['roles'])
|
||||
removed_roles = base_data['roles'].difference(user_data['roles'])
|
||||
added_roles = set(user_data["roles"]).difference(
|
||||
set(base_data["roles"])
|
||||
)
|
||||
removed_roles = set(base_data["roles"]).difference(
|
||||
set(user_data["roles"])
|
||||
)
|
||||
elif key == "password":
|
||||
if user_data['password'] is not None and user_data['password'] != "":
|
||||
up_data['password'] = helper.encode_pass(user_data['password'])
|
||||
if user_data["password"] is not None and user_data["password"] != "":
|
||||
up_data["password"] = self.helper.encode_pass(user_data["password"])
|
||||
elif key == "lang":
|
||||
up_data["lang"] = user_data["lang"]
|
||||
elif key == "hints":
|
||||
up_data["hints"] = user_data["hints"]
|
||||
elif base_data[key] != user_data[key]:
|
||||
up_data[key] = user_data[key]
|
||||
up_data['last_update'] = helper.get_time_as_string()
|
||||
up_data['lang'] = user_data['lang']
|
||||
up_data["last_update"] = self.helper.get_time_as_string()
|
||||
logger.debug(f"user: {user_data} +role:{added_roles} -role:{removed_roles}")
|
||||
for role in added_roles:
|
||||
users_helper.get_or_create(user_id=user_id, role_id=role)
|
||||
permissions_mask = user_crafty_data.get('permissions_mask', '000')
|
||||
HelperUsers.get_or_create(user_id=user_id, role_id=role)
|
||||
permissions_mask = user_crafty_data.get("permissions_mask", "000")
|
||||
|
||||
if 'server_quantity' in user_crafty_data:
|
||||
limit_server_creation = user_crafty_data['server_quantity'][
|
||||
Enum_Permissions_Crafty.Server_Creation.name]
|
||||
if "server_quantity" in user_crafty_data:
|
||||
limit_server_creation = user_crafty_data["server_quantity"].get(
|
||||
EnumPermissionsCrafty.SERVER_CREATION.name, 0
|
||||
)
|
||||
|
||||
limit_user_creation = user_crafty_data['server_quantity'][Enum_Permissions_Crafty.User_Config.name]
|
||||
limit_role_creation = user_crafty_data['server_quantity'][Enum_Permissions_Crafty.Roles_Config.name]
|
||||
else:
|
||||
limit_server_creation = 0
|
||||
limit_user_creation = 0
|
||||
limit_role_creation = 0
|
||||
limit_user_creation = user_crafty_data["server_quantity"].get(
|
||||
EnumPermissionsCrafty.USER_CONFIG.name, 0
|
||||
)
|
||||
limit_role_creation = user_crafty_data["server_quantity"].get(
|
||||
EnumPermissionsCrafty.ROLES_CONFIG.name, 0
|
||||
)
|
||||
else:
|
||||
limit_server_creation = 0
|
||||
limit_user_creation = 0
|
||||
limit_role_creation = 0
|
||||
|
||||
crafty_permissions.add_or_update_user(
|
||||
user_id,
|
||||
permissions_mask,
|
||||
limit_server_creation,
|
||||
limit_user_creation,
|
||||
limit_role_creation)
|
||||
PermissionsCrafty.add_or_update_user(
|
||||
user_id,
|
||||
permissions_mask,
|
||||
limit_server_creation,
|
||||
limit_user_creation,
|
||||
limit_role_creation,
|
||||
)
|
||||
|
||||
users_helper.delete_user_roles(user_id, removed_roles)
|
||||
self.users_helper.delete_user_roles(user_id, removed_roles)
|
||||
|
||||
users_helper.update_user(user_id, up_data)
|
||||
self.users_helper.update_user(user_id, up_data)
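# Illustrative sketch, not from the Crafty codebase: update_user() above decides which
# role links to create and which to drop with two set differences before it touches
# the database. The same diffing step, standalone, with hypothetical role ids:
current_roles = {1, 2, 3}        # roles already stored for the user
submitted_roles = {2, 3, 4}      # roles posted from the edit form

added_roles = submitted_roles - current_roles    # {4}  -> get_or_create these links
removed_roles = current_roles - submitted_roles  # {1}  -> delete_user_roles on these
print(added_roles, removed_roles)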
|
||||
|
||||
def raw_update_user(self, user_id: int, up_data: t.Optional[t.Dict[str, t.Any]]):
|
||||
"""Directly passes the data to the model helper.
|
||||
|
||||
Args:
|
||||
user_id (int): The id of the user to update.
|
||||
up_data (t.Optional[t.Dict[str, t.Any]]): Update data.
|
||||
"""
|
||||
self.users_helper.update_user(user_id, up_data)
|
||||
|
||||
def add_user(
|
||||
self,
|
||||
username,
|
||||
password,
|
||||
email="default@example.com",
|
||||
enabled: bool = True,
|
||||
superuser: bool = False,
|
||||
):
|
||||
return self.users_helper.add_user(
|
||||
username,
|
||||
password=password,
|
||||
email=email,
|
||||
enabled=enabled,
|
||||
superuser=superuser,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_user(username, password=None, email="default@example.com", enabled: bool = True, superuser: bool = False):
|
||||
return users_helper.add_user(username, password=password, email=email, enabled=enabled, superuser=superuser)
|
||||
def add_rawpass_user(
|
||||
username,
|
||||
password,
|
||||
email="default@example.com",
|
||||
enabled: bool = True,
|
||||
superuser: bool = False,
|
||||
):
|
||||
return HelperUsers.add_rawpass_user(
|
||||
username,
|
||||
password=password,
|
||||
email=email,
|
||||
enabled=enabled,
|
||||
superuser=superuser,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def remove_user(user_id):
|
||||
return users_helper.remove_user(user_id)
|
||||
def remove_user(self, user_id):
|
||||
return self.users_helper.remove_user(user_id)
|
||||
|
||||
@staticmethod
|
||||
def user_id_exists(user_id):
|
||||
return users_helper.user_id_exists(user_id)
|
||||
return HelperUsers.user_id_exists(user_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_id_by_api_token(token: str) -> str:
|
||||
token_data = authentication.check_no_iat(token)
|
||||
return token_data['user_id']
|
||||
def set_prepare(user_id):
|
||||
return HelperUsers.set_prepare(user_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_by_api_token(token: str):
|
||||
_, user = authentication.check(token)
|
||||
def stop_prepare(user_id):
|
||||
return HelperUsers.stop_prepare(user_id)
|
||||
|
||||
def get_user_id_by_api_token(self, token: str) -> str:
|
||||
token_data = self.authentication.check_no_iat(token)
|
||||
return token_data["user_id"]
|
||||
|
||||
def get_user_by_api_token(self, token: str):
|
||||
_, _, user = self.authentication.check_err(token)
|
||||
return user
|
||||
|
||||
# ************************************************************************************************
|
||||
def get_api_key_by_token(self, token: str):
|
||||
key, _, _ = self.authentication.check(token)
|
||||
return key
|
||||
|
||||
# **********************************************************************************
|
||||
# User Roles Methods
|
||||
# ************************************************************************************************
|
||||
# **********************************************************************************
|
||||
|
||||
@staticmethod
|
||||
def get_user_roles_id(user_id):
|
||||
return users_helper.get_user_roles_id(user_id)
|
||||
return HelperUsers.get_user_roles_id(user_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_roles_names(user_id):
|
||||
return users_helper.get_user_roles_names(user_id)
|
||||
return HelperUsers.get_user_roles_names(user_id)
|
||||
|
||||
@staticmethod
|
||||
def add_role_to_user(user_id, role_id):
|
||||
return users_helper.add_role_to_user(user_id, role_id)
|
||||
def add_role_to_user(self, user_id, role_id):
|
||||
return self.users_helper.add_role_to_user(user_id, role_id)
|
||||
|
||||
@staticmethod
|
||||
def add_user_roles(user):
|
||||
return users_helper.add_user_roles(user)
|
||||
def add_user_roles(self, user):
|
||||
return self.users_helper.add_user_roles(user)
|
||||
|
||||
@staticmethod
|
||||
def user_role_query(user_id):
|
||||
return users_helper.user_role_query(user_id)
|
||||
return HelperUsers.user_role_query(user_id)
|
||||
|
||||
# ************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Api Keys Methods
|
||||
# ************************************************************************************************
|
||||
# **********************************************************************************
|
||||
|
||||
@staticmethod
|
||||
def get_user_api_keys(user_id: str):
|
||||
return users_helper.get_user_api_keys(user_id)
|
||||
return HelperUsers.get_user_api_keys(user_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_api_key(key_id: str):
|
||||
return users_helper.get_user_api_key(key_id)
|
||||
return HelperUsers.get_user_api_key(key_id)
|
||||
|
||||
@staticmethod
|
||||
def add_user_api_key(name: str, user_id: str, superuser: bool = False,
|
||||
server_permissions_mask: Optional[str] = None,
|
||||
crafty_permissions_mask: Optional[str] = None):
|
||||
return users_helper.add_user_api_key(name, user_id, superuser, server_permissions_mask, crafty_permissions_mask)
|
||||
def add_user_api_key(
|
||||
self,
|
||||
name: str,
|
||||
user_id: str,
|
||||
superuser: bool = False,
|
||||
server_permissions_mask: t.Optional[str] = None,
|
||||
crafty_permissions_mask: t.Optional[str] = None,
|
||||
):
|
||||
return self.users_helper.add_user_api_key(
|
||||
name, user_id, superuser, server_permissions_mask, crafty_permissions_mask
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def delete_user_api_keys(user_id: str):
|
||||
return users_helper.delete_user_api_keys(user_id)
|
||||
def delete_user_api_keys(self, user_id: str):
|
||||
return self.users_helper.delete_user_api_keys(user_id)
|
||||
|
||||
@staticmethod
|
||||
def delete_user_api_key(key_id: str):
|
||||
return users_helper.delete_user_api_key(key_id)
|
||||
def delete_user_api_key(self, key_id: str):
|
||||
return self.users_helper.delete_user_api_key(key_id)
|
||||
|
@ -1,23 +1,29 @@
|
||||
from contextlib import redirect_stderr
|
||||
import os
|
||||
import socket
|
||||
import time
|
||||
import psutil
|
||||
|
||||
from app.classes.shared.null_writer import NullWriter
|
||||
|
||||
with redirect_stderr(NullWriter()):
|
||||
import psutil
|
||||
|
||||
|
||||
class BedrockPing:
|
||||
magic = b'\x00\xff\xff\x00\xfe\xfe\xfe\xfe\xfd\xfd\xfd\xfd\x12\x34\x56\x78'
|
||||
fields = { # (len, signed)
|
||||
magic = b"\x00\xff\xff\x00\xfe\xfe\xfe\xfe\xfd\xfd\xfd\xfd\x12\x34\x56\x78"
|
||||
fields = { # (len, signed)
|
||||
"byte": (1, False),
|
||||
"long": (8, True),
|
||||
"ulong": (8, False),
|
||||
"magic": (16, False),
|
||||
"short": (2, True),
|
||||
"ushort": (2, False), #unsigned short
|
||||
"string": (2, False), #strlen is ushort
|
||||
"ushort": (2, False), # unsigned short
|
||||
"string": (2, False), # strlen is ushort
|
||||
"bool": (1, False),
|
||||
"address": (7, False),
|
||||
"uint24le": (3, False)
|
||||
"uint24le": (3, False),
|
||||
}
|
||||
byte_order = 'big'
|
||||
byte_order = "big"
|
||||
|
||||
def __init__(self, bedrock_addr, bedrock_port, client_guid=0, timeout=5):
|
||||
self.addr = bedrock_addr
|
||||
@ -36,51 +42,72 @@ class BedrockPing:
|
||||
@staticmethod
|
||||
def __slice(in_bytes, pattern):
|
||||
ret = []
|
||||
bi = 0 # bytes index
|
||||
pi = 0 # pattern index
|
||||
while bi < len(in_bytes):
|
||||
bytes_index = 0
|
||||
pattern_index = 0
|
||||
while bytes_index < len(in_bytes):
|
||||
try:
|
||||
f = BedrockPing.fields[pattern[pi]]
|
||||
field = BedrockPing.fields[pattern[pattern_index]]
|
||||
except IndexError as index_error:
|
||||
raise IndexError("Ran out of pattern with additional bytes remaining") from index_error
|
||||
if pattern[pi] == "string":
|
||||
shl = f[0] # string header length
|
||||
sl = int.from_bytes(in_bytes[bi:bi+shl], BedrockPing.byte_order, signed=f[1]) # string length
|
||||
l = shl+sl
|
||||
ret.append(in_bytes[bi+shl:bi+shl+sl].decode('ascii'))
|
||||
elif pattern[pi] == "magic":
|
||||
l = f[0] # length of field
|
||||
ret.append(in_bytes[bi:bi+l])
|
||||
raise IndexError(
|
||||
"Ran out of pattern with additional bytes remaining"
|
||||
) from index_error
|
||||
if pattern[pattern_index] == "string":
|
||||
string_header_length = field[0]
|
||||
string_length = int.from_bytes(
|
||||
in_bytes[bytes_index : bytes_index + string_header_length],
|
||||
BedrockPing.byte_order,
|
||||
signed=field[1],
|
||||
)
|
||||
length = string_header_length + string_length
|
||||
ret.append(
|
||||
in_bytes[
|
||||
bytes_index
|
||||
+ string_header_length : bytes_index
|
||||
+ string_header_length
|
||||
+ string_length
|
||||
].decode("ascii")
|
||||
)
|
||||
elif pattern[pattern_index] == "magic":
|
||||
length = field[0]
|
||||
ret.append(in_bytes[bytes_index : bytes_index + length])
|
||||
else:
|
||||
l = f[0] # length of field
|
||||
ret.append(int.from_bytes(in_bytes[bi:bi+l], BedrockPing.byte_order, signed=f[1]))
|
||||
bi+=l
|
||||
pi+=1
|
||||
length = field[0]
|
||||
ret.append(
|
||||
int.from_bytes(
|
||||
in_bytes[bytes_index : bytes_index + length],
|
||||
BedrockPing.byte_order,
|
||||
signed=field[1],
|
||||
)
|
||||
)
|
||||
bytes_index += length
|
||||
pattern_index += 1
|
||||
return ret
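# Illustrative sketch, not from the Crafty codebase: __slice() above walks a byte
# buffer with a list of field names, cutting fixed-width big-endian integers and
# ushort-length-prefixed strings. A trimmed-down standalone version of the same idea
# (the field widths here are examples, not the full BedrockPing.fields table):
def mini_slice(buf, pattern, order="big"):
    widths = {"byte": 1, "ushort": 2, "ulong": 8}
    out, pos = [], 0
    for name in pattern:
        if name == "string":  # ushort length prefix, then ASCII payload
            strlen = int.from_bytes(buf[pos:pos + 2], order)
            out.append(buf[pos + 2:pos + 2 + strlen].decode("ascii"))
            pos += 2 + strlen
        else:
            out.append(int.from_bytes(buf[pos:pos + widths[name]], order))
            pos += widths[name]
    return out

demo_packet = bytes([0x1C]) + (42).to_bytes(8, "big") + (3).to_bytes(2, "big") + b"MCP"
print(mini_slice(demo_packet, ["byte", "ulong", "string"]))  # [28, 42, 'MCP']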
|
||||
|
||||
@staticmethod
|
||||
def __get_time():
|
||||
#return time.time_ns() // 1000000
|
||||
# return time.time_ns() // 1000000
|
||||
return time.perf_counter_ns() // 1000000
|
||||
|
||||
def __sendping(self):
|
||||
pack_id = BedrockPing.__byter(0x01, 'byte')
|
||||
now = BedrockPing.__byter(BedrockPing.__get_time(), 'ulong')
|
||||
pack_id = BedrockPing.__byter(0x01, "byte")
|
||||
now = BedrockPing.__byter(BedrockPing.__get_time(), "ulong")
|
||||
guid = self.guid_bytes
|
||||
d2s = pack_id+now+BedrockPing.magic+guid
|
||||
#print("S:", d2s)
|
||||
d2s = pack_id + now + BedrockPing.magic + guid
|
||||
# print("S:", d2s)
|
||||
self.sock.sendto(d2s, (self.addr, self.port))
|
||||
|
||||
def __recvpong(self):
|
||||
data = self.sock.recv(4096)
|
||||
if data[0] == 0x1c:
|
||||
if data[0] == 0x1C:
|
||||
ret = {}
|
||||
sliced = BedrockPing.__slice(data,["byte","ulong","ulong","magic","string"])
|
||||
sliced = BedrockPing.__slice(
|
||||
data, ["byte", "ulong", "ulong", "magic", "string"]
|
||||
)
|
||||
if sliced[3] != BedrockPing.magic:
|
||||
raise ValueError(f"Incorrect magic received ({sliced[3]})")
|
||||
ret["server_guid"] = sliced[2]
|
||||
ret["server_string_raw"] = sliced[4]
|
||||
server_info = sliced[4].split(';')
|
||||
server_info = sliced[4].split(";")
|
||||
ret["server_edition"] = server_info[0]
|
||||
ret["server_motd"] = (server_info[1], server_info[7])
|
||||
ret["server_protocol_version"] = server_info[2]
|
||||
@ -103,5 +130,7 @@ class BedrockPing:
|
||||
self.__sendping()
|
||||
return self.__recvpong()
|
||||
except ValueError as e:
|
||||
print(f"E: {e}, checking next packet. Retries remaining: {rtr}/{retries}")
|
||||
print(
|
||||
f"E: {e}, checking next packet. Retries remaining: {rtr}/{retries}"
|
||||
)
|
||||
rtr -= 1
|
||||
|
@ -9,28 +9,29 @@ import uuid
|
||||
import random
|
||||
|
||||
from app.classes.minecraft.bedrock_ping import BedrockPing
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.console import Console
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Server:
|
||||
def __init__(self, data):
|
||||
self.description = data.get('description')
|
||||
self.description = data.get("description")
|
||||
# print(self.description)
|
||||
if isinstance(self.description, dict):
|
||||
|
||||
# cat server
|
||||
if "translate" in self.description:
|
||||
self.description = self.description['translate']
|
||||
self.description = self.description["translate"]
|
||||
|
||||
# waterfall / bungee
|
||||
elif 'extra' in self.description:
|
||||
elif "extra" in self.description:
|
||||
lines = []
|
||||
|
||||
description = self.description
|
||||
if 'extra' in description.keys():
|
||||
for e in description['extra']:
|
||||
#Conversion format code needed only for Java Version
|
||||
if "extra" in description.keys():
|
||||
for e in description["extra"]:
|
||||
# Conversion format code needed only for Java Version
|
||||
lines.append(get_code_format("reset"))
|
||||
if "bold" in e.keys():
|
||||
lines.append(get_code_format("bold"))
|
||||
@ -43,77 +44,76 @@ class Server:
|
||||
if "obfuscated" in e.keys():
|
||||
lines.append(get_code_format("obfuscated"))
|
||||
if "color" in e.keys():
|
||||
lines.append(get_code_format(e['color']))
|
||||
#Then append the text
|
||||
lines.append(get_code_format(e["color"]))
|
||||
# Then append the text
|
||||
if "text" in e.keys():
|
||||
if e['text'] == '\n':
|
||||
if e["text"] == "\n":
|
||||
lines.append("§§")
|
||||
else:
|
||||
lines.append(e['text'])
|
||||
lines.append(e["text"])
|
||||
|
||||
total_text = " ".join(lines)
|
||||
self.description = total_text
|
||||
|
||||
# normal MC
|
||||
else:
|
||||
self.description = self.description['text']
|
||||
self.description = self.description["text"]
|
||||
|
||||
self.icon = base64.b64decode(data.get('favicon', '')[22:])
|
||||
self.icon = base64.b64decode(data.get("favicon", "")[22:])
|
||||
try:
|
||||
self.players = Players(data['players']).report()
|
||||
self.players = Players(data["players"]).report()
|
||||
except KeyError:
|
||||
logger.error("Error getting player information: key error")
|
||||
self.players = []
|
||||
self.version = data['version']['name']
|
||||
self.protocol = data['version']['protocol']
|
||||
self.version = data["version"]["name"]
|
||||
self.protocol = data["version"]["protocol"]
|
||||
|
||||
|
||||
class Players(list):
|
||||
def __init__(self, data):
|
||||
super().__init__(Player(x) for x in data.get('sample', []))
|
||||
self.max = data['max']
|
||||
self.online = data['online']
|
||||
super().__init__(Player(x) for x in data.get("sample", []))
|
||||
self.max = data["max"]
|
||||
self.online = data["online"]
|
||||
|
||||
def report(self):
|
||||
players = []
|
||||
|
||||
for x in self:
|
||||
players.append(str(x))
|
||||
for player in self:
|
||||
players.append(str(player))
|
||||
|
||||
r_data = {
|
||||
'online': self.online,
|
||||
'max': self.max,
|
||||
'players': players
|
||||
}
|
||||
r_data = {"online": self.online, "max": self.max, "players": players}
|
||||
|
||||
return json.dumps(r_data)
|
||||
|
||||
|
||||
class Player:
|
||||
def __init__(self, data):
|
||||
self.id = data['id']
|
||||
self.name = data['name']
|
||||
self.id = data["id"]
|
||||
self.name = data["name"]
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
def get_code_format(format_name):
|
||||
root_dir = os.path.abspath(os.path.curdir)
|
||||
format_file = os.path.join(root_dir, 'app', 'config', 'motd_format.json')
|
||||
format_file = os.path.join(root_dir, "app", "config", "motd_format.json")
|
||||
try:
|
||||
with open(format_file, "r", encoding='utf-8') as f:
|
||||
with open(format_file, "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
|
||||
if format_name in data.keys():
|
||||
return data.get(format_name)
|
||||
else:
|
||||
logger.error(f"Format MOTD Error: format name {format_name} does not exist")
|
||||
console.error(f"Format MOTD Error: format name {format_name} does not exist")
|
||||
Console.error(
|
||||
f"Format MOTD Error: format name {format_name} does not exist"
|
||||
)
|
||||
return ""
|
||||
|
||||
except Exception as e:
|
||||
logger.critical(f"Config File Error: Unable to read {format_file} due to {e}")
|
||||
console.critical(f"Config File Error: Unable to read {format_file} due to {e}")
|
||||
Console.critical(f"Config File Error: Unable to read {format_file} due to {e}")
|
||||
|
||||
return ""
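# Illustrative sketch, not from the Crafty codebase: get_code_format() above is a
# lookup into app/config/motd_format.json. The real codes ship in that file; the
# mapping below is a guessed subset using standard Minecraft section-sign codes,
# for demonstration only.
DEMO_MOTD_CODES = {"reset": "\u00a7r", "bold": "\u00a7l", "italic": "\u00a7o"}

def demo_get_code_format(format_name, table=DEMO_MOTD_CODES):
    if format_name in table:
        return table[format_name]
    print(f"Format MOTD Error: format name {format_name} does not exist")
    return ""

print(demo_get_code_format("bold") + "Hello" + demo_get_code_format("reset"))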
|
||||
|
||||
@ -128,10 +128,10 @@ def ping(ip, port):
|
||||
if not k:
|
||||
return 0
|
||||
k = k[0]
|
||||
i |= (k & 0x7f) << (j * 7)
|
||||
i |= (k & 0x7F) << (j * 7)
|
||||
j += 1
|
||||
if j > 5:
|
||||
raise ValueError('var_int too big')
|
||||
raise ValueError("var_int too big")
|
||||
if not k & 0x80:
|
||||
return i
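# Illustrative sketch, not from the Crafty codebase: read_var_int() above decodes the
# Server List Ping VarInt encoding -- seven payload bits per byte, high bit set on
# every byte except the last. A standalone encoder/decoder pair showing the same math:
def encode_varint(value: int) -> bytes:
    out = b""
    while True:
        chunk = value & 0x7F
        value >>= 7
        out += bytes([chunk | 0x80]) if value else bytes([chunk])
        if not value:
            return out

def decode_varint(data: bytes) -> int:
    result = 0
    for shift, byte in enumerate(data):
        result |= (byte & 0x7F) << (shift * 7)
        if not byte & 0x80:
            return result

print(encode_varint(300))          # b'\xac\x02'
print(decode_varint(b"\xac\x02"))  # 300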
|
||||
|
||||
@ -143,15 +143,15 @@ def ping(ip, port):
|
||||
return False
|
||||
|
||||
try:
|
||||
host = ip.encode('utf-8')
|
||||
data = b'' # wiki.vg/Server_List_Ping
|
||||
data += b'\x00' # packet ID
|
||||
data += b'\x04' # protocol variant
|
||||
data += struct.pack('>b', len(host)) + host
|
||||
data += struct.pack('>H', port)
|
||||
data += b'\x01' # next state
|
||||
data = struct.pack('>b', len(data)) + data
|
||||
sock.sendall(data + b'\x01\x00') # handshake + status ping
|
||||
host = ip.encode("utf-8")
|
||||
data = b"" # wiki.vg/Server_List_Ping
|
||||
data += b"\x00" # packet ID
|
||||
data += b"\x04" # protocol variant
|
||||
data += struct.pack(">b", len(host)) + host
|
||||
data += struct.pack(">H", port)
|
||||
data += b"\x01" # next state
|
||||
data = struct.pack(">b", len(data)) + data
|
||||
sock.sendall(data + b"\x01\x00") # handshake + status ping
|
||||
length = read_var_int() # full packet length
|
||||
if length < 10:
|
||||
if length < 0:
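# Illustrative sketch, not from the Crafty codebase: the handshake assembled above is
# [packet id 0x00][protocol byte][host length + host][port as big-endian ushort]
# [next state 0x01], length-prefixed, followed by the 0x01 0x00 status request.
# Printing it for a hypothetical host makes the byte layout easy to see:
import struct

demo_host, demo_port = b"127.0.0.1", 25565
body = b"\x00" + b"\x04" + struct.pack(">b", len(demo_host)) + demo_host
body += struct.pack(">H", demo_port) + b"\x01"
handshake = struct.pack(">b", len(body)) + body
print((handshake + b"\x01\x00").hex())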
|
||||
@ -161,7 +161,7 @@ def ping(ip, port):
|
||||
|
||||
sock.recv(1) # packet type, 0 for pings
|
||||
length = read_var_int() # string length
|
||||
data = b''
|
||||
data = b""
|
||||
while len(data) != length:
|
||||
chunk = sock.recv(length - len(data))
|
||||
if not chunk:
|
||||
@ -176,13 +176,14 @@ def ping(ip, port):
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
|
||||
# For the rest of requests see wiki.vg/Protocol
|
||||
def ping_bedrock(ip, port):
|
||||
rd = random.Random()
|
||||
rand = random.Random()
|
||||
try:
|
||||
#pylint: disable=consider-using-f-string
|
||||
rd.seed(''.join(re.findall('..', '%012x' % uuid.getnode())))
|
||||
client_guid = uuid.UUID(int=rd.getrandbits(32)).int
|
||||
# pylint: disable=consider-using-f-string
|
||||
rand.seed("".join(re.findall("..", "%012x" % uuid.getnode())))
|
||||
client_guid = uuid.UUID(int=rand.getrandbits(32)).int
|
||||
except:
|
||||
client_guid = 0
|
||||
try:
|
||||
|
@ -1,66 +1,67 @@
|
||||
import pprint
|
||||
import os
|
||||
|
||||
class ServerProps:
|
||||
|
||||
def __init__(self, filepath):
|
||||
self.filepath = filepath
|
||||
self.props = self._parse()
|
||||
|
||||
def _parse(self):
|
||||
"""Loads and parses the file specified in self.filepath"""
|
||||
with open(self.filepath, encoding='utf-8') as fp:
|
||||
line = fp.readline()
|
||||
d = {}
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
|
||||
while line:
|
||||
if '#' != line[0]:
|
||||
s = line
|
||||
s1 = s[:s.find('=')]
|
||||
if '\n' in s:
|
||||
s2 = s[s.find('=')+1:s.find('\n')]
|
||||
else:
|
||||
s2 = s[s.find('=')+1:]
|
||||
d[s1] = s2
|
||||
else:
|
||||
with open(".header", "a+", encoding='utf-8') as h:
|
||||
h.write(line)
|
||||
line = fp.readline()
|
||||
return d
|
||||
|
||||
def print(self):
|
||||
"""Prints the properties dictionary (using pprint)"""
|
||||
pprint.pprint(self.props)
|
||||
|
||||
def get(self):
|
||||
"""Returns the properties dictionary"""
|
||||
return self.props
|
||||
|
||||
def update(self, key, val):
|
||||
"""Updates property in the properties dictionary [ update("pvp", "true") ] and returns boolean condition"""
|
||||
if key in self.props.keys():
|
||||
self.props[key] = val
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def save(self):
|
||||
"""Writes to the new file"""
|
||||
with open(self.filepath, "a+", encoding='utf-8') as f:
|
||||
f.truncate(0)
|
||||
with open(".header", encoding='utf-8') as header:
|
||||
line = header.readline()
|
||||
while line:
|
||||
f.write(line)
|
||||
line = header.readline()
|
||||
header.close()
|
||||
for key, value in self.props.items():
|
||||
f.write(key + "=" + value + "\n")
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
|
||||
|
||||
@staticmethod
|
||||
def cleanup():
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
|
||||
import pprint
|
||||
import os
|
||||
|
||||
|
||||
class ServerProps:
|
||||
def __init__(self, filepath):
|
||||
self.filepath = filepath
|
||||
self.props = self._parse()
|
||||
|
||||
def _parse(self):
|
||||
# Loads and parses the file specified in self.filepath
|
||||
with open(self.filepath, encoding="utf-8") as full_path:
|
||||
line = full_path.readline()
|
||||
dictionary = {}
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
|
||||
while line:
|
||||
if "#" != line[0]:
|
||||
string = line
|
||||
string1 = string[: string.find("=")]
|
||||
if "\n" in string:
|
||||
string2 = string[string.find("=") + 1 : string.find("\n")]
|
||||
else:
|
||||
string2 = string[string.find("=") + 1 :]
|
||||
dictionary[string1] = string2
|
||||
else:
|
||||
with open(".header", "a+", encoding="utf-8") as header:
|
||||
header.write(line)
|
||||
line = full_path.readline()
|
||||
return dictionary
|
||||
|
||||
def print(self):
|
||||
# Prints the properties dictionary (using pprint)
|
||||
pprint.pprint(self.props)
|
||||
|
||||
def get(self):
|
||||
# Returns the properties dictionary
|
||||
return self.props
|
||||
|
||||
def update(self, key, val):
|
||||
# Updates property in the properties dictionary [ update("pvp", "true") ]
|
||||
# and returns boolean condition
|
||||
if key in self.props.keys():
|
||||
self.props[key] = val
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def save(self):
|
||||
# Writes to the new file
|
||||
with open(self.filepath, "a+", encoding="utf-8") as f:
|
||||
f.truncate(0)
|
||||
with open(".header", encoding="utf-8") as header:
|
||||
line = header.readline()
|
||||
while line:
|
||||
f.write(line)
|
||||
line = header.readline()
|
||||
header.close()
|
||||
for key, value in self.props.items():
|
||||
f.write(key + "=" + value + "\n")
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
|
||||
|
||||
@staticmethod
|
||||
def cleanup():
|
||||
if os.path.exists(".header"):
|
||||
os.remove(".header")
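# Illustrative sketch, not from the Crafty codebase: _parse() above splits every
# non-comment line of server.properties on the first "=" and stashes comment lines
# in the .header file. The same splitting, shown standalone on an in-memory sample:
sample = "#Minecraft server properties\npvp=true\nmotd=A Crafty Server\n"

parsed = {}
for line in sample.splitlines():
    if line.startswith("#"):
        continue  # ServerProps writes these to .header instead
    key, _, value = line.partition("=")
    parsed[key] = value

print(parsed)  # {'pvp': 'true', 'motd': 'A Crafty Server'}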
|
||||
|
@ -4,45 +4,32 @@ import time
|
||||
import shutil
|
||||
import logging
|
||||
from datetime import datetime
|
||||
import requests
|
||||
|
||||
from app.classes.controllers.servers_controller import Servers_Controller
|
||||
from app.classes.models.server_permissions import server_permissions
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
from app.classes.controllers.servers_controller import ServersController
|
||||
from app.classes.models.server_permissions import PermissionsServers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import requests
|
||||
|
||||
except ModuleNotFoundError as err:
|
||||
helper.auto_installer_fix(err)
|
||||
|
||||
class ServerJars:
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, helper):
|
||||
self.helper = helper
|
||||
self.base_url = "https://serverjars.com"
|
||||
|
||||
def _get_api_result(self, call_url: str):
|
||||
full_url = f"{self.base_url}{call_url}"
|
||||
|
||||
try:
|
||||
r = requests.get(full_url, timeout=2)
|
||||
|
||||
if r.status_code not in [200, 201]:
|
||||
return {}
|
||||
response = requests.get(full_url, timeout=2)
|
||||
response.raise_for_status()
|
||||
api_data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to connect to serverjar.com api due to error: {e}")
|
||||
logger.error(f"Unable to load {full_url} api due to error: {e}")
|
||||
return {}
|
||||
|
||||
try:
|
||||
api_data = json.loads(r.content)
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to parse serverjar.com api result due to error: {e}")
|
||||
return {}
|
||||
|
||||
api_result = api_data.get('status')
|
||||
api_response = api_data.get('response', {})
|
||||
api_result = api_data.get("status")
|
||||
api_response = api_data.get("response", {})
|
||||
|
||||
if api_result != "success":
|
||||
logger.error(f"Api returned a failed status: {api_result}")
|
||||
@ -50,12 +37,11 @@ class ServerJars:
|
||||
|
||||
return api_response
|
||||
|
||||
@staticmethod
|
||||
def _read_cache():
|
||||
cache_file = helper.serverjar_cache
|
||||
def _read_cache(self):
|
||||
cache_file = self.helper.serverjar_cache
|
||||
cache = {}
|
||||
try:
|
||||
with open(cache_file, "r", encoding='utf-8') as f:
|
||||
with open(cache_file, "r", encoding="utf-8") as f:
|
||||
cache = json.load(f)
|
||||
|
||||
except Exception as e:
|
||||
@ -65,39 +51,16 @@ class ServerJars:
|
||||
|
||||
def get_serverjar_data(self):
|
||||
data = self._read_cache()
|
||||
return data.get('servers')
|
||||
|
||||
def get_serverjar_data_sorted(self):
|
||||
data = self.get_serverjar_data()
|
||||
|
||||
def str_to_int(x, counter=0):
|
||||
try:
|
||||
return ord(x[0]) + str_to_int(x[1:], counter + 1) + len(x)
|
||||
except IndexError:
|
||||
return 0
|
||||
|
||||
def to_int(x):
|
||||
try:
|
||||
return int(x)
|
||||
except ValueError:
|
||||
temp = x.split('-')
|
||||
return to_int(temp[0]) + str_to_int(temp[1]) / 100000
|
||||
|
||||
sort_key_fn = lambda x: [to_int(y) for y in x.split('.')]
|
||||
|
||||
for key in data.keys():
|
||||
data[key] = sorted(data[key], key=sort_key_fn)
|
||||
|
||||
return data
|
||||
return data.get("servers")
|
||||
|
||||
def _check_api_alive(self):
|
||||
logger.info("Checking serverjars.com API status")
|
||||
|
||||
check_url = f"{self.base_url}/api/fetchTypes"
|
||||
try:
|
||||
r = requests.get(check_url, timeout=2)
|
||||
response = requests.get(check_url, timeout=2)
|
||||
|
||||
if r.status_code in [200, 201]:
|
||||
if response.status_code in [200, 201]:
|
||||
logger.info("Serverjars.com API is alive")
|
||||
return True
|
||||
except Exception as e:
|
||||
@ -109,8 +72,8 @@ class ServerJars:
|
||||
|
||||
def refresh_cache(self):
|
||||
|
||||
cache_file = helper.serverjar_cache
|
||||
cache_old = helper.is_file_older_than_x_days(cache_file)
|
||||
cache_file = self.helper.serverjar_cache
|
||||
cache_old = self.helper.is_file_older_than_x_days(cache_file)
|
||||
|
||||
# debug override
|
||||
# cache_old = True
|
||||
@ -125,10 +88,7 @@ class ServerJars:
|
||||
if cache_old:
|
||||
logger.info("Cache file is over 1 day old, refreshing")
|
||||
now = datetime.now()
|
||||
data = {
|
||||
'last_refreshed': now.strftime("%m/%d/%Y, %H:%M:%S"),
|
||||
'servers': {}
|
||||
}
|
||||
data = {"last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"), "servers": {}}
|
||||
|
||||
jar_types = self._get_server_type_list()
|
||||
|
||||
@ -140,81 +100,89 @@ class ServerJars:
|
||||
# jar versions for this server
|
||||
versions = self._get_jar_details(s)
|
||||
|
||||
# add these versions (a list) to the dict with a key of the server type
|
||||
data['servers'].update({
|
||||
s: versions
|
||||
})
|
||||
# add these versions (a list) to the dict with
|
||||
# a key of the server type
|
||||
data["servers"].update({s: versions})
|
||||
|
||||
# save our cache
|
||||
try:
|
||||
with open(cache_file, "w", encoding='utf-8') as f:
|
||||
with open(cache_file, "w", encoding="utf-8") as f:
|
||||
f.write(json.dumps(data, indent=4))
|
||||
logger.info("Cache file refreshed")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to update serverjars.com cache file: {e}")
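# Illustrative sketch, not from the Crafty codebase: refresh_cache() above is a plain
# time-to-live cache -- rebuild the JSON file when it is missing or more than a day
# old, otherwise serve what is on disk. A standalone version of that check, with a
# hypothetical cache path and fetch callable:
import json
import os
import time

def refresh_if_stale(cache_file, fetch, max_age_seconds=86400):
    try:
        stale = (time.time() - os.path.getmtime(cache_file)) > max_age_seconds
    except OSError:  # cache file missing -> treat as stale
        stale = True
    if stale:
        with open(cache_file, "w", encoding="utf-8") as f:
            json.dump({"last_refreshed": time.time(), "servers": fetch()}, f, indent=4)
    with open(cache_file, "r", encoding="utf-8") as f:
        return json.load(f)

print(refresh_if_stale("serverjars_demo_cache.json", lambda: {}))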
|
||||
|
||||
def _get_jar_details(self, jar_type='servers'):
|
||||
url = f'/api/fetchAll/{jar_type}'
|
||||
def _get_jar_details(self, jar_type="servers"):
|
||||
url = f"/api/fetchAll/{jar_type}"
|
||||
response = self._get_api_result(url)
|
||||
temp = []
|
||||
for v in response:
|
||||
temp.append(v.get('version'))
|
||||
time.sleep(.5)
|
||||
temp.append(v.get("version"))
|
||||
time.sleep(0.5)
|
||||
return temp
|
||||
|
||||
def _get_server_type_list(self):
|
||||
url = '/api/fetchTypes/'
|
||||
url = "/api/fetchTypes/"
|
||||
response = self._get_api_result(url)
|
||||
return response
|
||||
|
||||
def download_jar(self, server, version, path, server_id):
|
||||
update_thread = threading.Thread(target=self.a_download_jar, daemon=True, args=(server, version, path, server_id))
|
||||
update_thread = threading.Thread(
|
||||
name=f"server_download-{server_id}-{server}-{version}",
|
||||
target=self.a_download_jar,
|
||||
daemon=True,
|
||||
args=(server, version, path, server_id),
|
||||
)
|
||||
update_thread.start()
|
||||
|
||||
def a_download_jar(self, server, version, path, server_id):
|
||||
#delaying download for server register to finish
|
||||
# delaying download for server register to finish
|
||||
time.sleep(3)
|
||||
fetch_url = f"{self.base_url}/api/fetchJar/{server}/{version}"
|
||||
server_users = server_permissions.get_server_user_list(server_id)
|
||||
server_users = PermissionsServers.get_server_user_list(server_id)
|
||||
|
||||
|
||||
#We need to make sure the server is registered before we submit a db update for it's stats.
|
||||
# We need to make sure the server is registered before
|
||||
# we submit a db update for it's stats.
|
||||
while True:
|
||||
try:
|
||||
Servers_Controller.set_download(server_id)
|
||||
ServersController.set_download(server_id)
|
||||
for user in server_users:
|
||||
websocket_helper.broadcast_user(user, 'send_start_reload', {
|
||||
})
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "send_start_reload", {}
|
||||
)
|
||||
|
||||
break
|
||||
except:
|
||||
logger.debug("server not registered yet. Delaying download.")
|
||||
except Exception as ex:
|
||||
logger.debug(f"server not registered yet. Delaying download - {ex}")
|
||||
|
||||
# open a file stream
|
||||
with requests.get(fetch_url, timeout=2, stream=True) as r:
|
||||
try:
|
||||
with open(path, 'wb') as output:
|
||||
with open(path, "wb") as output:
|
||||
shutil.copyfileobj(r.raw, output)
|
||||
Servers_Controller.finish_download(server_id)
|
||||
ServersController.finish_download(server_id)
|
||||
|
||||
for user in server_users:
|
||||
websocket_helper.broadcast_user(user, 'notification', "Executable download finished")
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "notification", "Executable download finished"
|
||||
)
|
||||
time.sleep(3)
|
||||
websocket_helper.broadcast_user(user, 'send_start_reload', {
|
||||
})
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "send_start_reload", {}
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to save jar to {path} due to error:{e}")
|
||||
Servers_Controller.finish_download(server_id)
|
||||
server_users = server_permissions.get_server_user_list(server_id)
|
||||
ServersController.finish_download(server_id)
|
||||
server_users = PermissionsServers.get_server_user_list(server_id)
|
||||
for user in server_users:
|
||||
websocket_helper.broadcast_user(user, 'notification', "Executable download finished")
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "notification", "Executable download finished"
|
||||
)
|
||||
time.sleep(3)
|
||||
websocket_helper.broadcast_user(user, 'send_start_reload', {
|
||||
})
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "send_start_reload", {}
|
||||
)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
server_jar_obj = ServerJars()
|
||||
|
@ -1,53 +1,140 @@
|
||||
from __future__ import annotations
|
||||
from contextlib import redirect_stderr
|
||||
import json
|
||||
import logging
|
||||
import datetime
|
||||
import base64
|
||||
import psutil
|
||||
import typing as t
|
||||
|
||||
from app.classes.shared.null_writer import NullWriter
|
||||
from app.classes.minecraft.mc_ping import ping
|
||||
from app.classes.models.management import Host_Stats
|
||||
from app.classes.models.servers import servers_helper
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.models.management import HostStats
|
||||
from app.classes.models.servers import HelperServers
|
||||
from app.classes.shared.helpers import Helpers
|
||||
|
||||
with redirect_stderr(NullWriter()):
|
||||
import psutil
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from app.classes.shared.main_controller import Controller
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Stats:
|
||||
|
||||
def __init__(self, controller):
|
||||
class DiskDataDict(t.TypedDict):
|
||||
device: str
|
||||
total_raw: int
|
||||
total: str
|
||||
used_raw: int
|
||||
used: str
|
||||
free_raw: int
|
||||
free: str
|
||||
percent_used: float
|
||||
fs: str
|
||||
mount: str
|
||||
|
||||
|
||||
class NodeStatsDict(t.TypedDict):
|
||||
boot_time: str
|
||||
cpu_usage: float
|
||||
cpu_count: int
|
||||
cpu_cur_freq: float
|
||||
cpu_max_freq: float
|
||||
mem_percent: float
|
||||
mem_usage_raw: int
|
||||
mem_usage: str
|
||||
mem_total_raw: int
|
||||
mem_total: str
|
||||
disk_data: t.List[DiskDataDict]
|
||||
|
||||
|
||||
class NodeStatsReturnDict(t.TypedDict):
|
||||
node_stats: NodeStatsDict
|
||||
|
||||
|
||||
class Stats:
|
||||
helper: Helpers
|
||||
controller: Controller
|
||||
|
||||
@staticmethod
|
||||
def try_get_boot_time():
|
||||
try:
|
||||
return datetime.datetime.fromtimestamp(
|
||||
psutil.boot_time(), datetime.timezone.utc
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug(f"error while getting boot time due to {e}")
|
||||
# unix epoch with no timezone data
|
||||
return datetime.datetime.fromtimestamp(0, datetime.timezone.utc)
|
||||
|
||||
@staticmethod
|
||||
def try_get_cpu_usage():
|
||||
try:
|
||||
return psutil.cpu_percent(interval=0.5) / psutil.cpu_count()
|
||||
except Exception as e:
|
||||
logger.debug(f"error while getting cpu percentage due to {e}")
|
||||
return -1
|
||||
|
||||
def __init__(self, helper, controller):
|
||||
self.helper = helper
|
||||
self.controller = controller
|
||||
|
||||
def get_node_stats(self):
|
||||
boot_time = datetime.datetime.fromtimestamp(psutil.boot_time())
|
||||
data = {}
|
||||
def get_node_stats(self) -> NodeStatsReturnDict:
|
||||
try:
|
||||
cpu_freq = psutil.cpu_freq()
|
||||
except NotImplementedError:
|
||||
cpu_freq = psutil._common.scpufreq(current=0, min=0, max=0)
|
||||
node_stats = {
|
||||
'boot_time': str(boot_time),
|
||||
'cpu_usage': psutil.cpu_percent(interval=0.5) / psutil.cpu_count(),
|
||||
'cpu_count': psutil.cpu_count(),
|
||||
'cpu_cur_freq': round(cpu_freq[0], 2),
|
||||
'cpu_max_freq': cpu_freq[2],
|
||||
'mem_percent': psutil.virtual_memory()[2],
|
||||
'mem_usage': helper.human_readable_file_size(psutil.virtual_memory()[3]),
|
||||
'mem_total': helper.human_readable_file_size(psutil.virtual_memory()[0]),
|
||||
'disk_data': self._all_disk_usage()
|
||||
}
|
||||
#server_stats = self.get_servers_stats()
|
||||
#data['servers'] = server_stats
|
||||
data['node_stats'] = node_stats
|
||||
memory = psutil.virtual_memory()
|
||||
try:
|
||||
node_stats: NodeStatsDict = {
|
||||
"boot_time": str(Stats.try_get_boot_time()),
|
||||
"cpu_usage": Stats.try_get_cpu_usage(),
|
||||
"cpu_count": psutil.cpu_count(),
|
||||
"cpu_cur_freq": round(cpu_freq[0], 2),
|
||||
"cpu_max_freq": cpu_freq[2],
|
||||
"mem_percent": memory.percent,
|
||||
"mem_usage_raw": memory.used,
|
||||
"mem_usage": Helpers.human_readable_file_size(memory.used),
|
||||
"mem_total_raw": memory.total,
|
||||
"mem_total": Helpers.human_readable_file_size(memory.total),
|
||||
"disk_data": Stats._try_all_disk_usage(),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.debug(f"error while getting host stats due to {e}")
|
||||
node_stats: NodeStatsDict = {
|
||||
"boot_time": str(
|
||||
datetime.datetime.fromtimestamp(0, datetime.timezone.utc)
|
||||
),
|
||||
"cpu_usage": -1,
|
||||
"cpu_count": -1,
|
||||
"cpu_cur_freq": -1,
|
||||
"cpu_max_freq": -1,
|
||||
"mem_percent": -1,
|
||||
"mem_usage_raw": -1,
|
||||
"mem_usage": "",
|
||||
"mem_total_raw": -1,
|
||||
"mem_total": "",
|
||||
"disk_data": [],
|
||||
}
|
||||
# server_stats = self.get_servers_stats()
|
||||
# data['servers'] = server_stats
|
||||
|
||||
return data
|
||||
return {
|
||||
"node_stats": node_stats,
|
||||
}
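# Illustrative sketch, not from the Crafty codebase: the node stats above come straight
# from psutil, with each call wrapped so a platform that cannot answer returns a
# sentinel instead of crashing the stats collector. The core calls, shown bare:
import datetime
import psutil

memory = psutil.virtual_memory()
print({
    "boot_time": str(datetime.datetime.fromtimestamp(psutil.boot_time(), datetime.timezone.utc)),
    "cpu_usage": psutil.cpu_percent(interval=0.5) / psutil.cpu_count(),
    "cpu_count": psutil.cpu_count(),
    "mem_percent": memory.percent,
    "mem_total": memory.total,
})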
|
||||
|
||||
@staticmethod
|
||||
def _try_get_process_stats(process):
|
||||
try:
|
||||
return Stats._get_process_stats(process)
|
||||
except Exception as e:
|
||||
logger.debug(f"error while getting process stats due to {e}")
|
||||
return {"cpu_usage": -1, "memory_usage": -1, "mem_percentage": -1}
|
||||
|
||||
@staticmethod
|
||||
def _get_process_stats(process):
|
||||
if process is None:
|
||||
process_stats = {
|
||||
'cpu_usage': 0,
|
||||
'memory_usage': 0,
|
||||
'mem_percentage': 0
|
||||
}
|
||||
process_stats = {"cpu_usage": 0, "memory_usage": 0, "mem_percentage": 0}
|
||||
return process_stats
|
||||
else:
|
||||
process_pid = process.pid
|
||||
@ -63,31 +150,43 @@ class Stats:
|
||||
# this is a faster way of getting data for a process
|
||||
with p.oneshot():
|
||||
process_stats = {
|
||||
'cpu_usage': real_cpu,
|
||||
'memory_usage': helper.human_readable_file_size(p.memory_info()[0]),
|
||||
'mem_percentage': round(p.memory_percent(), 0)
|
||||
"cpu_usage": real_cpu,
|
||||
"memory_usage": Helpers.human_readable_file_size(
|
||||
p.memory_info()[0]
|
||||
),
|
||||
"mem_percentage": round(p.memory_percent(), 0),
|
||||
}
|
||||
return process_stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to get process details for pid: {process_pid} due to error: {e}")
|
||||
logger.error(
|
||||
f"Unable to get process details for pid: {process_pid} Error: {e}"
|
||||
)
|
||||
|
||||
# Dummy Data
|
||||
process_stats = {
|
||||
'cpu_usage': 0,
|
||||
'memory_usage': 0,
|
||||
"cpu_usage": 0,
|
||||
"memory_usage": 0,
|
||||
}
|
||||
return process_stats
|
||||
|
||||
# shamelessly stolen from https://github.com/giampaolo/psutil/blob/master/scripts/disk_usage.py
|
||||
@staticmethod
|
||||
def _all_disk_usage():
|
||||
def _try_all_disk_usage():
|
||||
try:
|
||||
return Stats._all_disk_usage()
|
||||
except Exception as e:
|
||||
logger.debug(f"error while getting disk data due to {e}")
|
||||
return []
|
||||
|
||||
# Source: https://github.com/giampaolo/psutil/blob/master/scripts/disk_usage.py
|
||||
@staticmethod
|
||||
def _all_disk_usage() -> t.List[DiskDataDict]:
|
||||
disk_data = []
|
||||
# print(templ % ("Device", "Total", "Used", "Free", "Use ", "Type","Mount"))
|
||||
|
||||
for part in psutil.disk_partitions(all=False):
|
||||
if helper.is_os_windows():
|
||||
if 'cdrom' in part.opts or part.fstype == '':
|
||||
if Helpers.is_os_windows():
|
||||
if "cdrom" in part.opts or part.fstype == "":
|
||||
# skip cd-rom drives with no disk in it; they may raise
|
||||
# ENOENT, pop-up a Windows GUI error for a non-ready
|
||||
# partition or just hang.
|
||||
@ -95,13 +194,16 @@ class Stats:
|
||||
usage = psutil.disk_usage(part.mountpoint)
|
||||
disk_data.append(
|
||||
{
|
||||
'device': part.device,
|
||||
'total': helper.human_readable_file_size(usage.total),
|
||||
'used': helper.human_readable_file_size(usage.used),
|
||||
'free': helper.human_readable_file_size(usage.free),
|
||||
'percent_used': int(usage.percent),
|
||||
'fs': part.fstype,
|
||||
'mount': part.mountpoint
|
||||
"device": part.device,
|
||||
"total_raw": usage.total,
|
||||
"total": Helpers.human_readable_file_size(usage.total),
|
||||
"used_raw": usage.used,
|
||||
"used": Helpers.human_readable_file_size(usage.used),
|
||||
"free_raw": usage.free,
|
||||
"free": Helpers.human_readable_file_size(usage.free),
|
||||
"percent_used": usage.percent,
|
||||
"fs": part.fstype,
|
||||
"mount": part.mountpoint,
|
||||
}
|
||||
)
|
||||
|
||||
@ -112,15 +214,15 @@ class Stats:
|
||||
|
||||
total_size = 0
|
||||
|
||||
total_size = helper.get_dir_size(server_path)
|
||||
total_size = Helpers.get_dir_size(server_path)
|
||||
|
||||
level_total_size = helper.human_readable_file_size(total_size)
|
||||
level_total_size = Helpers.human_readable_file_size(total_size)
|
||||
|
||||
return level_total_size
|
||||
|
||||
def get_server_players(self, server_id):
|
||||
|
||||
server = servers_helper.get_server_data_by_id(server_id)
|
||||
server = HelperServers.get_server_data_by_id(server_id)
|
||||
|
||||
logger.info(f"Getting players for server {server}")
|
||||
|
||||
@ -128,22 +230,20 @@ class Stats:
|
||||
# server_settings = server.get('server_settings', {})
|
||||
# server_data = server.get('server_data_obj', {})
|
||||
|
||||
|
||||
# TODO: search server properties file for possible override of 127.0.0.1
|
||||
internal_ip = server['server_ip']
|
||||
server_port = server['server_port']
|
||||
internal_ip = server["server_ip"]
|
||||
server_port = server["server_port"]
|
||||
|
||||
logger.debug("Pinging {internal_ip} on port {server_port}")
|
||||
if servers_helper.get_server_type_by_id(server_id) != 'minecraft-bedrock':
|
||||
logger.debug(f"Pinging {internal_ip} on port {server_port}")
|
||||
if HelperServers.get_server_type_by_id(server_id) != "minecraft-bedrock":
|
||||
int_mc_ping = ping(internal_ip, int(server_port))
|
||||
|
||||
|
||||
ping_data = {}
|
||||
|
||||
# if we got a good ping return, let's parse it
|
||||
if int_mc_ping:
|
||||
ping_data = Stats.parse_server_ping(int_mc_ping)
|
||||
return ping_data['players']
|
||||
return ping_data["players"]
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
@ -156,87 +256,64 @@ class Stats:
|
||||
except Exception as e:
|
||||
logger.info(f"Unable to read json from ping_obj: {e}")
|
||||
|
||||
|
||||
try:
|
||||
server_icon = base64.encodebytes(ping_obj.icon)
|
||||
server_icon = server_icon.decode('utf-8')
|
||||
except Exception as e:
|
||||
server_icon = server_icon.decode("utf-8")
|
||||
except Exception as e:
|
||||
server_icon = False
|
||||
logger.info(f"Unable to read the server icon : {e}")
|
||||
|
||||
ping_data = {
|
||||
'online': online_stats.get("online", 0),
|
||||
'max': online_stats.get('max', 0),
|
||||
'players': online_stats.get('players', 0),
|
||||
'server_description': ping_obj.description,
|
||||
'server_version': ping_obj.version,
|
||||
'server_icon': server_icon
|
||||
"online": online_stats.get("online", 0),
|
||||
"max": online_stats.get("max", 0),
|
||||
"players": online_stats.get("players", 0),
|
||||
"server_description": ping_obj.description,
|
||||
"server_version": ping_obj.version,
|
||||
"server_icon": server_icon,
|
||||
}
|
||||
|
||||
return ping_data
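The icon handling in parse_server_ping base64-encodes the raw favicon bytes from the ping response and decodes the result to UTF-8 text, falling back to False when no icon is available. A minimal sketch of that round trip, with dummy bytes standing in for ping_obj.icon:

import base64

raw_icon = b"\x89PNG\r\n\x1a\n"  # dummy bytes standing in for ping_obj.icon

try:
    server_icon = base64.encodebytes(raw_icon).decode("utf-8")
except Exception:
    # Mirror the fallback above: no usable icon.
    server_icon = False

print(server_icon)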
|
||||
|
||||
@staticmethod
|
||||
def parse_server_RakNet_ping(ping_obj: object):
|
||||
def parse_server_raknet_ping(ping_obj: object):
|
||||
|
||||
try:
|
||||
server_icon = base64.encodebytes(ping_obj['icon'])
|
||||
except Exception as e:
|
||||
server_icon = base64.encodebytes(ping_obj["icon"])
|
||||
except Exception as e:
|
||||
server_icon = False
|
||||
logger.info(f"Unable to read the server icon : {e}")
|
||||
ping_data = {
|
||||
'online': ping_obj['server_player_count'],
|
||||
'max': ping_obj['server_player_max'],
|
||||
'players': [],
|
||||
'server_description': ping_obj['server_edition'],
|
||||
'server_version': ping_obj['server_version_name'],
|
||||
'server_icon': server_icon
|
||||
"online": ping_obj["server_player_count"],
|
||||
"max": ping_obj["server_player_max"],
|
||||
"players": [],
|
||||
"server_description": ping_obj["server_edition"],
|
||||
"server_version": ping_obj["server_version_name"],
|
||||
"server_icon": server_icon,
|
||||
}
|
||||
|
||||
|
||||
return ping_data
|
||||
|
||||
|
||||
def record_stats(self):
|
||||
stats_to_send = self.get_node_stats()
|
||||
node_stats = stats_to_send.get('node_stats')
|
||||
node_stats = stats_to_send["node_stats"]
|
||||
|
||||
Host_Stats.insert({
|
||||
Host_Stats.boot_time: node_stats.get('boot_time', "Unknown"),
|
||||
Host_Stats.cpu_usage: round(node_stats.get('cpu_usage', 0), 2),
|
||||
Host_Stats.cpu_cores: node_stats.get('cpu_count', 0),
|
||||
Host_Stats.cpu_cur_freq: node_stats.get('cpu_cur_freq', 0),
|
||||
Host_Stats.cpu_max_freq: node_stats.get('cpu_max_freq', 0),
|
||||
Host_Stats.mem_usage: node_stats.get('mem_usage', "0 MB"),
|
||||
Host_Stats.mem_percent: node_stats.get('mem_percent', 0),
|
||||
Host_Stats.mem_total: node_stats.get('mem_total', "0 MB"),
|
||||
Host_Stats.disk_json: node_stats.get('disk_data', '{}')
|
||||
}).execute()
|
||||
|
||||
# server_stats = stats_to_send.get('servers')#
|
||||
#
|
||||
# for server in server_stats:
|
||||
# Server_Stats.insert({
|
||||
# Server_Stats.server_id: server.get('id', 0),
|
||||
# Server_Stats.started: server.get('started', ""),
|
||||
# Server_Stats.running: server.get('running', False),
|
||||
# Server_Stats.cpu: server.get('cpu', 0),
|
||||
# Server_Stats.mem: server.get('mem', 0),
|
||||
# Server_Stats.mem_percent: server.get('mem_percent', 0),
|
||||
# Server_Stats.world_name: server.get('world_name', ""),
|
||||
# Server_Stats.world_size: server.get('world_size', ""),
|
||||
# Server_Stats.server_port: server.get('server_port', ""),
|
||||
# Server_Stats.int_ping_results: server.get('int_ping_results', False),
|
||||
# Server_Stats.online: server.get("online", False),
|
||||
# Server_Stats.max: server.get("max", False),
|
||||
# Server_Stats.players: server.get("players", False),
|
||||
# Server_Stats.desc: server.get("desc", False),
|
||||
# Server_Stats.version: server.get("version", False)
|
||||
# }).execute()
|
||||
HostStats.insert(
|
||||
{
|
||||
HostStats.boot_time: node_stats.get("boot_time", "Unknown"),
|
||||
HostStats.cpu_usage: round(node_stats.get("cpu_usage", 0), 2),
|
||||
HostStats.cpu_cores: node_stats.get("cpu_count", 0),
|
||||
HostStats.cpu_cur_freq: node_stats.get("cpu_cur_freq", 0),
|
||||
HostStats.cpu_max_freq: node_stats.get("cpu_max_freq", 0),
|
||||
HostStats.mem_usage: node_stats.get("mem_usage", "0 MB"),
|
||||
HostStats.mem_percent: node_stats.get("mem_percent", 0),
|
||||
HostStats.mem_total: node_stats.get("mem_total", "0 MB"),
|
||||
HostStats.disk_json: node_stats.get("disk_data", "{}"),
|
||||
}
|
||||
).execute()
|
||||
|
||||
# delete old data
|
||||
max_age = helper.get_setting("history_max_age")
|
||||
max_age = self.helper.get_setting("history_max_age")
|
||||
now = datetime.datetime.now()
|
||||
last_week = now.day - max_age
|
||||
minimum_to_exist = now - datetime.timedelta(days=max_age)
|
||||
|
||||
Host_Stats.delete().where(Host_Stats.time < last_week).execute()
|
||||
# Server_Stats.delete().where(Server_Stats.created < last_week).execute()
|
||||
HostStats.delete().where(HostStats.time < minimum_to_exist).execute()
|
||||
|
app/classes/models/base_model.py (new file, 8 lines)
@ -0,0 +1,8 @@
|
||||
import peewee
|
||||
|
||||
database_proxy = peewee.DatabaseProxy()
|
||||
|
||||
|
||||
class BaseModel(peewee.Model):
|
||||
class Meta:
|
||||
database = database_proxy
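The new base_model.py binds every model to a peewee DatabaseProxy instead of a concrete SqliteDatabase, so the real connection can be supplied once at startup. A hedged sketch of how such a proxy is typically initialized; the database path here is a placeholder, while the pragmas mirror the ones removed elsewhere in this merge. Queries through BaseModel subclasses will fail until initialize() has run, so this wiring has to happen early.

import peewee

from app.classes.models.base_model import database_proxy

# Placeholder path; the real location comes from Crafty's configuration.
database = peewee.SqliteDatabase(
    "crafty.sqlite",
    pragmas={"journal_mode": "wal", "cache_size": -1024 * 10},
)

# Point the proxy at the real database; all BaseModel subclasses now use it.
database_proxy.initialize(database)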
|
@ -1,29 +1,24 @@
|
||||
import logging
|
||||
import typing as t
|
||||
from enum import Enum
|
||||
from peewee import (
|
||||
ForeignKeyField,
|
||||
CharField,
|
||||
IntegerField,
|
||||
DoesNotExist,
|
||||
)
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.permission_helper import permission_helper
|
||||
from app.classes.models.users import Users, ApiKeys
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, IntegerField, DoesNotExist
|
||||
from enum import Enum
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.models.base_model import BaseModel
|
||||
from app.classes.models.users import Users, ApiKeys, HelperUsers
|
||||
from app.classes.shared.permission_helper import PermissionHelper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# User_Crafty Class
|
||||
#************************************************************************************************
|
||||
class User_Crafty(Model):
|
||||
user_id = ForeignKeyField(Users, backref='users_crafty')
|
||||
# **********************************************************************************
|
||||
class UserCrafty(BaseModel):
|
||||
user_id = ForeignKeyField(Users, backref="users_crafty")
|
||||
permissions = CharField(default="00000000")
|
||||
limit_server_creation = IntegerField(default=-1)
|
||||
limit_user_creation = IntegerField(default=0)
|
||||
@ -33,167 +28,195 @@ class User_Crafty(Model):
|
||||
created_role = IntegerField(default=0)
|
||||
|
||||
class Meta:
|
||||
table_name = 'user_crafty'
|
||||
database = database
|
||||
table_name = "user_crafty"
|
||||
|
||||
#************************************************************************************************
|
||||
|
||||
# **********************************************************************************
|
||||
# Crafty Permissions Class
|
||||
#************************************************************************************************
|
||||
class Enum_Permissions_Crafty(Enum):
|
||||
Server_Creation = 0
|
||||
User_Config = 1
|
||||
Roles_Config = 2
|
||||
# **********************************************************************************
|
||||
class EnumPermissionsCrafty(Enum):
|
||||
SERVER_CREATION = 0
|
||||
USER_CONFIG = 1
|
||||
ROLES_CONFIG = 2
|
||||
|
||||
class Permissions_Crafty:
|
||||
|
||||
#************************************************************************************************
|
||||
class PermissionsCrafty:
|
||||
# **********************************************************************************
|
||||
# Crafty Permissions Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_permissions_list():
|
||||
permissions_list = []
|
||||
for member in Enum_Permissions_Crafty.__members__.items():
|
||||
permissions_list.append(member[1])
|
||||
return permissions_list
|
||||
return list(EnumPermissionsCrafty.__members__.values())
|
||||
|
||||
@staticmethod
|
||||
def get_permissions(permissions_mask):
|
||||
permissions_list = []
|
||||
for member in Enum_Permissions_Crafty.__members__.items():
|
||||
if crafty_permissions.has_permission(permissions_mask, member[1]):
|
||||
permissions_list.append(member[1])
|
||||
return permissions_list
|
||||
return [
|
||||
permission
|
||||
for permission in EnumPermissionsCrafty.__members__.values()
|
||||
if PermissionsCrafty.has_permission(permissions_mask, permission)
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def has_permission(permission_mask, permission_tested: Enum_Permissions_Crafty):
|
||||
result = False
|
||||
if permission_mask[permission_tested.value] == '1':
|
||||
result = True
|
||||
return result
|
||||
def has_permission(permission_mask, permission_tested: EnumPermissionsCrafty):
|
||||
return permission_mask[permission_tested.value] == "1"
|
||||
|
||||
@staticmethod
|
||||
def set_permission(permission_mask, permission_tested: Enum_Permissions_Crafty, value):
|
||||
l = list(permission_mask)
|
||||
l[permission_tested.value] = str(value)
|
||||
permission_mask = ''.join(l)
|
||||
return permission_mask
|
||||
def set_permission(
|
||||
permission_mask, permission_tested: EnumPermissionsCrafty, value
|
||||
):
|
||||
index = permission_tested.value
|
||||
return permission_mask[:index] + str(value) + permission_mask[index + 1 :]
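Crafty's permission masks are plain strings of '0'/'1' characters indexed by the enum value, and the rewritten set_permission builds a new string by slicing instead of mutating a list. A standalone sketch of the same convention, using a local demo enum rather than the real EnumPermissionsCrafty:

from enum import Enum


class EnumPermissionsCraftyDemo(Enum):
    SERVER_CREATION = 0
    USER_CONFIG = 1
    ROLES_CONFIG = 2


def has_permission(mask: str, perm: EnumPermissionsCraftyDemo) -> bool:
    return mask[perm.value] == "1"


def set_permission(mask: str, perm: EnumPermissionsCraftyDemo, value: int) -> str:
    index = perm.value
    return mask[:index] + str(value) + mask[index + 1:]


mask = "000"
mask = set_permission(mask, EnumPermissionsCraftyDemo.USER_CONFIG, 1)
assert mask == "010"
assert has_permission(mask, EnumPermissionsCraftyDemo.USER_CONFIG)
assert not has_permission(mask, EnumPermissionsCraftyDemo.SERVER_CREATION)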
|
||||
|
||||
@staticmethod
|
||||
def get_permission(permission_mask, permission_tested: Enum_Permissions_Crafty):
|
||||
def get_permission(permission_mask, permission_tested: EnumPermissionsCrafty):
|
||||
return permission_mask[permission_tested.value]
|
||||
|
||||
@staticmethod
|
||||
def get_crafty_permissions_mask(user_id):
|
||||
permissions_mask = ''
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
# TODO: only get the permissions of the UserCrafty
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
permissions_mask = user_crafty.permissions
|
||||
return permissions_mask
|
||||
|
||||
@staticmethod
|
||||
def get_all_permission_quantity_list():
|
||||
quantity_list = {
|
||||
Enum_Permissions_Crafty.Server_Creation.name: -1,
|
||||
Enum_Permissions_Crafty.User_Config.name: -1,
|
||||
Enum_Permissions_Crafty.Roles_Config.name: -1,
|
||||
}
|
||||
return quantity_list
|
||||
return {name: -1 for name in EnumPermissionsCrafty.__members__.keys()}
|
||||
|
||||
@staticmethod
|
||||
def get_permission_quantity_list(user_id):
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
quantity_list = {
|
||||
Enum_Permissions_Crafty.Server_Creation.name: user_crafty.limit_server_creation,
|
||||
Enum_Permissions_Crafty.User_Config.name: user_crafty.limit_user_creation,
|
||||
Enum_Permissions_Crafty.Roles_Config.name: user_crafty.limit_role_creation,
|
||||
EnumPermissionsCrafty.SERVER_CREATION.name: user_crafty.limit_server_creation, # pylint: disable=line-too-long
|
||||
EnumPermissionsCrafty.USER_CONFIG.name: user_crafty.limit_user_creation,
|
||||
EnumPermissionsCrafty.ROLES_CONFIG.name: user_crafty.limit_role_creation,
|
||||
}
|
||||
return quantity_list
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# User_Crafty Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_User_Crafty(user_id):
|
||||
def get_user_crafty(user_id):
|
||||
try:
|
||||
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
||||
user_crafty = UserCrafty.get(UserCrafty.user_id == user_id)
|
||||
except DoesNotExist:
|
||||
user_crafty = User_Crafty.insert({
|
||||
User_Crafty.user_id: user_id,
|
||||
User_Crafty.permissions: "000",
|
||||
User_Crafty.limit_server_creation: 0,
|
||||
User_Crafty.limit_user_creation: 0,
|
||||
User_Crafty.limit_role_creation: 0,
|
||||
User_Crafty.created_server: 0,
|
||||
User_Crafty.created_user: 0,
|
||||
User_Crafty.created_role: 0,
|
||||
}).execute()
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
UserCrafty.insert(
|
||||
{
|
||||
UserCrafty.user_id: user_id,
|
||||
UserCrafty.permissions: "000",
|
||||
UserCrafty.limit_server_creation: 0,
|
||||
UserCrafty.limit_user_creation: 0,
|
||||
UserCrafty.limit_role_creation: 0,
|
||||
UserCrafty.created_server: 0,
|
||||
UserCrafty.created_user: 0,
|
||||
UserCrafty.created_role: 0,
|
||||
}
|
||||
).execute()
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
return user_crafty
|
||||
|
||||
@staticmethod
|
||||
def get_user_crafty_optional(user_id) -> t.Optional[UserCrafty]:
|
||||
try:
|
||||
return UserCrafty.get(UserCrafty.user_id == user_id)
|
||||
except DoesNotExist:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def add_user_crafty(user_id, uc_permissions):
|
||||
user_crafty = User_Crafty.insert({User_Crafty.user_id: user_id, User_Crafty.permissions: uc_permissions}).execute()
|
||||
user_crafty = UserCrafty.insert(
|
||||
{UserCrafty.user_id: user_id, UserCrafty.permissions: uc_permissions}
|
||||
).execute()
|
||||
return user_crafty
|
||||
|
||||
@staticmethod
|
||||
def add_or_update_user(user_id, permissions_mask, limit_server_creation, limit_user_creation, limit_role_creation):
|
||||
try:
|
||||
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
||||
user_crafty.permissions = permissions_mask
|
||||
user_crafty.limit_server_creation = limit_server_creation
|
||||
user_crafty.limit_user_creation = limit_user_creation
|
||||
user_crafty.limit_role_creation = limit_role_creation
|
||||
User_Crafty.save(user_crafty)
|
||||
except:
|
||||
User_Crafty.insert({
|
||||
User_Crafty.user_id: user_id,
|
||||
User_Crafty.permissions: permissions_mask,
|
||||
User_Crafty.limit_server_creation: limit_server_creation,
|
||||
User_Crafty.limit_user_creation: limit_user_creation,
|
||||
User_Crafty.limit_role_creation: limit_role_creation
|
||||
}).execute()
|
||||
def add_or_update_user(
|
||||
user_id,
|
||||
permissions_mask,
|
||||
limit_server_creation,
|
||||
limit_user_creation,
|
||||
limit_role_creation,
|
||||
):
|
||||
# http://docs.peewee-orm.com/en/latest/peewee/querying.html#upsert
|
||||
|
||||
UserCrafty.replace(
|
||||
{
|
||||
UserCrafty.user_id: user_id,
|
||||
UserCrafty.permissions: permissions_mask,
|
||||
UserCrafty.limit_server_creation: limit_server_creation,
|
||||
UserCrafty.limit_user_creation: limit_user_creation,
|
||||
UserCrafty.limit_role_creation: limit_role_creation,
|
||||
}
|
||||
).execute()
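add_or_update_user now leans on peewee's Model.replace(), an INSERT that resolves conflicts by replacing the existing row (the upsert pattern referenced in the comment above). A toy illustration against a throwaway in-memory model, not Crafty's schema:

import peewee

db = peewee.SqliteDatabase(":memory:")


class Setting(peewee.Model):
    key = peewee.CharField(primary_key=True)
    value = peewee.CharField()

    class Meta:
        database = db


db.connect()
db.create_tables([Setting])

# First call inserts, second call replaces the conflicting row.
Setting.replace({Setting.key: "theme", Setting.value: "dark"}).execute()
Setting.replace({Setting.key: "theme", Setting.value: "light"}).execute()

assert Setting.get(Setting.key == "theme").value == "light"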
|
||||
|
||||
@staticmethod
|
||||
def get_created_quantity_list(user_id):
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
quantity_list = {
|
||||
Enum_Permissions_Crafty.Server_Creation.name: user_crafty.created_server,
|
||||
Enum_Permissions_Crafty.User_Config.name: user_crafty.created_user,
|
||||
Enum_Permissions_Crafty.Roles_Config.name: user_crafty.created_role,
|
||||
EnumPermissionsCrafty.SERVER_CREATION.name: user_crafty.created_server,
|
||||
EnumPermissionsCrafty.USER_CONFIG.name: user_crafty.created_user,
|
||||
EnumPermissionsCrafty.ROLES_CONFIG.name: user_crafty.created_role,
|
||||
}
|
||||
return quantity_list
|
||||
|
||||
@staticmethod
|
||||
def get_crafty_limit_value(user_id, permission):
|
||||
quantity_list = crafty_permissions.get_permission_quantity_list(user_id)
|
||||
quantity_list = PermissionsCrafty.get_permission_quantity_list(user_id)
|
||||
return quantity_list[permission]
|
||||
|
||||
@staticmethod
|
||||
def can_add_in_crafty(user_id, permission):
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
can = crafty_permissions.has_permission(user_crafty.permissions, permission)
|
||||
limit_list = crafty_permissions.get_permission_quantity_list(user_id)
|
||||
quantity_list = crafty_permissions.get_created_quantity_list(user_id)
|
||||
return can and ((quantity_list[permission.name] < limit_list[permission.name]) or limit_list[permission.name] == -1 )
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
can = PermissionsCrafty.has_permission(user_crafty.permissions, permission)
|
||||
limit_list = PermissionsCrafty.get_permission_quantity_list(user_id)
|
||||
quantity_list = PermissionsCrafty.get_created_quantity_list(user_id)
|
||||
return can and (
|
||||
(quantity_list[permission.name] < limit_list[permission.name])
|
||||
or limit_list[permission.name] == -1
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_server_creation(user_id):
|
||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||
user_crafty.created_server += 1
|
||||
User_Crafty.save(user_crafty)
|
||||
return user_crafty.created_server
|
||||
def add_server_creation(user_id: int):
|
||||
"""Increase the "Server Creation" counter for this user
|
||||
|
||||
Args:
|
||||
user_id (int): ID of the user whose counter should be incremented
|
||||
"""
|
||||
UserCrafty.update(created_server=UserCrafty.created_server + 1).where(
|
||||
UserCrafty.user_id == user_id
|
||||
).execute()
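The new add_server_creation pushes the increment into a single UPDATE with a column expression, so the database evaluates created_server + 1 itself instead of the earlier read-modify-write, which could drop counts under concurrent requests. A toy peewee sketch of the same pattern:

import peewee

db = peewee.SqliteDatabase(":memory:")


class Counter(peewee.Model):
    user_id = peewee.IntegerField(primary_key=True)
    created_server = peewee.IntegerField(default=0)

    class Meta:
        database = db


db.connect()
db.create_tables([Counter])
Counter.create(user_id=1)

# Atomic increment: the expression is computed inside the UPDATE statement.
Counter.update(created_server=Counter.created_server + 1).where(
    Counter.user_id == 1
).execute()

assert Counter.get_by_id(1).created_server == 1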
|
||||
|
||||
@staticmethod
|
||||
def add_user_creation(user_id):
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
user_crafty.created_user += 1
|
||||
UserCrafty.save(user_crafty)
|
||||
return user_crafty.created_user
|
||||
|
||||
@staticmethod
|
||||
def add_role_creation(user_id):
|
||||
user_crafty = PermissionsCrafty.get_user_crafty(user_id)
|
||||
user_crafty.created_role += 1
|
||||
UserCrafty.save(user_crafty)
|
||||
return user_crafty.created_role
|
||||
|
||||
@staticmethod
|
||||
def get_api_key_permissions_list(key: ApiKeys):
|
||||
user = key.user
|
||||
if user.superuser and key.superuser:
|
||||
return crafty_permissions.get_permissions_list()
|
||||
user = HelperUsers.get_user(key.user_id)
|
||||
if user["superuser"] and key.superuser:
|
||||
return PermissionsCrafty.get_permissions_list()
|
||||
else:
|
||||
user_permissions_mask = crafty_permissions.get_crafty_permissions_mask(user.user_id)
|
||||
if user["superuser"]:
|
||||
# User is superuser but API key isn't
|
||||
user_permissions_mask = "111"
|
||||
else:
|
||||
# Not superuser
|
||||
user_permissions_mask = PermissionsCrafty.get_crafty_permissions_mask(
|
||||
user["user_id"]
|
||||
)
|
||||
key_permissions_mask: str = key.crafty_permissions
|
||||
permissions_mask = permission_helper.combine_masks(user_permissions_mask, key_permissions_mask)
|
||||
permissions_list = crafty_permissions.get_permissions(permissions_mask)
|
||||
permissions_mask = PermissionHelper.combine_masks(
|
||||
user_permissions_mask, key_permissions_mask
|
||||
)
|
||||
permissions_list = PermissionsCrafty.get_permissions(permissions_mask)
|
||||
return permissions_list
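get_api_key_permissions_list narrows the user's Crafty mask by the key's own mask via PermissionHelper.combine_masks before expanding it into a permission list. Assuming combine_masks performs a position-wise AND of the two '0'/'1' strings (an assumption; the real helper may differ), the effect looks like this:

def combine_masks_assumed(user_mask: str, key_mask: str) -> str:
    # Assumed semantics: a permission survives only if both masks grant it.
    return "".join(
        "1" if u == "1" and k == "1" else "0"
        for u, k in zip(user_mask, key_mask)
    )


user_permissions_mask = "110"  # user may create servers and edit users
key_permissions_mask = "100"   # this API key is limited to server creation

assert combine_masks_assumed(user_permissions_mask, key_permissions_mask) == "100"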
|
||||
|
||||
|
||||
|
||||
crafty_permissions = Permissions_Crafty()
|
||||
|
@ -1,47 +1,47 @@
|
||||
import logging
|
||||
import datetime
|
||||
from peewee import (
|
||||
ForeignKeyField,
|
||||
CharField,
|
||||
IntegerField,
|
||||
DateTimeField,
|
||||
FloatField,
|
||||
TextField,
|
||||
AutoField,
|
||||
BooleanField,
|
||||
)
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from app.classes.models.users import Users, users_helper
|
||||
from app.classes.models.base_model import BaseModel
|
||||
from app.classes.models.users import Users, HelperUsers
|
||||
from app.classes.models.servers import Servers
|
||||
from app.classes.models.server_permissions import server_permissions
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.main_models import db_helper
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, IntegerField, DateTimeField, FloatField, TextField, AutoField, BooleanField
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.models.server_permissions import PermissionsServers
|
||||
from app.classes.shared.main_models import DatabaseShortcuts
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Audit_Log Class
|
||||
#************************************************************************************************
|
||||
class Audit_Log(Model):
|
||||
# **********************************************************************************
|
||||
class AuditLog(BaseModel):
|
||||
audit_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
user_name = CharField(default="")
|
||||
user_id = IntegerField(default=0, index=True)
|
||||
source_ip = CharField(default='127.0.0.1')
|
||||
server_id = IntegerField(default=None, index=True) # When auditing global events, use server ID 0
|
||||
log_msg = TextField(default='')
|
||||
source_ip = CharField(default="127.0.0.1")
|
||||
server_id = IntegerField(
|
||||
default=None, index=True
|
||||
) # When auditing global events, use server ID 0
|
||||
log_msg = TextField(default="")
|
||||
|
||||
class Meta:
|
||||
database = database
|
||||
table_name = "audit_log"
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Host_Stats Class
|
||||
#************************************************************************************************
|
||||
class Host_Stats(Model):
|
||||
# **********************************************************************************
|
||||
class HostStats(BaseModel):
|
||||
time = DateTimeField(default=datetime.datetime.now, index=True)
|
||||
boot_time = CharField(default="")
|
||||
cpu_usage = FloatField(default=0)
|
||||
@ -55,30 +55,28 @@ class Host_Stats(Model):
|
||||
|
||||
class Meta:
|
||||
table_name = "host_stats"
|
||||
database = database
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Commands Class
|
||||
#************************************************************************************************
|
||||
class Commands(Model):
|
||||
# **********************************************************************************
|
||||
class Commands(BaseModel):
|
||||
command_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
server_id = ForeignKeyField(Servers, backref='server', index=True)
|
||||
user = ForeignKeyField(Users, backref='user', index=True)
|
||||
source_ip = CharField(default='127.0.0.1')
|
||||
command = CharField(default='')
|
||||
server_id = ForeignKeyField(Servers, backref="server", index=True)
|
||||
user = ForeignKeyField(Users, backref="user", index=True)
|
||||
source_ip = CharField(default="127.0.0.1")
|
||||
command = CharField(default="")
|
||||
executed = BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
table_name = "commands"
|
||||
database = database
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Webhooks Class
|
||||
#************************************************************************************************
|
||||
class Webhooks(Model):
|
||||
# **********************************************************************************
|
||||
class Webhooks(BaseModel):
|
||||
id = AutoField()
|
||||
name = CharField(max_length=64, unique=True, index=True)
|
||||
method = CharField(default="POST")
|
||||
@ -88,15 +86,14 @@ class Webhooks(Model):
|
||||
|
||||
class Meta:
|
||||
table_name = "webhooks"
|
||||
database = database
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Schedules Class
|
||||
#************************************************************************************************
|
||||
class Schedules(Model):
|
||||
# **********************************************************************************
|
||||
class Schedules(BaseModel):
|
||||
schedule_id = IntegerField(unique=True, primary_key=True)
|
||||
server_id = ForeignKeyField(Servers, backref='schedule_server')
|
||||
server_id = ForeignKeyField(Servers, backref="schedule_server")
|
||||
enabled = BooleanField()
|
||||
action = CharField()
|
||||
interval = IntegerField()
|
||||
@ -110,44 +107,49 @@ class Schedules(Model):
|
||||
delay = IntegerField(default=0)
|
||||
|
||||
class Meta:
|
||||
table_name = 'schedules'
|
||||
database = database
|
||||
table_name = "schedules"
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Backups Class
|
||||
#************************************************************************************************
|
||||
class Backups(Model):
|
||||
# **********************************************************************************
|
||||
class Backups(BaseModel):
|
||||
excluded_dirs = CharField(null=True)
|
||||
max_backups = IntegerField()
|
||||
server_id = ForeignKeyField(Servers, backref='backups_server')
|
||||
server_id = ForeignKeyField(Servers, backref="backups_server")
|
||||
compress = BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
table_name = 'backups'
|
||||
database = database
|
||||
table_name = "backups"
|
||||
|
||||
class helpers_management:
|
||||
|
||||
#************************************************************************************************
|
||||
class HelpersManagement:
|
||||
def __init__(self, database, helper):
|
||||
self.database = database
|
||||
self.helper = helper
|
||||
|
||||
# **********************************************************************************
|
||||
# Host_Stats Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_latest_hosts_stats():
|
||||
#pylint: disable=no-member
|
||||
query = Host_Stats.select().order_by(Host_Stats.id.desc()).get()
|
||||
# pylint: disable=no-member
|
||||
query = HostStats.select().order_by(HostStats.id.desc()).get()
|
||||
return model_to_dict(query)
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Commands Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def add_command(server_id, user_id, remote_ip, command):
|
||||
Commands.insert({
|
||||
Commands.server_id: server_id,
|
||||
Commands.user: user_id,
|
||||
Commands.source_ip: remote_ip,
|
||||
Commands.command: command
|
||||
}).execute()
|
||||
Commands.insert(
|
||||
{
|
||||
Commands.server_id: server_id,
|
||||
Commands.user: user_id,
|
||||
Commands.source_ip: remote_ip,
|
||||
Commands.command: command,
|
||||
}
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_unactioned_commands():
|
||||
@ -158,72 +160,80 @@ class helpers_management:
|
||||
def mark_command_complete(command_id=None):
|
||||
if command_id is not None:
|
||||
logger.debug(f"Marking Command {command_id} completed")
|
||||
Commands.update({
|
||||
Commands.executed: True
|
||||
}).where(Commands.command_id == command_id).execute()
|
||||
Commands.update({Commands.executed: True}).where(
|
||||
Commands.command_id == command_id
|
||||
).execute()
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Audit_Log Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_actity_log():
|
||||
q = Audit_Log.select()
|
||||
return db_helper.return_db_rows(q)
|
||||
query = AuditLog.select()
|
||||
return DatabaseShortcuts.return_db_rows(query)
|
||||
|
||||
@staticmethod
|
||||
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
|
||||
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
|
||||
logger.debug(f"Adding to audit log User:{user_id} - Message: {log_msg} ")
|
||||
user_data = users_helper.get_user(user_id)
|
||||
user_data = HelperUsers.get_user(user_id)
|
||||
|
||||
audit_msg = f"{str(user_data['username']).capitalize()} {log_msg}"
|
||||
|
||||
server_users = server_permissions.get_server_user_list(server_id)
|
||||
server_users = PermissionsServers.get_server_user_list(server_id)
|
||||
for user in server_users:
|
||||
websocket_helper.broadcast_user(user,'notification', audit_msg)
|
||||
try:
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user, "notification", audit_msg
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error broadcasting to user {user} - {e}")
|
||||
|
||||
Audit_Log.insert({
|
||||
Audit_Log.user_name: user_data['username'],
|
||||
Audit_Log.user_id: user_id,
|
||||
Audit_Log.server_id: server_id,
|
||||
Audit_Log.log_msg: audit_msg,
|
||||
Audit_Log.source_ip: source_ip
|
||||
}).execute()
|
||||
#deletes records when they're more than 100
|
||||
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
||||
AuditLog.insert(
|
||||
{
|
||||
AuditLog.user_name: user_data["username"],
|
||||
AuditLog.user_id: user_id,
|
||||
AuditLog.server_id: server_id,
|
||||
AuditLog.log_msg: audit_msg,
|
||||
AuditLog.source_ip: source_ip,
|
||||
}
|
||||
).execute()
|
||||
# deletes records when there are more than the configured maximum (default 300)
|
||||
ordered = AuditLog.select().order_by(+AuditLog.created)
|
||||
for item in ordered:
|
||||
if not helper.get_setting('max_audit_entries'):
|
||||
if not self.helper.get_setting("max_audit_entries"):
|
||||
max_entries = 300
|
||||
else:
|
||||
max_entries = helper.get_setting('max_audit_entries')
|
||||
if Audit_Log.select().count() > max_entries:
|
||||
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
||||
max_entries = self.helper.get_setting("max_audit_entries")
|
||||
if AuditLog.select().count() > max_entries:
|
||||
AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
|
||||
else:
|
||||
return
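The audit-log write caps the table at max_audit_entries (300 by default) by walking the oldest rows and deleting until the count falls back under the limit. The same idea, sketched against a throwaway peewee table rather than AuditLog:

import peewee

db = peewee.SqliteDatabase(":memory:")


class LogEntry(peewee.Model):
    msg = peewee.CharField()

    class Meta:
        database = db


db.connect()
db.create_tables([LogEntry])

for n in range(10):
    LogEntry.create(msg=f"entry {n}")

max_entries = 5
# Materialize the ordered rows first so deletes do not disturb the cursor,
# then trim oldest-first until the table is back under the cap.
for item in list(LogEntry.select().order_by(+LogEntry.id)):
    if LogEntry.select().count() > max_entries:
        LogEntry.delete().where(LogEntry.id == item.id).execute()
    else:
        break

assert LogEntry.select().count() == max_entries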
|
||||
|
||||
@staticmethod
|
||||
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
|
||||
Audit_Log.insert({
|
||||
Audit_Log.user_name: user_name,
|
||||
Audit_Log.user_id: user_id,
|
||||
Audit_Log.server_id: server_id,
|
||||
Audit_Log.log_msg: log_msg,
|
||||
Audit_Log.source_ip: source_ip
|
||||
}).execute()
|
||||
#deletes records when they're more than 100
|
||||
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
||||
def add_to_audit_log_raw(self, user_name, user_id, server_id, log_msg, source_ip):
|
||||
AuditLog.insert(
|
||||
{
|
||||
AuditLog.user_name: user_name,
|
||||
AuditLog.user_id: user_id,
|
||||
AuditLog.server_id: server_id,
|
||||
AuditLog.log_msg: log_msg,
|
||||
AuditLog.source_ip: source_ip,
|
||||
}
|
||||
).execute()
|
||||
# deletes records when there are more than the configured maximum (default 300)
|
||||
ordered = AuditLog.select().order_by(+AuditLog.created)
|
||||
for item in ordered:
|
||||
#configurable through app/config/config.json
|
||||
if not helper.get_setting('max_audit_entries'):
|
||||
# configurable through app/config/config.json
|
||||
if not self.helper.get_setting("max_audit_entries"):
|
||||
max_entries = 300
|
||||
else:
|
||||
max_entries = helper.get_setting('max_audit_entries')
|
||||
if Audit_Log.select().count() > max_entries:
|
||||
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
||||
max_entries = self.helper.get_setting("max_audit_entries")
|
||||
if AuditLog.select().count() > max_entries:
|
||||
AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
|
||||
else:
|
||||
return
|
||||
#************************************************************************************************
|
||||
|
||||
# **********************************************************************************
|
||||
# Schedules Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def create_scheduled_task(
|
||||
server_id,
|
||||
@ -235,30 +245,31 @@ class helpers_management:
|
||||
comment=None,
|
||||
enabled=True,
|
||||
one_time=False,
|
||||
cron_string='* * * * *',
|
||||
cron_string="* * * * *",
|
||||
parent=None,
|
||||
delay=0):
|
||||
sch_id = Schedules.insert({
|
||||
Schedules.server_id: server_id,
|
||||
Schedules.action: action,
|
||||
Schedules.enabled: enabled,
|
||||
Schedules.interval: interval,
|
||||
Schedules.interval_type: interval_type,
|
||||
Schedules.start_time: start_time,
|
||||
Schedules.command: command,
|
||||
Schedules.comment: comment,
|
||||
Schedules.one_time: one_time,
|
||||
Schedules.cron_string: cron_string,
|
||||
Schedules.parent: parent,
|
||||
Schedules.delay: delay
|
||||
|
||||
}).execute()
|
||||
delay=0,
|
||||
):
|
||||
sch_id = Schedules.insert(
|
||||
{
|
||||
Schedules.server_id: server_id,
|
||||
Schedules.action: action,
|
||||
Schedules.enabled: enabled,
|
||||
Schedules.interval: interval,
|
||||
Schedules.interval_type: interval_type,
|
||||
Schedules.start_time: start_time,
|
||||
Schedules.command: command,
|
||||
Schedules.comment: comment,
|
||||
Schedules.one_time: one_time,
|
||||
Schedules.cron_string: cron_string,
|
||||
Schedules.parent: parent,
|
||||
Schedules.delay: delay,
|
||||
}
|
||||
).execute()
|
||||
return sch_id
|
||||
|
||||
@staticmethod
|
||||
def delete_scheduled_task(schedule_id):
|
||||
sch = Schedules.get(Schedules.schedule_id == schedule_id)
|
||||
return Schedules.delete_instance(sch)
|
||||
return Schedules.delete().where(Schedules.schedule_id == schedule_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def update_scheduled_task(schedule_id, updates):
|
||||
@ -282,7 +293,11 @@ class helpers_management:
|
||||
|
||||
@staticmethod
|
||||
def get_child_schedules_by_server(schedule_id, server_id):
|
||||
return Schedules.select().where(Schedules.server_id == server_id, Schedules.parent == schedule_id).execute()
|
||||
return (
|
||||
Schedules.select()
|
||||
.where(Schedules.server_id == server_id, Schedules.parent == schedule_id)
|
||||
.execute()
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_child_schedules(schedule_id):
|
||||
@ -294,22 +309,27 @@ class helpers_management:
|
||||
|
||||
@staticmethod
|
||||
def get_schedules_enabled():
|
||||
#pylint: disable=singleton-comparison
|
||||
return Schedules.select().where(Schedules.enabled == True).execute()
|
||||
return (
|
||||
Schedules.select()
|
||||
.where(Schedules.enabled == True) # pylint: disable=singleton-comparison
|
||||
.execute()
|
||||
)
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Backups Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_backup_config(server_id):
|
||||
try:
|
||||
row = Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
|
||||
row = (
|
||||
Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
|
||||
)
|
||||
conf = {
|
||||
"backup_path": row.server_id.backup_path,
|
||||
"excluded_dirs": row.excluded_dirs,
|
||||
"max_backups": row.max_backups,
|
||||
"server_id": row.server_id.server_id,
|
||||
"compress": row.compress
|
||||
"server_id": row.server_id_id,
|
||||
"compress": row.compress,
|
||||
}
|
||||
except IndexError:
|
||||
conf = {
|
||||
@ -321,44 +341,62 @@ class helpers_management:
|
||||
}
|
||||
return conf
|
||||
|
||||
@staticmethod
|
||||
def set_backup_config(server_id: int, backup_path: str = None, max_backups: int = None, excluded_dirs: list = None, compress: bool = False):
|
||||
def set_backup_config(
|
||||
self,
|
||||
server_id: int,
|
||||
backup_path: str = None,
|
||||
max_backups: int = None,
|
||||
excluded_dirs: list = None,
|
||||
compress: bool = False,
|
||||
):
|
||||
logger.debug(f"Updating server {server_id} backup config with {locals()}")
|
||||
if Backups.select().where(Backups.server_id == server_id).count() != 0:
|
||||
if Backups.select().where(Backups.server_id == server_id).exists():
|
||||
new_row = False
|
||||
conf = {}
|
||||
else:
|
||||
conf = {
|
||||
"excluded_dirs": None,
|
||||
"max_backups": 0,
|
||||
"server_id": server_id,
|
||||
"compress": False
|
||||
"server_id": server_id,
|
||||
"compress": False,
|
||||
}
|
||||
new_row = True
|
||||
if max_backups is not None:
|
||||
conf['max_backups'] = max_backups
|
||||
conf["max_backups"] = max_backups
|
||||
if excluded_dirs is not None:
|
||||
dirs_to_exclude = ",".join(excluded_dirs)
|
||||
conf['excluded_dirs'] = dirs_to_exclude
|
||||
conf['compress'] = compress
|
||||
conf["excluded_dirs"] = dirs_to_exclude
|
||||
conf["compress"] = compress
|
||||
if not new_row:
|
||||
with database.atomic():
|
||||
with self.database.atomic():
|
||||
if backup_path is not None:
|
||||
u1 = Servers.update(backup_path=backup_path).where(Servers.server_id == server_id).execute()
|
||||
server_rows = (
|
||||
Servers.update(backup_path=backup_path)
|
||||
.where(Servers.server_id == server_id)
|
||||
.execute()
|
||||
)
|
||||
else:
|
||||
u1 = 0
|
||||
u2 = Backups.update(conf).where(Backups.server_id == server_id).execute()
|
||||
logger.debug(f"Updating existing backup record. {u1}+{u2} rows affected")
|
||||
server_rows = 0
|
||||
backup_rows = (
|
||||
Backups.update(conf).where(Backups.server_id == server_id).execute()
|
||||
)
|
||||
logger.debug(
|
||||
f"Updating existing backup record. "
|
||||
f"{server_rows}+{backup_rows} rows affected"
|
||||
)
|
||||
else:
|
||||
with database.atomic():
|
||||
with self.database.atomic():
|
||||
conf["server_id"] = server_id
|
||||
if backup_path is not None:
|
||||
Servers.update(backup_path=backup_path).where(Servers.server_id == server_id)
|
||||
Servers.update(backup_path=backup_path).where(
|
||||
Servers.server_id == server_id
|
||||
)
|
||||
Backups.create(**conf)
|
||||
logger.debug("Creating new backup record.")
|
||||
|
||||
def get_excluded_backup_dirs(self, server_id: int):
|
||||
excluded_dirs = self.get_backup_config(server_id)['excluded_dirs']
|
||||
@staticmethod
|
||||
def get_excluded_backup_dirs(server_id: int):
|
||||
excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
|
||||
if excluded_dirs is not None and excluded_dirs != "":
|
||||
dir_list = excluded_dirs.split(",")
|
||||
else:
|
||||
@ -366,29 +404,31 @@ class helpers_management:
|
||||
return dir_list
|
||||
|
||||
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
|
||||
dir_list = self.get_excluded_backup_dirs()
|
||||
dir_list = self.get_excluded_backup_dirs(server_id)
|
||||
if dir_to_add not in dir_list:
|
||||
dir_list.append(dir_to_add)
|
||||
excluded_dirs = ",".join(dir_list)
|
||||
self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
|
||||
else:
|
||||
logger.debug(f"Not adding {dir_to_add} to excluded directories - already in the excluded directory list for server ID {server_id}")
|
||||
logger.debug(
|
||||
f"Not adding {dir_to_add} to excluded directories - "
|
||||
f"already in the excluded directory list for server ID {server_id}"
|
||||
)
|
||||
|
||||
def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
|
||||
dir_list = self.get_excluded_backup_dirs()
|
||||
dir_list = self.get_excluded_backup_dirs(server_id)
|
||||
if dir_to_del in dir_list:
|
||||
dir_list.remove(dir_to_del)
|
||||
excluded_dirs = ",".join(dir_list)
|
||||
self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
|
||||
else:
|
||||
logger.debug(f"Not removing {dir_to_del} from excluded directories - not in the excluded directory list for server ID {server_id}")
|
||||
logger.debug(
|
||||
f"Not removing {dir_to_del} from excluded directories - "
|
||||
f"not in the excluded directory list for server ID {server_id}"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def clear_unexecuted_commands():
|
||||
Commands.update({
|
||||
Commands.executed: True
|
||||
#pylint: disable=singleton-comparison
|
||||
}).where(Commands.executed == False).execute()
|
||||
|
||||
|
||||
management_helper = helpers_management()
|
||||
Commands.update({Commands.executed: True}).where(
|
||||
Commands.executed == False # pylint: disable=singleton-comparison
|
||||
).execute()
|
||||
|
@ -1,26 +1,23 @@
|
||||
import logging
|
||||
import datetime
|
||||
import typing as t
|
||||
from peewee import (
|
||||
CharField,
|
||||
DoesNotExist,
|
||||
AutoField,
|
||||
DateTimeField,
|
||||
)
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, CharField, DoesNotExist, AutoField, DateTimeField
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.models.base_model import BaseModel
|
||||
from app.classes.shared.helpers import Helpers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Roles Class
|
||||
#************************************************************************************************
|
||||
class Roles(Model):
|
||||
# **********************************************************************************
|
||||
class Roles(BaseModel):
|
||||
role_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
last_update = DateTimeField(default=datetime.datetime.now)
|
||||
@ -28,16 +25,22 @@ class Roles(Model):
|
||||
|
||||
class Meta:
|
||||
table_name = "roles"
|
||||
database = database
|
||||
|
||||
#************************************************************************************************
|
||||
|
||||
# **********************************************************************************
|
||||
# Roles Helpers
|
||||
#************************************************************************************************
|
||||
class helper_roles:
|
||||
# **********************************************************************************
|
||||
class HelperRoles:
|
||||
def __init__(self, database):
|
||||
self.database = database
|
||||
|
||||
@staticmethod
|
||||
def get_all_roles():
|
||||
query = Roles.select()
|
||||
return query
|
||||
return Roles.select()
|
||||
|
||||
@staticmethod
|
||||
def get_all_role_ids() -> t.List[int]:
|
||||
return [role.role_id for role in Roles.select(Roles.role_id).execute()]
|
||||
|
||||
@staticmethod
|
||||
def get_roleid_by_name(role_name):
|
||||
@ -50,28 +53,40 @@ class helper_roles:
|
||||
def get_role(role_id):
|
||||
return model_to_dict(Roles.get(Roles.role_id == role_id))
|
||||
|
||||
@staticmethod
|
||||
def get_role_columns(
|
||||
role_id: t.Union[str, int], column_names: t.List[str]
|
||||
) -> t.List[t.Any]:
|
||||
columns = [getattr(Roles, column) for column in column_names]
|
||||
return model_to_dict(
|
||||
Roles.select(*columns).where(Roles.role_id == role_id).get(),
|
||||
only=columns,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_role_column(role_id: t.Union[str, int], column_name: str) -> t.Any:
|
||||
column = getattr(Roles, column_name)
|
||||
return getattr(
|
||||
Roles.select(column).where(Roles.role_id == role_id).get(), column_name
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_role(role_name):
|
||||
role_id = Roles.insert({
|
||||
Roles.role_name: role_name.lower(),
|
||||
Roles.created: helper.get_time_as_string()
|
||||
}).execute()
|
||||
role_id = Roles.insert(
|
||||
{
|
||||
Roles.role_name: role_name.lower(),
|
||||
Roles.created: Helpers.get_time_as_string(),
|
||||
}
|
||||
).execute()
|
||||
return role_id
|
||||
|
||||
@staticmethod
|
||||
def update_role(role_id, up_data):
|
||||
return Roles.update(up_data).where(Roles.role_id == role_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def remove_role(role_id):
|
||||
with database.atomic():
|
||||
role = Roles.get(Roles.role_id == role_id)
|
||||
return role.delete_instance()
|
||||
def remove_role(self, role_id):
|
||||
return Roles.delete().where(Roles.role_id == role_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def role_id_exists(role_id):
|
||||
if not roles_helper.get_role(role_id):
|
||||
return False
|
||||
return True
|
||||
|
||||
roles_helper = helper_roles()
|
||||
def role_id_exists(role_id) -> bool:
|
||||
return Roles.select().where(Roles.role_id == role_id).exists()
|
||||
|
@ -1,187 +1,239 @@
|
||||
import logging
|
||||
import typing as t
|
||||
from enum import Enum
|
||||
from peewee import (
|
||||
ForeignKeyField,
|
||||
CharField,
|
||||
CompositeKey,
|
||||
JOIN,
|
||||
)
|
||||
|
||||
from app.classes.models.base_model import BaseModel
|
||||
from app.classes.models.servers import Servers
|
||||
from app.classes.models.roles import Roles
|
||||
from app.classes.models.users import User_Roles, users_helper, ApiKeys, Users
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.permission_helper import permission_helper
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, CompositeKey, JOIN
|
||||
from enum import Enum
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.models.users import UserRoles, HelperUsers, ApiKeys, Users
|
||||
from app.classes.shared.permission_helper import PermissionHelper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Role Servers Class
|
||||
#************************************************************************************************
|
||||
class Role_Servers(Model):
|
||||
role_id = ForeignKeyField(Roles, backref='role_server')
|
||||
server_id = ForeignKeyField(Servers, backref='role_server')
|
||||
# **********************************************************************************
|
||||
class RoleServers(BaseModel):
|
||||
role_id = ForeignKeyField(Roles, backref="role_server")
|
||||
server_id = ForeignKeyField(Servers, backref="role_server")
|
||||
permissions = CharField(default="00000000")
|
||||
|
||||
class Meta:
|
||||
table_name = 'role_servers'
|
||||
primary_key = CompositeKey('role_id', 'server_id')
|
||||
database = database
|
||||
table_name = "role_servers"
|
||||
primary_key = CompositeKey("role_id", "server_id")
|
||||
|
||||
#************************************************************************************************
|
||||
|
||||
# **********************************************************************************
|
||||
# Servers Permissions Class
|
||||
#************************************************************************************************
|
||||
class Enum_Permissions_Server(Enum):
|
||||
Commands = 0
|
||||
Terminal = 1
|
||||
Logs = 2
|
||||
Schedule = 3
|
||||
Backup = 4
|
||||
Files = 5
|
||||
Config = 6
|
||||
Players = 7
|
||||
# **********************************************************************************
|
||||
class EnumPermissionsServer(Enum):
|
||||
COMMANDS = 0
|
||||
TERMINAL = 1
|
||||
LOGS = 2
|
||||
SCHEDULE = 3
|
||||
BACKUP = 4
|
||||
FILES = 5
|
||||
CONFIG = 6
|
||||
PLAYERS = 7
|
||||
|
||||
class Permissions_Servers:
|
||||
|
||||
class PermissionsServers:
|
||||
@staticmethod
|
||||
def get_or_create(role_id, server, permissions_mask):
|
||||
return Role_Servers.get_or_create(role_id=role_id, server_id=server, permissions=permissions_mask)
|
||||
return RoleServers.get_or_create(
|
||||
role_id=role_id, server_id=server, permissions=permissions_mask
|
||||
)
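PermissionsServers.get_or_create forwards to peewee's Model.get_or_create, which returns an (instance, created) tuple rather than just the row, so callers that only want the row must unpack it. A quick illustration on a throwaway model:

import peewee

db = peewee.SqliteDatabase(":memory:")


class RolePerm(peewee.Model):
    role_id = peewee.IntegerField()
    server_id = peewee.IntegerField()
    permissions = peewee.CharField(default="00000000")

    class Meta:
        database = db
        indexes = ((("role_id", "server_id"), True),)  # unique pair


db.connect()
db.create_tables([RolePerm])

row, created = RolePerm.get_or_create(role_id=1, server_id=2, permissions="10000000")
assert created is True

row, created = RolePerm.get_or_create(role_id=1, server_id=2, permissions="10000000")
assert created is False  # second call finds the existing row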
|
||||
|
||||
@staticmethod
|
||||
def get_permissions_list():
|
||||
permissions_list = []
|
||||
for member in Enum_Permissions_Server.__members__.items():
|
||||
permissions_list.append(member[1])
|
||||
return permissions_list
|
||||
return list(EnumPermissionsServer.__members__.values())
|
||||
|
||||
@staticmethod
|
||||
def get_permissions(permissions_mask):
|
||||
permissions_list = []
|
||||
for member in Enum_Permissions_Server.__members__.items():
|
||||
if server_permissions.has_permission(permissions_mask, member[1]):
|
||||
permissions_list.append(member[1])
|
||||
return permissions_list
|
||||
return [
|
||||
permission
|
||||
for permission in EnumPermissionsServer.__members__.values()
|
||||
if PermissionsServers.has_permission(permissions_mask, permission)
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def has_permission(permission_mask, permission_tested: Enum_Permissions_Server):
|
||||
return permission_mask[permission_tested.value] == '1'
|
||||
def has_permission(permission_mask, permission_tested: EnumPermissionsServer):
|
||||
return permission_mask[permission_tested.value] == "1"
|
||||
|
||||
@staticmethod
|
||||
def set_permission(permission_mask, permission_tested: Enum_Permissions_Server, value):
|
||||
def set_permission(
|
||||
permission_mask, permission_tested: EnumPermissionsServer, value
|
||||
):
|
||||
list_perms = list(permission_mask)
|
||||
list_perms[permission_tested.value] = str(value)
|
||||
permission_mask = ''.join(list_perms)
|
||||
permission_mask = "".join(list_perms)
|
||||
return permission_mask
|
||||
|
||||
@staticmethod
|
||||
def get_permission(permission_mask, permission_tested: Enum_Permissions_Server):
|
||||
def get_permission(permission_mask, permission_tested: EnumPermissionsServer):
|
||||
return permission_mask[permission_tested.value]
|
||||
|
||||
@staticmethod
|
||||
def get_token_permissions(permissions_mask, api_permissions_mask):
|
||||
permissions_list = []
|
||||
for member in Enum_Permissions_Server.__members__.items():
|
||||
if permission_helper.both_have_perm(permissions_mask, api_permissions_mask, member[1]):
|
||||
permissions_list.append(member[1])
|
||||
return permissions_list
|
||||
return [
|
||||
permission
|
||||
for permission in EnumPermissionsServer.__members__.values()
|
||||
if PermissionHelper.both_have_perm(
|
||||
permissions_mask, api_permissions_mask, permission
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Role_Servers Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_role_servers_from_role_id(roleid):
|
||||
return Role_Servers.select().where(Role_Servers.role_id == roleid)
|
||||
def get_role_servers_from_role_id(roleid: t.Union[str, int]):
|
||||
return RoleServers.select().where(RoleServers.role_id == roleid)
|
||||
|
||||
@staticmethod
|
||||
def get_servers_from_role(role_id):
|
||||
return Role_Servers.select().join(Servers, JOIN.INNER).where(Role_Servers.role_id == role_id)
|
||||
def get_servers_from_role(role_id: t.Union[str, int]):
|
||||
return (
|
||||
RoleServers.select()
|
||||
.join(Servers, JOIN.INNER)
|
||||
.where(RoleServers.role_id == role_id)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_server_ids_from_role(role_id: t.Union[str, int]) -> t.List[int]:
|
||||
# FIXME: somehow retrieve only the server ids, not the whole servers
|
||||
return [
|
||||
role_servers.server_id.server_id
|
||||
for role_servers in (
|
||||
RoleServers.select(RoleServers.server_id).where(
|
||||
RoleServers.role_id == role_id
|
||||
)
|
||||
)
|
||||
]
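The FIXME above notes that get_server_ids_from_role still resolves the related Servers row: role_servers.server_id.server_id dereferences the foreign key, costing an extra lookup per result. Elsewhere in this merge the raw column is read as server_id_id, which peewee exposes without touching Servers; a hedged sketch of that alternative (same query shape, not verified against Crafty's models):

import typing as t

from app.classes.models.server_permissions import RoleServers


def get_server_ids_from_role_sketch(role_id: t.Union[str, int]) -> t.List[int]:
    # Read the raw FK column (server_id_id) so no Servers rows are fetched.
    return [
        row.server_id_id
        for row in RoleServers.select(RoleServers.server_id).where(
            RoleServers.role_id == role_id
        )
    ]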
|
||||
|
||||
@staticmethod
|
||||
def get_roles_from_server(server_id):
|
||||
return Role_Servers.select().join(Roles, JOIN.INNER).where(Role_Servers.server_id == server_id)
|
||||
return (
|
||||
RoleServers.select()
|
||||
.join(Roles, JOIN.INNER)
|
||||
.where(RoleServers.server_id == server_id)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_role_server(server_id, role_id, rs_permissions="00000000"):
|
||||
servers = Role_Servers.insert({Role_Servers.server_id: server_id, Role_Servers.role_id: role_id,
|
||||
Role_Servers.permissions: rs_permissions}).execute()
|
||||
servers = RoleServers.insert(
|
||||
{
|
||||
RoleServers.server_id: server_id,
|
||||
RoleServers.role_id: role_id,
|
||||
RoleServers.permissions: rs_permissions,
|
||||
}
|
||||
).execute()
|
||||
return servers
|
||||
|
||||
@staticmethod
|
||||
def get_permissions_mask(role_id, server_id):
|
||||
permissions_mask = ''
|
||||
role_server = Role_Servers.select().where(Role_Servers.role_id == role_id).where(Role_Servers.server_id == server_id).get()
|
||||
permissions_mask = ""
|
||||
role_server = (
|
||||
RoleServers.select()
|
||||
.where(RoleServers.role_id == role_id)
|
||||
.where(RoleServers.server_id == server_id)
|
||||
.get()
|
||||
)
|
||||
permissions_mask = role_server.permissions
|
||||
return permissions_mask
|
||||
|
||||
@staticmethod
|
||||
def get_server_roles(server_id):
|
||||
role_list = []
|
||||
roles = Role_Servers.select().where(Role_Servers.server_id == server_id).execute()
|
||||
roles = RoleServers.select().where(RoleServers.server_id == server_id).execute()
|
||||
for role in roles:
|
||||
role_list.append(role.role_id)
|
||||
return role_list
|
||||
|
||||
@staticmethod
|
||||
def get_role_permissions_list(role_id):
|
||||
permissions_mask = '00000000'
|
||||
role_server = Role_Servers.get_or_none(Role_Servers.role_id == role_id)
|
||||
if role_server is not None:
|
||||
permissions_mask = role_server.permissions
|
||||
permissions_list = server_permissions.get_permissions(permissions_mask)
|
||||
role_server = RoleServers.get_or_none(RoleServers.role_id == role_id)
|
||||
permissions_mask = (
|
||||
"00000000" if role_server is None else role_server.permissions
|
||||
)
|
||||
permissions_list = PermissionsServers.get_permissions(permissions_mask)
|
||||
return permissions_list
|
||||
|
||||
@staticmethod
|
||||
def update_role_permission(role_id, server_id, permissions_mask):
|
||||
role_server = Role_Servers.select().where(Role_Servers.role_id == role_id).where(Role_Servers.server_id == server_id).get()
|
||||
role_server.permissions = permissions_mask
|
||||
Role_Servers.save(role_server)
|
||||
def get_role_permissions_dict(role_id):
|
||||
permissions_dict: t.Dict[str, t.List[EnumPermissionsServer]] = {}
|
||||
role_servers = RoleServers.select(
|
||||
RoleServers.server_id, RoleServers.permissions
|
||||
).where(RoleServers.role_id == role_id)
|
||||
for role_server in role_servers:
|
||||
permissions_dict[
|
||||
role_server.server_id_id
|
||||
] = PermissionsServers.get_permissions(role_server.permissions)
|
||||
return permissions_dict
|
||||
|
||||
@staticmethod
|
||||
def delete_roles_permissions(role_id, removed_servers=None):
|
||||
if removed_servers is None:
|
||||
removed_servers = {}
|
||||
return Role_Servers.delete().where(Role_Servers.role_id == role_id).where(Role_Servers.server_id.in_(removed_servers)).execute()
|
||||
def update_role_permission(role_id, server_id, permissions_mask):
|
||||
RoleServers.update(permissions=permissions_mask).where(
|
||||
RoleServers.role_id == role_id, RoleServers.server_id == server_id
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def delete_roles_permissions(
|
||||
role_id: t.Union[str, int], removed_servers: t.Sequence[t.Union[str, int]]
|
||||
):
|
||||
return (
|
||||
RoleServers.delete()
|
||||
.where(RoleServers.role_id == role_id)
|
||||
.where(RoleServers.server_id.in_(removed_servers))
|
||||
.execute()
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def remove_roles_of_server(server_id):
|
||||
with database.atomic():
|
||||
return Role_Servers.delete().where(Role_Servers.server_id == server_id).execute()
|
||||
return RoleServers.delete().where(RoleServers.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_user_id_permissions_mask(user_id, server_id: str):
|
||||
user = users_helper.get_user_model(user_id)
|
||||
return server_permissions.get_user_permissions_mask(user, server_id)
|
||||
user = HelperUsers.get_user_model(user_id)
|
||||
return PermissionsServers.get_user_permissions_mask(user, server_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_permissions_mask(user: Users, server_id: str):
|
||||
if user.superuser:
|
||||
permissions_mask = '1' * len(server_permissions.get_permissions_list())
|
||||
permissions_mask = "1" * len(EnumPermissionsServer)
|
||||
else:
|
||||
roles_list = users_helper.get_user_roles_id(user.user_id)
|
||||
role_server = Role_Servers.select().where(Role_Servers.role_id.in_(roles_list)).where(Role_Servers.server_id == server_id).execute()
|
||||
roles_list = HelperUsers.get_user_roles_id(user.user_id)
|
||||
role_server = (
|
||||
RoleServers.select()
|
||||
.where(RoleServers.role_id.in_(roles_list))
|
||||
.where(RoleServers.server_id == server_id)
|
||||
.execute()
|
||||
)
|
||||
try:
|
||||
permissions_mask = role_server[0].permissions
|
||||
except IndexError:
|
||||
permissions_mask = '0' * len(server_permissions.get_permissions_list())
|
||||
permissions_mask = "0" * len(EnumPermissionsServer)
|
||||
return permissions_mask
|
||||
|
||||
@staticmethod
|
||||
def get_server_user_list(server_id):
|
||||
final_users = []
|
||||
server_roles = Role_Servers.select().where(Role_Servers.server_id == server_id)
|
||||
# pylint: disable=singleton-comparison
|
||||
super_users = Users.select().where(Users.superuser == True)
|
||||
server_roles = RoleServers.select().where(RoleServers.server_id == server_id)
|
||||
super_users = Users.select(Users.user_id).where(
|
||||
Users.superuser == True # pylint: disable=singleton-comparison
|
||||
)
|
||||
for role in server_roles:
|
||||
users = User_Roles.select().where(User_Roles.role_id == role.role_id)
|
||||
users = UserRoles.select(UserRoles.user_id).where(
|
||||
UserRoles.role_id == role.role_id
|
||||
)
|
||||
for user in users:
|
||||
if user.user_id.user_id not in final_users:
|
||||
final_users.append(user.user_id.user_id)
|
||||
if user.user_id_id not in final_users:
|
||||
final_users.append(user.user_id_id)
|
||||
for suser in super_users:
|
||||
if suser.user_id not in final_users:
|
||||
final_users.append(suser.user_id)
|
||||
@ -189,36 +241,48 @@ class Permissions_Servers:
|
||||
|
||||
@staticmethod
|
||||
def get_user_id_permissions_list(user_id, server_id: str):
|
||||
user = users_helper.get_user_model(user_id)
|
||||
return server_permissions.get_user_permissions_list(user, server_id)
|
||||
user = HelperUsers.get_user_model(user_id)
|
||||
return PermissionsServers.get_user_permissions_list(user, server_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_permissions_list(user: Users, server_id: str):
|
||||
if user.superuser:
|
||||
permissions_list = server_permissions.get_permissions_list()
|
||||
permissions_list = PermissionsServers.get_permissions_list()
|
||||
else:
|
||||
permissions_mask = server_permissions.get_user_permissions_mask(user, server_id)
|
||||
permissions_list = server_permissions.get_permissions(permissions_mask)
|
||||
permissions_mask = PermissionsServers.get_user_permissions_mask(
|
||||
user, server_id
|
||||
)
|
||||
permissions_list = PermissionsServers.get_permissions(permissions_mask)
|
||||
return permissions_list
|
||||
|
||||
@staticmethod
|
||||
def get_api_key_id_permissions_list(key_id, server_id: str):
|
||||
key = ApiKeys.get(ApiKeys.token_id == key_id)
|
||||
return server_permissions.get_api_key_permissions_list(key, server_id)
|
||||
return PermissionsServers.get_api_key_permissions_list(key, server_id)
|
||||
|
||||
@staticmethod
|
||||
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
|
||||
user = key.user
|
||||
if user.superuser and key.superuser:
|
||||
return server_permissions.get_permissions_list()
|
||||
user = HelperUsers.get_user(key.user_id)
|
||||
if user["superuser"] and key.superuser:
|
||||
return PermissionsServers.get_permissions_list()
|
||||
else:
|
||||
roles_list = users_helper.get_user_roles_id(user['user_id'])
|
||||
role_server = Role_Servers.select().where(Role_Servers.role_id.in_(roles_list)).where(Role_Servers.server_id == server_id).execute()
|
||||
user_permissions_mask = role_server[0].permissions
|
||||
roles_list = HelperUsers.get_user_roles_id(user["user_id"])
|
||||
role_server = (
|
||||
RoleServers.select()
|
||||
.where(RoleServers.role_id.in_(roles_list))
|
||||
.where(RoleServers.server_id == server_id)
|
||||
.execute()
|
||||
)
|
||||
try:
|
||||
user_permissions_mask = role_server[0].permissions
|
||||
except:
|
||||
if user["superuser"]:
|
||||
user_permissions_mask = "11111111"
|
||||
else:
|
||||
user_permissions_mask = "00000000"
|
||||
key_permissions_mask = key.server_permissions
|
||||
permissions_mask = permission_helper.combine_masks(user_permissions_mask, key_permissions_mask)
|
||||
permissions_list = server_permissions.get_permissions(permissions_mask)
|
||||
permissions_mask = PermissionHelper.combine_masks(
|
||||
user_permissions_mask, key_permissions_mask
|
||||
)
|
||||
permissions_list = PermissionsServers.get_permissions(permissions_mask)
|
||||
return permissions_list
|
||||
|
||||
|
||||
server_permissions = Permissions_Servers()
|
||||
|
app/classes/models/server_stats.py (new file, 340 lines)
@ -0,0 +1,340 @@
|
||||
import os
|
||||
import logging
|
||||
import datetime
|
||||
|
||||
from app.classes.models.servers import Servers, HelperServers
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.main_models import DatabaseShortcuts
|
||||
from app.classes.shared.migration import MigrationManager
|
||||
|
||||
try:
|
||||
from peewee import (
|
||||
SqliteDatabase,
|
||||
Model,
|
||||
ForeignKeyField,
|
||||
CharField,
|
||||
AutoField,
|
||||
DateTimeField,
|
||||
BooleanField,
|
||||
IntegerField,
|
||||
FloatField,
|
||||
DoesNotExist,
|
||||
)
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
Helpers.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger("peewee")
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
|
||||
# **********************************************************************************
|
||||
# Servers Stats Class
|
||||
# **********************************************************************************
|
||||
class ServerStats(Model):
|
||||
stats_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
server_id = ForeignKeyField(Servers, backref="server", index=True)
|
||||
started = CharField(default="")
|
||||
running = BooleanField(default=False)
|
||||
cpu = FloatField(default=0)
|
||||
mem = FloatField(default=0)
|
||||
mem_percent = FloatField(default=0)
|
||||
world_name = CharField(default="")
|
||||
world_size = CharField(default="")
|
||||
server_port = IntegerField(default=25565)
|
||||
int_ping_results = CharField(default="")
|
||||
online = IntegerField(default=0)
|
||||
max = IntegerField(default=0)
|
||||
players = CharField(default="")
|
||||
desc = CharField(default="Unable to Connect")
|
||||
version = CharField(default="")
|
||||
updating = BooleanField(default=False)
|
||||
waiting_start = BooleanField(default=False)
|
||||
first_run = BooleanField(default=True)
|
||||
crashed = BooleanField(default=False)
|
||||
downloading = BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
table_name = "server_stats"
|
||||
|
||||
|
||||
# **********************************************************************************
|
||||
# Servers_Stats Methods
|
||||
# **********************************************************************************
|
||||
class HelperServerStats:
|
||||
server_id: int
|
||||
database = None
|
||||
|
||||
def __init__(self, server_id):
|
||||
self.server_id = int(server_id)
|
||||
self.init_database(self.server_id)
|
||||
|
||||
def init_database(self, server_id):
|
||||
try:
|
||||
server = HelperServers.get_server_data_by_id(server_id)
|
||||
db_folder = os.path.join(f"{server['path']}", "db_stats")
|
||||
db_file = os.path.join(
|
||||
db_folder,
|
||||
"crafty_server_stats.sqlite",
|
||||
)
|
||||
self.database = SqliteDatabase(
|
||||
db_file, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
||||
)
|
||||
if not os.path.exists(db_file):
|
||||
try:
|
||||
os.mkdir(db_folder)
|
||||
except Exception as ex:
|
||||
logger.warning(
|
||||
f"Error try to create the db_stats folder for server : {ex}"
|
||||
)
|
||||
helper_stats = Helpers()
|
||||
helper_stats.migration_dir = os.path.join(
|
||||
f"{helper_stats.migration_dir}", "stats"
|
||||
)
|
||||
helper_stats.db_path = db_file
|
||||
migration_manager = MigrationManager(self.database, helper_stats)
|
||||
migration_manager.up() # Automatically runs migrations
|
||||
except Exception as ex:
|
||||
logger.warning(
|
||||
f"Error try to look for the db_stats files for server : {ex}"
|
||||
)
|
||||
return None
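init_database above opens one SQLite file per server with WAL journaling and a roughly 10 MB page cache, then runs migrations against it. A simplified, self-contained sketch of that setup; the file name and model are placeholders, the model is bound through Meta instead of passing the database to each query, and create_tables stands in for the migration step:

# Sketch: one SQLite database per server, opened with the same pragmas as above.
import datetime

from peewee import SqliteDatabase, Model, IntegerField, FloatField, DateTimeField

db = SqliteDatabase(
    "demo_stats.sqlite",  # placeholder path, not Crafty's real db_stats layout
    pragmas={"journal_mode": "wal", "cache_size": -1024 * 10},  # WAL + ~10 MB cache
)


class DemoStats(Model):
    server_id = IntegerField()
    cpu = FloatField(default=0)
    created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        database = db


db.connect()
db.create_tables([DemoStats])  # the real code runs migrations here instead
DemoStats.create(server_id=1, cpu=12.5)
latest = (
    DemoStats.select()
    .where(DemoStats.server_id == 1)
    .order_by(DemoStats.created.desc())
    .first()
)
print(latest.cpu)
db.close()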
|
||||
|
||||
def select_database(self):
|
||||
try:
|
||||
server = HelperServers.get_server_data_by_id(self.server_id)
|
||||
db_file = os.path.join(
|
||||
f"{server['path']}",
|
||||
"db_stats",
|
||||
"crafty_server_stats.sqlite",
|
||||
)
|
||||
self.database = SqliteDatabase(
|
||||
db_file, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
||||
)
|
||||
except Exception as ex:
|
||||
logger.warning(
|
||||
f"Error try to look for the db_stats files for server : {ex}"
|
||||
)
|
||||
return None
|
||||
|
||||
def get_all_servers_stats(self):
|
||||
servers = HelperServers.get_all_defined_servers()
|
||||
server_data = []
|
||||
try:
|
||||
for server in servers:
|
||||
latest = self.get_latest_server_stats()
|
||||
server_data.append(
|
||||
{
|
||||
"server_data": server,
|
||||
"stats": latest,
|
||||
"user_command_permission": True,
|
||||
}
|
||||
)
|
||||
except IndexError as ex:
|
||||
logger.error(
|
||||
f"Stats collection failed with error: {ex}. Was a server just created?"
|
||||
)
|
||||
return server_data
|
||||
|
||||
def insert_server_stats(self, server_stats):
|
||||
server_id = server_stats.get("id", 0)
|
||||
|
||||
if server_id == 0:
|
||||
logger.warning("Stats saving failed with error: Server unknown (id = 0)")
|
||||
return
|
||||
|
||||
ServerStats.insert(
|
||||
{
|
||||
ServerStats.server_id: server_stats.get("id", 0),
|
||||
ServerStats.started: server_stats.get("started", ""),
|
||||
ServerStats.running: server_stats.get("running", False),
|
||||
ServerStats.cpu: server_stats.get("cpu", 0),
|
||||
ServerStats.mem: server_stats.get("mem", 0),
|
||||
ServerStats.mem_percent: server_stats.get("mem_percent", 0),
|
||||
ServerStats.world_name: server_stats.get("world_name", ""),
|
||||
ServerStats.world_size: server_stats.get("world_size", ""),
|
||||
ServerStats.server_port: server_stats.get("server_port", 0),
|
||||
ServerStats.int_ping_results: server_stats.get(
|
||||
"int_ping_results", False
|
||||
),
|
||||
ServerStats.online: server_stats.get("online", False),
|
||||
ServerStats.max: server_stats.get("max", False),
|
||||
ServerStats.players: server_stats.get("players", False),
|
||||
ServerStats.desc: server_stats.get("desc", False),
|
||||
ServerStats.version: server_stats.get("version", False),
|
||||
}
|
||||
).execute(self.database)
|
||||
|
||||
def remove_old_stats(self, last_week):
|
||||
# self.select_database(self.server_id)
|
||||
ServerStats.delete().where(ServerStats.created < last_week).execute(
|
||||
self.database
|
||||
)
|
||||
|
||||
def get_latest_server_stats(self):
|
||||
latest = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.order_by(ServerStats.created.desc())
|
||||
.limit(1)
|
||||
.get(self.database)
|
||||
)
|
||||
try:
|
||||
return DatabaseShortcuts.get_data_obj(latest)
|
||||
except IndexError:
|
||||
return {}
|
||||
|
||||
def get_server_stats(self):
|
||||
stats = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.order_by(ServerStats.created.desc())
|
||||
.limit(1)
|
||||
.first(self.database)
|
||||
)
|
||||
return DatabaseShortcuts.get_data_obj(stats)
|
||||
|
||||
def server_id_exists(self):
|
||||
# self.select_database(self.server_id)
|
||||
if not HelperServers.get_server_data_by_id(self.server_id):
|
||||
return False
|
||||
return True
|
||||
|
||||
def sever_crashed(self):
|
||||
# self.select_database(self.server_id)
|
||||
ServerStats.update(crashed=True).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def set_download(self):
|
||||
# self.select_database(self.server_id)
|
||||
ServerStats.update(downloading=True).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def finish_download(self):
|
||||
# self.select_database(self.server_id)
|
||||
ServerStats.update(downloading=False).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def get_download_status(self):
|
||||
# self.select_database(self.server_id)
|
||||
download_status = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.get(self.database)
|
||||
)
|
||||
return download_status.downloading
|
||||
|
||||
def server_crash_reset(self):
|
||||
if self.server_id is None:
|
||||
return
|
||||
|
||||
# self.select_database(self.server_id)
|
||||
ServerStats.update(crashed=False).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def is_crashed(self):
|
||||
# self.select_database(self.server_id)
|
||||
svr: ServerStats = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.get(self.database)
|
||||
)
|
||||
return svr.crashed
|
||||
|
||||
def set_update(self, value):
|
||||
if self.server_id is None:
|
||||
return
|
||||
|
||||
# self.select_database(self.server_id)
|
||||
try:
|
||||
# Checks if server even exists
|
||||
ServerStats.select().where(ServerStats.server_id == self.server_id).execute(
|
||||
self.database
|
||||
)
|
||||
except DoesNotExist as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
return
|
||||
ServerStats.update(updating=value).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def get_update_status(self):
|
||||
# self.select_database(self.server_id)
|
||||
update_status = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.get(self.database)
|
||||
)
|
||||
return update_status.updating
|
||||
|
||||
def set_first_run(self):
|
||||
# self.select_database(self.server_id)
|
||||
# Sets first run to false
|
||||
try:
|
||||
# Checks if server even exists
|
||||
ServerStats.select().where(ServerStats.server_id == self.server_id).execute(
|
||||
self.database
|
||||
)
|
||||
except Exception as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
return
|
||||
ServerStats.update(first_run=False).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def get_first_run(self):
|
||||
# self.select_database(self.server_id)
|
||||
first_run = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.get(self.database)
|
||||
)
|
||||
return first_run.first_run
|
||||
|
||||
def get_ttl_without_player(self):
|
||||
# self.select_database(self.server_id)
|
||||
last_stat = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.order_by(ServerStats.created.desc())
|
||||
.first(self.database)
|
||||
)
|
||||
last_stat_with_player = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.where(ServerStats.online > 0)
|
||||
.order_by(ServerStats.created.desc())
|
||||
.first(self.database)
|
||||
)
|
||||
return last_stat.created - last_stat_with_player.created
|
||||
|
||||
def can_stop_no_players(self, time_limit):
|
||||
ttl_no_players = self.get_ttl_without_player()
|
||||
return (time_limit == -1) or (ttl_no_players > time_limit)
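get_ttl_without_player returns the difference between two stored timestamps, i.e. a timedelta, so the check in can_stop_no_players only compares cleanly if time_limit is expressed in compatible terms. The sketch below assumes the limit is a number of seconds and converts it explicitly before comparing:

# Sketch of the idle-shutdown check, assuming time_limit is given in seconds.
import datetime


def can_stop_no_players(ttl_without_player: datetime.timedelta, time_limit: int) -> bool:
    if time_limit == -1:  # -1 disables the idle limit entirely
        return True
    return ttl_without_player > datetime.timedelta(seconds=time_limit)


idle = datetime.timedelta(minutes=30)
print(can_stop_no_players(idle, 600))   # True: idle for 30 min, limit is 10 min
print(can_stop_no_players(idle, -1))    # True: limit disabled
print(can_stop_no_players(idle, 3600))  # False: still under the 1 h limit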
|
||||
|
||||
def set_waiting_start(self, value):
|
||||
# self.select_database(self.server_id)
|
||||
try:
|
||||
# Checks if server even exists
|
||||
ServerStats.select().where(ServerStats.server_id == self.server_id).execute(
|
||||
self.database
|
||||
)
|
||||
except DoesNotExist as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
return
|
||||
ServerStats.update(waiting_start=value).where(
|
||||
ServerStats.server_id == self.server_id
|
||||
).execute(self.database)
|
||||
|
||||
def get_waiting_start(self):
|
||||
waiting_start = (
|
||||
ServerStats.select()
|
||||
.where(ServerStats.server_id == self.server_id)
|
||||
.get(self.database)
|
||||
)
|
||||
return waiting_start.waiting_start
|
@ -1,26 +1,24 @@
|
||||
import logging
|
||||
import datetime
|
||||
import typing as t
|
||||
from peewee import (
|
||||
CharField,
|
||||
AutoField,
|
||||
DateTimeField,
|
||||
BooleanField,
|
||||
IntegerField,
|
||||
)
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.main_models import db_helper
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, IntegerField, FloatField
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.shared.main_models import DatabaseShortcuts
|
||||
from app.classes.models.base_model import BaseModel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
#************************************************************************************************
|
||||
# Servers Class
|
||||
#************************************************************************************************
|
||||
class Servers(Model):
|
||||
# **********************************************************************************
|
||||
# Servers Model
|
||||
# **********************************************************************************
|
||||
class Servers(BaseModel):
|
||||
server_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
server_uuid = CharField(default="", index=True)
|
||||
@ -42,50 +40,18 @@ class Servers(Model):
|
||||
|
||||
class Meta:
|
||||
table_name = "servers"
|
||||
database = database
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# Servers Stats Class
|
||||
#************************************************************************************************
|
||||
class Server_Stats(Model):
|
||||
stats_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
server_id = ForeignKeyField(Servers, backref='server', index=True)
|
||||
started = CharField(default="")
|
||||
running = BooleanField(default=False)
|
||||
cpu = FloatField(default=0)
|
||||
mem = FloatField(default=0)
|
||||
mem_percent = FloatField(default=0)
|
||||
world_name = CharField(default="")
|
||||
world_size = CharField(default="")
|
||||
server_port = IntegerField(default=25565)
|
||||
int_ping_results = CharField(default="")
|
||||
online = IntegerField(default=0)
|
||||
max = IntegerField(default=0)
|
||||
players = CharField(default="")
|
||||
desc = CharField(default="Unable to Connect")
|
||||
version = CharField(default="")
|
||||
updating = BooleanField(default=False)
|
||||
waiting_start = BooleanField(default=False)
|
||||
first_run = BooleanField(default=True)
|
||||
crashed = BooleanField(default=False)
|
||||
downloading = BooleanField(default=False)
|
||||
|
||||
|
||||
class Meta:
|
||||
table_name = "server_stats"
|
||||
database = database
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Servers Class
|
||||
#************************************************************************************************
|
||||
class helper_servers:
|
||||
# **********************************************************************************
|
||||
class HelperServers:
|
||||
def __init__(self, database):
|
||||
self.database = database
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Generic Servers Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def create_server(
|
||||
name: str,
|
||||
@ -97,23 +63,48 @@ class helper_servers:
|
||||
server_log_file: str,
|
||||
server_stop: str,
|
||||
server_type: str,
|
||||
server_port=25565):
|
||||
return Servers.insert({
|
||||
Servers.server_name: name,
|
||||
Servers.server_uuid: server_uuid,
|
||||
Servers.path: server_dir,
|
||||
Servers.executable: server_file,
|
||||
Servers.execution_command: server_command,
|
||||
Servers.auto_start: False,
|
||||
Servers.auto_start_delay: 10,
|
||||
Servers.crash_detection: False,
|
||||
Servers.log_path: server_log_file,
|
||||
Servers.server_port: server_port,
|
||||
Servers.stop_command: server_stop,
|
||||
Servers.backup_path: backup_path,
|
||||
Servers.type: server_type
|
||||
}).execute()
|
||||
server_port: int = 25565,
|
||||
server_host: str = "127.0.0.1",
|
||||
) -> int:
|
||||
"""Create a server in the database
|
||||
|
||||
Args:
|
||||
name: The name of the server
|
||||
server_uuid: This is the UUID of the server
|
||||
server_dir: The directory where the server is located
|
||||
backup_path: The path to the backup folder
|
||||
server_command: The command to start the server
|
||||
server_file: The name of the server file
|
||||
server_log_file: The path to the server log file
|
||||
server_stop: This is the command to stop the server
|
||||
server_type: This is the type of server you're creating.
|
||||
server_port: The port the server will be monitored on, defaults to 25565
|
||||
server_host: The host the server will be monitored on, defaults to 127.0.0.1
|
||||
|
||||
Returns:
|
||||
int: The new server's id
|
||||
|
||||
Raises:
|
||||
PeeweeException: If the server already exists
|
||||
"""
|
||||
return Servers.insert(
|
||||
{
|
||||
Servers.server_name: name,
|
||||
Servers.server_uuid: server_uuid,
|
||||
Servers.path: server_dir,
|
||||
Servers.executable: server_file,
|
||||
Servers.execution_command: server_command,
|
||||
Servers.auto_start: False,
|
||||
Servers.auto_start_delay: 10,
|
||||
Servers.crash_detection: False,
|
||||
Servers.log_path: server_log_file,
|
||||
Servers.server_port: server_port,
|
||||
Servers.server_ip: server_host,
|
||||
Servers.stop_command: server_stop,
|
||||
Servers.backup_path: backup_path,
|
||||
Servers.type: server_type,
|
||||
}
|
||||
).execute()
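A usage example for create_server. It assumes Crafty's database has already been initialised, the keyword names are taken from the docstring above (the hunk elides the first half of the signature), and every value is a placeholder:

# Hypothetical call, assuming the full signature matches the docstring above.
import uuid

new_id = HelperServers.create_server(
    name="survival",
    server_uuid=str(uuid.uuid4()),
    server_dir="/var/opt/minecraft/crafty/servers/survival",
    backup_path="/var/opt/minecraft/crafty/backups/survival",
    server_command="java -Xms1G -Xmx2G -jar paper.jar nogui",
    server_file="paper.jar",
    server_log_file="logs/latest.log",
    server_stop="stop",
    server_type="minecraft-java",
    server_port=25565,
    server_host="127.0.0.1",
)
print(f"created server row {new_id}")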
|
||||
|
||||
@staticmethod
|
||||
def get_server_obj(server_id):
|
||||
@ -128,163 +119,52 @@ class helper_servers:
|
||||
def update_server(server_obj):
|
||||
return server_obj.save()
|
||||
|
||||
@staticmethod
|
||||
def remove_server(server_id):
|
||||
with database.atomic():
|
||||
Servers.delete().where(Servers.server_id == server_id).execute()
|
||||
def remove_server(self, server_id):
|
||||
Servers.delete().where(Servers.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_server_data_by_id(server_id):
|
||||
query = Servers.select().where(Servers.server_id == server_id).limit(1)
|
||||
try:
|
||||
return db_helper.return_rows(query)[0]
|
||||
return DatabaseShortcuts.return_rows(query)[0]
|
||||
except IndexError:
|
||||
return {}
|
||||
|
||||
#************************************************************************************************
|
||||
@staticmethod
|
||||
def get_server_columns(
|
||||
server_id: t.Union[str, int], column_names: t.List[str]
|
||||
) -> t.List[t.Any]:
|
||||
columns = [getattr(Servers, column) for column in column_names]
|
||||
return model_to_dict(
|
||||
Servers.select(*columns).where(Servers.server_id == server_id).get(),
|
||||
only=columns,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_server_column(server_id: t.Union[str, int], column_name: str) -> t.Any:
|
||||
column = getattr(Servers, column_name)
|
||||
return getattr(
|
||||
Servers.select(column).where(Servers.server_id == server_id).get(),
|
||||
column_name,
|
||||
)
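get_server_columns and get_server_column resolve column names dynamically with getattr, restrict the SELECT to those columns, and (for the plural form) limit the model_to_dict output to the same set. A short usage sketch with placeholder ids and values:

# Hypothetical usage; assumes a server with id 1 already exists in the database.
name = HelperServers.get_server_column(1, "server_name")
subset = HelperServers.get_server_columns(1, ["server_name", "server_port", "auto_start"])
print(name)    # e.g. "survival"
print(subset)  # e.g. {"server_name": "survival", "server_port": 25565, "auto_start": False}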
|
||||
|
||||
# **********************************************************************************
|
||||
# Servers Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def get_all_defined_servers():
|
||||
query = Servers.select()
|
||||
return db_helper.return_rows(query)
|
||||
return DatabaseShortcuts.return_rows(query)
|
||||
|
||||
@staticmethod
|
||||
def get_all_servers_stats():
|
||||
servers = servers_helper.get_all_defined_servers()
|
||||
server_data = []
|
||||
try:
|
||||
for s in servers:
|
||||
latest = Server_Stats.select().where(Server_Stats.server_id == s.get('server_id')).order_by(Server_Stats.created.desc()).limit(1)
|
||||
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0], "user_command_permission":True})
|
||||
except IndexError as ex:
|
||||
logger.error(f"Stats collection failed with error: {ex}. Was a server just created?")
|
||||
return server_data
|
||||
def get_all_server_ids() -> t.List[int]:
|
||||
return [server.server_id for server in Servers.select(Servers.server_id)]
|
||||
|
||||
@staticmethod
|
||||
def get_server_friendly_name(server_id):
|
||||
server_data = servers_helper.get_server_data_by_id(server_id)
|
||||
friendly_name = f"{server_data.get('server_name', None)} with ID: {server_data.get('server_id', 0)}"
|
||||
server_data = HelperServers.get_server_data_by_id(server_id)
|
||||
friendly_name = (
|
||||
f"{server_data.get('server_name', None)} "
|
||||
f"with ID: {server_data.get('server_id', 0)}"
|
||||
)
|
||||
return friendly_name
|
||||
|
||||
#************************************************************************************************
|
||||
# Servers_Stats Methods
|
||||
#************************************************************************************************
|
||||
@staticmethod
|
||||
def get_latest_server_stats(server_id):
|
||||
return Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).limit(1)
|
||||
|
||||
@staticmethod
|
||||
def get_server_stats_by_id(server_id):
|
||||
stats = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).limit(1)
|
||||
return db_helper.return_rows(stats)[0]
|
||||
|
||||
@staticmethod
|
||||
def server_id_exists(server_id):
|
||||
if not servers_helper.get_server_data_by_id(server_id):
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def sever_crashed(server_id):
|
||||
with database.atomic():
|
||||
Server_Stats.update(crashed=True).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def set_download(server_id):
|
||||
with database.atomic():
|
||||
Server_Stats.update(downloading=True).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def finish_download(server_id):
|
||||
with database.atomic():
|
||||
Server_Stats.update(downloading=False).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_download_status(server_id):
|
||||
download_status = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||
return download_status.downloading
|
||||
|
||||
|
||||
@staticmethod
|
||||
def server_crash_reset(server_id):
|
||||
with database.atomic():
|
||||
Server_Stats.update(crashed=False).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def is_crashed(server_id):
|
||||
svr = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||
#pylint: disable=singleton-comparison
|
||||
if svr.crashed == True:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def set_update(server_id, value):
|
||||
try:
|
||||
#Checks if server even exists
|
||||
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||
except Exception as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
with database.atomic():
|
||||
Server_Stats.update(updating=value).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_update_status(server_id):
|
||||
update_status = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||
return update_status.updating
|
||||
|
||||
@staticmethod
|
||||
def set_first_run(server_id):
|
||||
#Sets first run to false
|
||||
try:
|
||||
#Checks if server even exists
|
||||
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||
except Exception as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
return
|
||||
with database.atomic():
|
||||
Server_Stats.update(first_run=False).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_first_run(server_id):
|
||||
first_run = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||
return first_run.first_run
|
||||
|
||||
@staticmethod
|
||||
def get_TTL_without_player(server_id):
|
||||
last_stat = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).first()
|
||||
last_stat_with_player = (Server_Stats
|
||||
.select()
|
||||
.where(Server_Stats.server_id == server_id)
|
||||
.where(Server_Stats.online > 0)
|
||||
.order_by(Server_Stats.created.desc())
|
||||
.first())
|
||||
return last_stat.created - last_stat_with_player.created
|
||||
|
||||
@staticmethod
|
||||
def can_stop_no_players(server_id, time_limit):
|
||||
can = False
|
||||
ttl_no_players = servers_helper.get_TTL_without_player(server_id)
|
||||
if (time_limit == -1) or (ttl_no_players > time_limit):
|
||||
can = True
|
||||
return can
|
||||
|
||||
@staticmethod
|
||||
def set_waiting_start(server_id, value):
|
||||
try:
|
||||
# Checks if server even exists
|
||||
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||
except Exception as ex:
|
||||
logger.error(f"Database entry not found! {ex}")
|
||||
with database.atomic():
|
||||
Server_Stats.update(waiting_start=value).where(Server_Stats.server_id == server_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_waiting_start(server_id):
|
||||
waiting_start = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||
return waiting_start.waiting_start
|
||||
|
||||
|
||||
servers_helper = helper_servers()
|
||||
|
@ -1,28 +1,29 @@
|
||||
import logging
|
||||
import datetime
|
||||
from typing import Optional, Union
|
||||
import typing as t
|
||||
|
||||
from app.classes.models.roles import Roles, roles_helper
|
||||
from app.classes.shared.helpers import helper
|
||||
from peewee import (
|
||||
ForeignKeyField,
|
||||
CharField,
|
||||
AutoField,
|
||||
DateTimeField,
|
||||
BooleanField,
|
||||
CompositeKey,
|
||||
DoesNotExist,
|
||||
JOIN,
|
||||
)
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
try:
|
||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, CompositeKey, DoesNotExist, JOIN
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.models.base_model import BaseModel
|
||||
from app.classes.models.roles import Roles, HelperRoles
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# Users Class
|
||||
#************************************************************************************************
|
||||
class Users(Model):
|
||||
# **********************************************************************************
|
||||
class Users(BaseModel):
|
||||
user_id = AutoField()
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
last_login = DateTimeField(default=datetime.datetime.now)
|
||||
@ -34,48 +35,60 @@ class Users(Model):
|
||||
enabled = BooleanField(default=True)
|
||||
superuser = BooleanField(default=False)
|
||||
lang = CharField(default="en_EN")
|
||||
support_logs = CharField(default = '')
|
||||
support_logs = CharField(default="")
|
||||
valid_tokens_from = DateTimeField(default=datetime.datetime.now)
|
||||
server_order = CharField(default="")
|
||||
preparing = BooleanField(default=False)
|
||||
hints = BooleanField(default=True)
|
||||
|
||||
class Meta:
|
||||
table_name = "users"
|
||||
database = database
|
||||
|
||||
|
||||
# ************************************************************************************************
|
||||
PUBLIC_USER_ATTRS: t.Final = [
|
||||
"user_id",
|
||||
"created",
|
||||
"username",
|
||||
"enabled",
|
||||
"superuser",
|
||||
"lang", # maybe remove?
|
||||
]
|
||||
|
||||
# **********************************************************************************
|
||||
# API Keys Class
|
||||
# ************************************************************************************************
|
||||
class ApiKeys(Model):
|
||||
# **********************************************************************************
|
||||
class ApiKeys(BaseModel):
|
||||
token_id = AutoField()
|
||||
name = CharField(default='', unique=True, index=True)
|
||||
name = CharField(default="", unique=True, index=True)
|
||||
created = DateTimeField(default=datetime.datetime.now)
|
||||
user_id = ForeignKeyField(Users, backref='api_token', index=True)
|
||||
server_permissions = CharField(default='00000000')
|
||||
crafty_permissions = CharField(default='000')
|
||||
user_id = ForeignKeyField(Users, backref="api_token", index=True)
|
||||
server_permissions = CharField(default="00000000")
|
||||
crafty_permissions = CharField(default="000")
|
||||
superuser = BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
table_name = 'api_keys'
|
||||
database = database
|
||||
table_name = "api_keys"
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# User Roles Class
|
||||
#************************************************************************************************
|
||||
class User_Roles(Model):
|
||||
user_id = ForeignKeyField(Users, backref='user_role')
|
||||
role_id = ForeignKeyField(Roles, backref='user_role')
|
||||
# **********************************************************************************
|
||||
class UserRoles(BaseModel):
|
||||
user_id = ForeignKeyField(Users, backref="user_role")
|
||||
role_id = ForeignKeyField(Roles, backref="user_role")
|
||||
|
||||
class Meta:
|
||||
table_name = 'user_roles'
|
||||
primary_key = CompositeKey('user_id', 'role_id')
|
||||
database = database
|
||||
table_name = "user_roles"
|
||||
primary_key = CompositeKey("user_id", "role_id")
|
||||
|
||||
#************************************************************************************************
|
||||
|
||||
# **********************************************************************************
|
||||
# Users Helpers
|
||||
#************************************************************************************************
|
||||
class helper_users:
|
||||
# **********************************************************************************
|
||||
class HelperUsers:
|
||||
def __init__(self, database, helper):
|
||||
self.database = database
|
||||
self.helper = helper
|
||||
|
||||
@staticmethod
|
||||
def get_by_id(user_id):
|
||||
@ -86,6 +99,15 @@ class helper_users:
|
||||
query = Users.select().where(Users.username != "system")
|
||||
return query
|
||||
|
||||
@staticmethod
|
||||
def get_all_user_ids() -> t.List[int]:
|
||||
return [
|
||||
user.user_id
|
||||
for user in Users.select(Users.user_id)
|
||||
.where(Users.username != "system")
|
||||
.execute()
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_user_lang_by_id(user_id):
|
||||
return Users.get(Users.user_id == user_id).lang
|
||||
@ -106,59 +128,96 @@ class helper_users:
|
||||
def get_user(user_id):
|
||||
if user_id == 0:
|
||||
return {
|
||||
'user_id': 0,
|
||||
'created': '10/24/2019, 11:34:00',
|
||||
'last_login': '10/24/2019, 11:34:00',
|
||||
'last_update': '10/24/2019, 11:34:00',
|
||||
'last_ip': "127.27.23.89",
|
||||
'username': "SYSTEM",
|
||||
'password': None,
|
||||
'email': "default@example.com",
|
||||
'enabled': True,
|
||||
'superuser': True,
|
||||
'roles': [],
|
||||
'servers': [],
|
||||
'support_logs': '',
|
||||
"user_id": 0,
|
||||
"created": "10/24/2019, 11:34:00",
|
||||
"last_login": "10/24/2019, 11:34:00",
|
||||
"last_update": "10/24/2019, 11:34:00",
|
||||
"last_ip": "127.27.23.89",
|
||||
"username": "SYSTEM",
|
||||
"password": None,
|
||||
"email": "default@example.com",
|
||||
"enabled": True,
|
||||
"superuser": True,
|
||||
"roles": [],
|
||||
"servers": [],
|
||||
"support_logs": "",
|
||||
}
|
||||
user = model_to_dict(Users.get(Users.user_id == user_id))
|
||||
|
||||
if user:
|
||||
# I know it should apply it without setting it but I'm just making sure
|
||||
user = users_helper.add_user_roles(user)
|
||||
user = HelperUsers.add_user_roles(user)
|
||||
return user
|
||||
else:
|
||||
#logger.debug("user: ({}) {}".format(user_id, {}))
|
||||
# logger.debug("user: ({}) {}".format(user_id, {}))
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def check_system_user(user_id):
|
||||
try:
|
||||
result = Users.get(Users.user_id == user_id).user_id == user_id
|
||||
if result:
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
def get_user_columns(
|
||||
user_id: t.Union[str, int], column_names: t.List[str]
|
||||
) -> t.List[t.Any]:
|
||||
columns = [getattr(Users, column) for column in column_names]
|
||||
return model_to_dict(
|
||||
Users.select(*columns).where(Users.user_id == user_id).get(),
|
||||
only=columns,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_user_column(user_id: t.Union[str, int], column_name: str) -> t.Any:
|
||||
column = getattr(Users, column_name)
|
||||
return getattr(
|
||||
Users.select(column).where(Users.user_id == user_id).get(),
|
||||
column_name,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_user_model(user_id: str) -> Users:
|
||||
user = Users.get(Users.user_id == user_id)
|
||||
user = users_helper.add_user_roles(user)
|
||||
user = HelperUsers.add_user_roles(user)
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def add_user(username: str, password: Optional[str] = None, email: Optional[str] = None, enabled: bool = True, superuser: bool = False) -> str:
|
||||
def add_user(
|
||||
self,
|
||||
username: str,
|
||||
password: str = None,
|
||||
email: t.Optional[str] = None,
|
||||
enabled: bool = True,
|
||||
superuser: bool = False,
|
||||
) -> str:
|
||||
if password is not None:
|
||||
pw_enc = helper.encode_pass(password)
|
||||
pw_enc = self.helper.encode_pass(password)
|
||||
else:
|
||||
pw_enc = None
|
||||
user_id = Users.insert({
|
||||
Users.username: username.lower(),
|
||||
Users.password: pw_enc,
|
||||
Users.email: email,
|
||||
Users.enabled: enabled,
|
||||
Users.superuser: superuser,
|
||||
Users.created: helper.get_time_as_string()
|
||||
}).execute()
|
||||
user_id = Users.insert(
|
||||
{
|
||||
Users.username: username.lower(),
|
||||
Users.password: pw_enc,
|
||||
Users.email: email,
|
||||
Users.enabled: enabled,
|
||||
Users.superuser: superuser,
|
||||
Users.created: Helpers.get_time_as_string(),
|
||||
}
|
||||
).execute()
|
||||
return user_id
|
||||
|
||||
@staticmethod
|
||||
def add_rawpass_user(
|
||||
username: str,
|
||||
password: str = None,
|
||||
email: t.Optional[str] = None,
|
||||
enabled: bool = True,
|
||||
superuser: bool = False,
|
||||
) -> str:
|
||||
user_id = Users.insert(
|
||||
{
|
||||
Users.username: username.lower(),
|
||||
Users.password: password,
|
||||
Users.email: email,
|
||||
Users.enabled: enabled,
|
||||
Users.superuser: superuser,
|
||||
Users.created: Helpers.get_time_as_string(),
|
||||
}
|
||||
).execute()
|
||||
return user_id
|
||||
|
||||
@staticmethod
|
||||
@ -170,7 +229,9 @@ class helper_users:
|
||||
|
||||
@staticmethod
|
||||
def update_server_order(user_id, user_server_order):
|
||||
Users.update(server_order = user_server_order).where(Users.user_id == user_id).execute()
|
||||
Users.update(server_order=user_server_order).where(
|
||||
Users.user_id == user_id
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def get_server_order(user_id):
|
||||
@ -178,104 +239,123 @@ class helper_users:
|
||||
|
||||
@staticmethod
|
||||
def get_super_user_list():
|
||||
final_users = []
|
||||
# pylint: disable=singleton-comparison
|
||||
super_users = Users.select().where(Users.superuser == True)
|
||||
final_users: t.List[int] = []
|
||||
super_users = Users.select().where(
|
||||
Users.superuser == True # pylint: disable=singleton-comparison
|
||||
)
|
||||
for suser in super_users:
|
||||
if suser.user_id not in final_users:
|
||||
final_users.append(suser.user_id)
|
||||
return final_users
|
||||
|
||||
@staticmethod
|
||||
def remove_user(user_id):
|
||||
with database.atomic():
|
||||
User_Roles.delete().where(User_Roles.user_id == user_id).execute()
|
||||
user = Users.get(Users.user_id == user_id)
|
||||
return user.delete_instance()
|
||||
def remove_user(self, user_id):
|
||||
with self.database.atomic():
|
||||
UserRoles.delete().where(UserRoles.user_id == user_id).execute()
|
||||
return Users.delete().where(Users.user_id == user_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def set_support_path(user_id, support_path):
|
||||
Users.update(support_logs = support_path).where(Users.user_id == user_id).execute()
|
||||
Users.update(support_logs=support_path).where(
|
||||
Users.user_id == user_id
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def set_prepare(user_id):
|
||||
Users.update(preparing=True).where(Users.user_id == user_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def stop_prepare(user_id):
|
||||
Users.update(preparing=False).where(Users.user_id == user_id).execute()
|
||||
|
||||
@staticmethod
|
||||
def clear_support_status():
|
||||
Users.update(preparing=False).where(
|
||||
Users.preparing == True # pylint: disable=singleton-comparison
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def user_id_exists(user_id):
|
||||
if not users_helper.get_user(user_id):
|
||||
return False
|
||||
return True
|
||||
return Users.select().where(Users.user_id == user_id).exists()
|
||||
|
||||
#************************************************************************************************
|
||||
# User_Roles Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
# User_Roles Methods
|
||||
# **********************************************************************************
|
||||
|
||||
@staticmethod
|
||||
def get_or_create(user_id, role_id):
|
||||
return User_Roles.get_or_create(user_id=user_id, role_id=role_id)
|
||||
return UserRoles.get_or_create(user_id=user_id, role_id=role_id)
|
||||
|
||||
@staticmethod
|
||||
def get_user_roles_id(user_id):
|
||||
roles_list = []
|
||||
roles = User_Roles.select().where(User_Roles.user_id == user_id)
|
||||
roles = UserRoles.select().where(UserRoles.user_id == user_id)
|
||||
for r in roles:
|
||||
roles_list.append(roles_helper.get_role(r.role_id)['role_id'])
|
||||
roles_list.append(HelperRoles.get_role(r.role_id)["role_id"])
|
||||
return roles_list
|
||||
|
||||
@staticmethod
|
||||
def get_user_roles_names(user_id):
|
||||
roles_list = []
|
||||
roles = User_Roles.select().where(User_Roles.user_id == user_id)
|
||||
for r in roles:
|
||||
roles_list.append(roles_helper.get_role(r.role_id)['role_name'])
|
||||
return roles_list
|
||||
roles = UserRoles.select(UserRoles.role_id).where(UserRoles.user_id == user_id)
|
||||
return [
|
||||
HelperRoles.get_role_column(role.role_id, "role_name") for role in roles
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def add_role_to_user(user_id, role_id):
|
||||
User_Roles.insert({
|
||||
User_Roles.user_id: user_id,
|
||||
User_Roles.role_id: role_id
|
||||
}).execute()
|
||||
UserRoles.insert(
|
||||
{UserRoles.user_id: user_id, UserRoles.role_id: role_id}
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def add_user_roles(user: Union[dict, Users]):
|
||||
def add_user_roles(user: t.Union[dict, Users]):
|
||||
if isinstance(user, dict):
|
||||
user_id = user['user_id']
|
||||
user_id = user["user_id"]
|
||||
else:
|
||||
user_id = user.user_id
|
||||
|
||||
# I just copied this code from get_user, it had those TODOs & comments made by mac - Lukas
|
||||
# I just copied this code from get_user,
|
||||
# it had those TODOs & comments made by mac - Lukas
|
||||
|
||||
roles_query = User_Roles.select().join(Roles, JOIN.INNER).where(User_Roles.user_id == user_id)
|
||||
# TODO: this query needs to be narrower
|
||||
roles = set()
|
||||
for r in roles_query:
|
||||
roles.add(r.role_id.role_id)
|
||||
roles_query = (
|
||||
UserRoles.select()
|
||||
.join(Roles, JOIN.INNER)
|
||||
.where(UserRoles.user_id == user_id)
|
||||
)
|
||||
roles = {r.role_id_id for r in roles_query}
|
||||
|
||||
if isinstance(user, dict):
|
||||
user['roles'] = roles
|
||||
user["roles"] = roles
|
||||
else:
|
||||
user.roles = roles
|
||||
|
||||
#logger.debug("user: ({}) {}".format(user_id, user))
|
||||
# logger.debug("user: ({}) {}".format(user_id, user))
|
||||
return user
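The comprehension above reads r.role_id_id rather than r.role_id.role_id: with peewee, a ForeignKeyField declared under the name role_id exposes the related Roles row through role_id (a lazy lookup) and the stored key itself through role_id_id. A small self-contained illustration using an in-memory database:

# Sketch of peewee's "<field>_id" accessor for foreign keys (in-memory database).
from peewee import SqliteDatabase, Model, AutoField, CharField, ForeignKeyField

db = SqliteDatabase(":memory:")


class Role(Model):
    role_id = AutoField()
    role_name = CharField()

    class Meta:
        database = db


class UserRole(Model):
    role_id = ForeignKeyField(Role, backref="user_role")

    class Meta:
        database = db


db.connect()
db.create_tables([Role, UserRole])
role = Role.create(role_name="admins")
link = UserRole.create(role_id=role)

print(link.role_id_id)         # raw key, no extra query
print(link.role_id.role_name)  # follows the FK, issues a second query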
|
||||
|
||||
@staticmethod
|
||||
def user_role_query(user_id):
|
||||
user_query = User_Roles.select().where(User_Roles.user_id == user_id)
|
||||
user_query = UserRoles.select().where(UserRoles.user_id == user_id)
|
||||
query = Roles.select().where(Roles.role_id == -1)
|
||||
for u in user_query:
|
||||
query = query + Roles.select().where(Roles.role_id == u.role_id)
|
||||
for user in user_query:
|
||||
query = query + Roles.select().where(Roles.role_id == user.role_id)
|
||||
return query
|
||||
|
||||
@staticmethod
|
||||
def delete_user_roles(user_id, removed_roles):
|
||||
User_Roles.delete().where(User_Roles.user_id == user_id).where(User_Roles.role_id.in_(removed_roles)).execute()
|
||||
UserRoles.delete().where(UserRoles.user_id == user_id).where(
|
||||
UserRoles.role_id.in_(removed_roles)
|
||||
).execute()
|
||||
|
||||
@staticmethod
|
||||
def remove_roles_from_role_id(role_id):
|
||||
User_Roles.delete().where(User_Roles.role_id == role_id).execute()
|
||||
UserRoles.delete().where(UserRoles.role_id == role_id).execute()
|
||||
|
||||
# ************************************************************************************************
|
||||
# ApiKeys Methods
|
||||
# ************************************************************************************************
|
||||
@staticmethod
|
||||
def get_users_from_role(role_id):
|
||||
UserRoles.select().where(UserRoles.role_id == role_id).execute()
|
||||
|
||||
# **********************************************************************************
|
||||
# ApiKeys Methods
|
||||
# **********************************************************************************
|
||||
|
||||
@staticmethod
|
||||
def get_user_api_keys(user_id: str):
|
||||
@ -287,18 +367,29 @@ class helper_users:
|
||||
|
||||
@staticmethod
|
||||
def add_user_api_key(
|
||||
name: str,
|
||||
user_id: str,
|
||||
superuser: bool = False,
|
||||
server_permissions_mask: Optional[str] = None,
|
||||
crafty_permissions_mask: Optional[str] = None):
|
||||
return ApiKeys.insert({
|
||||
ApiKeys.name: name,
|
||||
ApiKeys.user_id: user_id,
|
||||
**({ApiKeys.server_permissions: server_permissions_mask} if server_permissions_mask is not None else {}),
|
||||
**({ApiKeys.crafty_permissions: crafty_permissions_mask} if crafty_permissions_mask is not None else {}),
|
||||
ApiKeys.superuser: superuser
|
||||
}).execute()
|
||||
name: str,
|
||||
user_id: str,
|
||||
superuser: bool = False,
|
||||
server_permissions_mask: t.Optional[str] = None,
|
||||
crafty_permissions_mask: t.Optional[str] = None,
|
||||
):
|
||||
return ApiKeys.insert(
|
||||
{
|
||||
ApiKeys.name: name,
|
||||
ApiKeys.user_id: user_id,
|
||||
**(
|
||||
{ApiKeys.server_permissions: server_permissions_mask}
|
||||
if server_permissions_mask is not None
|
||||
else {}
|
||||
),
|
||||
**(
|
||||
{ApiKeys.crafty_permissions: crafty_permissions_mask}
|
||||
if crafty_permissions_mask is not None
|
||||
else {}
|
||||
),
|
||||
ApiKeys.superuser: superuser,
|
||||
}
|
||||
).execute()
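The insert above only includes the two permission-mask columns when a mask was actually supplied, by splatting either a one-entry dict or an empty one into the row. The same pattern in isolation, with plain dictionary keys instead of peewee fields:

# Sketch of the "**({...} if value is not None else {})" pattern used above.
def build_row(name, server_permissions_mask=None, crafty_permissions_mask=None):
    return {
        "name": name,
        **(
            {"server_permissions": server_permissions_mask}
            if server_permissions_mask is not None
            else {}
        ),
        **(
            {"crafty_permissions": crafty_permissions_mask}
            if crafty_permissions_mask is not None
            else {}
        ),
    }


print(build_row("read-only"))
# {'name': 'read-only'}
print(build_row("ops", server_permissions_mask="11111111"))
# {'name': 'ops', 'server_permissions': '11111111'}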
|
||||
|
||||
@staticmethod
|
||||
def delete_user_api_keys(user_id: str):
|
||||
@ -307,7 +398,3 @@ class helper_users:
|
||||
@staticmethod
|
||||
def delete_user_api_key(key_id: str):
|
||||
ApiKeys.delete().where(ApiKeys.token_id == key_id).execute()
|
||||
|
||||
|
||||
|
||||
users_helper = helper_users()
|
||||
|
@ -1,79 +1,81 @@
|
||||
import logging
|
||||
import time
|
||||
from typing import Optional, Dict, Any, Tuple
|
||||
import jwt
|
||||
from jwt import PyJWTError
|
||||
|
||||
from app.classes.models.users import users_helper, ApiKeys
|
||||
from app.classes.shared.helpers import helper
|
||||
|
||||
try:
|
||||
import jwt
|
||||
from jwt import PyJWTError
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.models.users import HelperUsers, ApiKeys
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Authentication:
|
||||
def __init__(self):
|
||||
def __init__(self, helper):
|
||||
self.helper = helper
|
||||
self.secret = "my secret"
|
||||
self.secret = helper.get_setting('apikey_secret', None)
|
||||
self.secret = self.helper.get_setting("apikey_secret", None)
|
||||
|
||||
if self.secret is None or self.secret == 'random':
|
||||
self.secret = helper.random_string_generator(64)
|
||||
if self.secret is None or self.secret == "random":
|
||||
self.secret = self.helper.random_string_generator(64)
|
||||
self.helper.set_setting("apikey_secret", self.secret)
|
||||
|
||||
@staticmethod
|
||||
def generate(user_id, extra=None):
|
||||
def generate(self, user_id, extra=None):
|
||||
if extra is None:
|
||||
extra = {}
|
||||
return jwt.encode(
|
||||
{
|
||||
'user_id': user_id,
|
||||
'iat': int(time.time()),
|
||||
**extra
|
||||
},
|
||||
authentication.secret,
|
||||
algorithm="HS256"
|
||||
jwt_encoded = jwt.encode(
|
||||
{"user_id": user_id, "iat": int(time.time()), **extra},
|
||||
self.secret,
|
||||
algorithm="HS256",
|
||||
)
|
||||
return jwt_encoded
|
||||
|
||||
@staticmethod
|
||||
def read(token):
|
||||
return jwt.decode(token, authentication.secret, algorithms=["HS256"])
|
||||
def read(self, token):
|
||||
return jwt.decode(token, self.secret, algorithms=["HS256"])
|
||||
|
||||
@staticmethod
|
||||
def check_no_iat(token) -> Optional[Dict[str, Any]]:
|
||||
def check_no_iat(self, token) -> Optional[Dict[str, Any]]:
|
||||
try:
|
||||
return jwt.decode(token, authentication.secret, algorithms=["HS256"])
|
||||
return jwt.decode(str(token), self.secret, algorithms=["HS256"])
|
||||
except PyJWTError as error:
|
||||
logger.debug("Error while checking JWT token: ", exc_info=error)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def check(token) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
|
||||
def check(
|
||||
self,
|
||||
token,
|
||||
) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
|
||||
try:
|
||||
data = jwt.decode(token, authentication.secret, algorithms=["HS256"])
|
||||
data = jwt.decode(str(token), self.secret, algorithms=["HS256"])
|
||||
except PyJWTError as error:
|
||||
logger.debug("Error while checking JWT token: ", exc_info=error)
|
||||
return None
|
||||
iat: int = data['iat']
|
||||
iat: int = data["iat"]
|
||||
key: Optional[ApiKeys] = None
|
||||
if 'token_id' in data:
|
||||
key_id = data['token_id']
|
||||
key = users_helper.get_user_api_key(key_id)
|
||||
if "token_id" in data:
|
||||
key_id = data["token_id"]
|
||||
key = HelperUsers.get_user_api_key(key_id)
|
||||
if key is None:
|
||||
return None
|
||||
user_id: str = data['user_id']
|
||||
user = users_helper.get_user(user_id)
|
||||
# TODO: Have a cache or something so we don't constantly have to query the database
|
||||
if int(user.get('valid_tokens_from').timestamp()) < iat:
|
||||
user_id: str = data["user_id"]
|
||||
user = HelperUsers.get_user(user_id)
|
||||
# TODO: Have a cache or something so we don't constantly
|
||||
# have to query the database
|
||||
if int(user.get("valid_tokens_from").timestamp()) < iat:
|
||||
# Success!
|
||||
return key, data, user
|
||||
else:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def check_bool(token) -> bool:
|
||||
return authentication.check(token) is not None
|
||||
def check_err(
|
||||
self,
|
||||
token,
|
||||
) -> Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]:
|
||||
# Without this function there would be runtime exceptions like the following:
|
||||
# "None" object is not iterable
|
||||
|
||||
output = self.check(token)
|
||||
if output is None:
|
||||
raise Exception("Invalid token")
|
||||
return output
|
||||
|
||||
authentication = Authentication()
|
||||
def check_bool(self, token) -> bool:
|
||||
return self.check(token) is not None
|
||||
|
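The refactor above moves from a module-level `authentication` object to an `Authentication` instance whose secret comes from the `apikey_secret` setting. A self-contained sketch of the same PyJWT pattern (the SECRET constant and function names are illustrative, not the project's API):

import time
from typing import Any, Dict, Optional

import jwt  # PyJWT
from jwt import PyJWTError

SECRET = "example-secret"  # Crafty derives this from the apikey_secret setting


def generate(user_id: str, extra: Optional[Dict[str, Any]] = None) -> str:
    # Issue an HS256 token carrying the user id and the issue time (iat)
    return jwt.encode(
        {"user_id": user_id, "iat": int(time.time()), **(extra or {})},
        SECRET,
        algorithm="HS256",
    )


def check(token: str) -> Optional[Dict[str, Any]]:
    # Return the decoded claims, or None when the token fails verification
    try:
        return jwt.decode(token, SECRET, algorithms=["HS256"])
    except PyJWTError:
        return None


print(check(generate("42")))  # {'user_id': '42', 'iat': ...}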
@ -3,75 +3,129 @@ import cmd
|
||||
import time
|
||||
import threading
|
||||
import logging
|
||||
import getpass
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.import3 import Import3
|
||||
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.tasks import TasksManager
|
||||
from app.classes.shared.migration import MigrationManager
|
||||
from app.classes.shared.main_controller import Controller
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MainPrompt(cmd.Cmd):
|
||||
|
||||
def __init__(self, tasks_manager, migration_manager):
|
||||
def __init__(self, helper, tasks_manager, migration_manager, main_controller):
|
||||
super().__init__()
|
||||
self.tasks_manager = tasks_manager
|
||||
self.migration_manager = migration_manager
|
||||
self.helper: Helpers = helper
|
||||
self.tasks_manager: TasksManager = tasks_manager
|
||||
self.migration_manager: MigrationManager = migration_manager
|
||||
self.controller: Controller = main_controller
|
||||
|
||||
# overrides the default Prompt
|
||||
prompt = f"Crafty Controller v{helper.get_version_string()} > "
|
||||
# overrides the default Prompt
|
||||
self.prompt = f"Crafty Controller v{self.helper.get_version_string()} > "
|
||||
|
||||
@staticmethod
|
||||
def emptyline():
|
||||
def emptyline(self):
|
||||
pass
|
||||
|
||||
#pylint: disable=unused-argument
|
||||
def do_exit(self, line):
|
||||
def do_exit(self, _line):
|
||||
self.tasks_manager._main_graceful_exit()
|
||||
self.universal_exit()
|
||||
|
||||
def do_migrations(self, line):
|
||||
if line == 'up':
|
||||
if line == "up":
|
||||
self.migration_manager.up()
|
||||
elif line == 'down':
|
||||
elif line == "down":
|
||||
self.migration_manager.down()
|
||||
elif line == 'done':
|
||||
console.info(self.migration_manager.done)
|
||||
elif line == 'todo':
|
||||
console.info(self.migration_manager.todo)
|
||||
elif line == 'diff':
|
||||
console.info(self.migration_manager.diff)
|
||||
elif line == 'info':
|
||||
console.info(f'Done: {self.migration_manager.done}')
|
||||
console.info(f'FS: {self.migration_manager.todo}')
|
||||
console.info(f'Todo: {self.migration_manager.diff}')
|
||||
elif line.startswith('add '):
|
||||
migration_name = line[len('add '):]
|
||||
elif line == "done":
|
||||
Console.info(self.migration_manager.done)
|
||||
elif line == "todo":
|
||||
Console.info(self.migration_manager.todo)
|
||||
elif line == "diff":
|
||||
Console.info(self.migration_manager.diff)
|
||||
elif line == "info":
|
||||
Console.info(f"Done: {self.migration_manager.done}")
|
||||
Console.info(f"FS: {self.migration_manager.todo}")
|
||||
Console.info(f"Todo: {self.migration_manager.diff}")
|
||||
elif line.startswith("add "):
|
||||
migration_name = line[len("add ") :]
|
||||
self.migration_manager.create(migration_name, False)
|
||||
else:
|
||||
console.info('Unknown migration command')
|
||||
Console.info("Unknown migration command")
|
||||
|
||||
def do_set_passwd(self, line):
|
||||
|
||||
try:
|
||||
username = str(line).lower()
|
||||
# If no user is found it returns None
|
||||
user_id = self.controller.users.get_id_by_name(username)
|
||||
if not username:
|
||||
Console.error("You must enter a username. Ex: `set_passwd admin'")
|
||||
return False
|
||||
if not user_id:
|
||||
Console.error(f"No user found by the name of {username}")
|
||||
return False
|
||||
except:
|
||||
Console.error(f"User: {line} Not Found")
|
||||
return False
|
||||
new_pass = getpass.getpass(prompt=f"NEW password for: {username} > ")
|
||||
new_pass_conf = getpass.getpass(prompt="Re-enter your password: > ")
|
||||
|
||||
if new_pass != new_pass_conf:
|
||||
Console.error("Passwords do not match. Please try again.")
|
||||
return False
|
||||
|
||||
        if len(new_pass) > 512:
            Console.warning("Passwords must be between 6 and 512 characters long")
            return False

        if len(new_pass) < 6:
            Console.warning("Passwords must be between 6 and 512 characters long")
            return False
|
||||
self.controller.users.update_user(user_id, {"password": new_pass})
|
||||
|
||||
@staticmethod
|
||||
def do_threads(_line):
|
||||
for thread in threading.enumerate():
|
||||
if sys.version_info >= (3, 8):
|
||||
print(f'Name: {thread.name} Identifier: {thread.ident} TID/PID: {thread.native_id}')
|
||||
print(
|
||||
f"Name: {thread.name}\tIdentifier: "
|
||||
f"{thread.ident}\tTID/PID: {thread.native_id}"
|
||||
)
|
||||
else:
|
||||
print(f'Name: {thread.name} Identifier: {thread.ident}')
|
||||
print(f"Name: {thread.name}\tIdentifier: {thread.ident}")
|
||||
|
||||
def print_prompt(self):
|
||||
self.stdout.write(self.prompt)
|
||||
self.stdout.flush()
|
||||
|
||||
def do_import3(self, _line):
|
||||
Import3.start_import()
|
||||
|
||||
def universal_exit(self):
|
||||
logger.info("Stopping all server daemons / threads")
|
||||
console.info("Stopping all server daemons / threads - This may take a few seconds")
|
||||
websocket_helper.disconnect_all()
|
||||
console.info('Waiting for main thread to stop')
|
||||
Console.info(
|
||||
"Stopping all server daemons / threads - This may take a few seconds"
|
||||
)
|
||||
self.helper.websocket_helper.disconnect_all()
|
||||
Console.info("Waiting for main thread to stop")
|
||||
while True:
|
||||
if self.tasks_manager.get_main_thread_run_status():
|
||||
sys.exit(0)
|
||||
time.sleep(1)
|
||||
|
||||
@staticmethod
|
||||
def help_exit():
|
||||
console.help("Stops the server if running, Exits the program")
|
||||
def help_exit(self):
|
||||
Console.help("Stops the server if running, Exits the program")
|
||||
|
||||
@staticmethod
|
||||
def help_migrations():
|
||||
console.help("Only for advanced users. Use with caution")
|
||||
def help_migrations(self):
|
||||
Console.help("Only for advanced users. Use with caution")
|
||||
|
||||
def help_import3(self):
|
||||
Console.help("Import users and servers from Crafty 3")
|
||||
|
||||
def help_set_passwd(self):
|
||||
Console.help("Set a user's password. Example: set_passwd admin")
|
||||
|
||||
def help_threads(self):
|
||||
Console.help("Get all of the Python threads used by Crafty")
|
||||
|
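MainPrompt builds on the standard-library cmd module, where every do_<name> method becomes an interactive command and help_<name> supplies its help text. A runnable sketch of that pattern (MiniPrompt and its commands are illustrative, not Crafty's):

import cmd


class MiniPrompt(cmd.Cmd):
    prompt = "demo > "

    def do_echo(self, line):
        # 'echo hello' prints 'hello'
        print(line)

    def help_echo(self):
        print("Repeats whatever follows the command")

    def do_exit(self, _line):
        # Returning True ends cmdloop()
        return True

    def help_exit(self):
        print("Exits the prompt")


if __name__ == "__main__":
    MiniPrompt().cmdloop()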
@ -1,8 +1,10 @@
|
||||
import datetime
|
||||
import logging
|
||||
import sys
|
||||
import locale
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
locale.setlocale(locale.LC_ALL, "") # Get the locale from the environment
|
||||
|
||||
try:
|
||||
from colorama import init
|
||||
@ -12,61 +14,77 @@ except ModuleNotFoundError as ex:
|
||||
logger.critical(f"Import Error: Unable to load {ex.name} module", exc_info=True)
|
||||
print(f"Import Error: Unable to load {ex.name} module")
|
||||
from app.classes.shared.installer import installer
|
||||
installer.do_install()
|
||||
class Console:
|
||||
|
||||
installer.do_install()
|
||||
|
||||
|
||||
class Console:
|
||||
def __init__(self):
|
||||
if 'colorama' in sys.modules:
|
||||
if "colorama" in sys.modules:
|
||||
init()
|
||||
|
||||
@staticmethod
|
||||
def get_fmt_date_time():
|
||||
# This will use the local date (%x) and time (%X) formatting
|
||||
return datetime.datetime.now().strftime("%x %X")
|
||||
|
||||
@staticmethod
|
||||
def do_print(message, color):
|
||||
if 'termcolor' in sys.modules or 'colorama' in sys.modules:
|
||||
if "termcolor" in sys.modules or "colorama" in sys.modules:
|
||||
print(colored(message, color))
|
||||
else:
|
||||
print(message)
|
||||
|
||||
def magenta(self, message):
|
||||
self.do_print(message, "magenta")
|
||||
@staticmethod
|
||||
def magenta(message):
|
||||
Console.do_print(message, "magenta")
|
||||
|
||||
def cyan(self, message):
|
||||
self.do_print(message, "cyan")
|
||||
@staticmethod
|
||||
def cyan(message):
|
||||
Console.do_print(message, "cyan")
|
||||
|
||||
def yellow(self, message):
|
||||
self.do_print(message, "yellow")
|
||||
@staticmethod
|
||||
def yellow(message):
|
||||
Console.do_print(message, "yellow")
|
||||
|
||||
def red(self, message):
|
||||
self.do_print(message, "red")
|
||||
@staticmethod
|
||||
def red(message):
|
||||
Console.do_print(message, "red")
|
||||
|
||||
def green(self, message):
|
||||
self.do_print(message, "green")
|
||||
@staticmethod
|
||||
def green(message):
|
||||
Console.do_print(message, "green")
|
||||
|
||||
def white(self, message):
|
||||
self.do_print(message, "white")
|
||||
@staticmethod
|
||||
def white(message):
|
||||
Console.do_print(message, "white")
|
||||
|
||||
def debug(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.magenta(f"[+] Crafty: {dt} - DEBUG:\t{message}")
|
||||
@staticmethod
|
||||
def debug(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.magenta(f"[+] Crafty: {date_time} - DEBUG:\t{message}")
|
||||
|
||||
def info(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.white(f"[+] Crafty: {dt} - INFO:\t{message}")
|
||||
@staticmethod
|
||||
def info(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.white(f"[+] Crafty: {date_time} - INFO:\t{message}")
|
||||
|
||||
def warning(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.cyan(f"[+] Crafty: {dt} - WARNING:\t{message}")
|
||||
@staticmethod
|
||||
def warning(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.cyan(f"[+] Crafty: {date_time} - WARNING:\t{message}")
|
||||
|
||||
def error(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.yellow(f"[+] Crafty: {dt} - ERROR:\t{message}")
|
||||
@staticmethod
|
||||
def error(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.yellow(f"[+] Crafty: {date_time} - ERROR:\t{message}")
|
||||
|
||||
def critical(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.red(f"[+] Crafty: {dt} - CRITICAL:\t{message}")
|
||||
@staticmethod
|
||||
def critical(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.red(f"[+] Crafty: {date_time} - CRITICAL:\t{message}")
|
||||
|
||||
def help(self, message):
|
||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||
self.green(f"[+] Crafty: {dt} - HELP:\t{message}")
|
||||
|
||||
|
||||
console = Console()
|
||||
@staticmethod
|
||||
def help(message):
|
||||
date_time = Console.get_fmt_date_time()
|
||||
Console.green(f"[+] Crafty: {date_time} - HELP:\t{message}")
|
||||
|
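The reworked Console above is all static methods and falls back to plain print() when termcolor/colorama are missing. A condensed sketch of that fallback pattern (TinyConsole is illustrative; only the info level is shown):

import datetime

try:
    from colorama import init
    from termcolor import colored

    init()  # enable ANSI colours on Windows terminals
except ModuleNotFoundError:
    colored = None


class TinyConsole:
    @staticmethod
    def do_print(message, color):
        # Colour when the libraries are importable, plain text otherwise
        if colored is not None:
            print(colored(message, color))
        else:
            print(message)

    @staticmethod
    def info(message):
        date_time = datetime.datetime.now().strftime("%x %X")
        TinyConsole.do_print(f"[+] Crafty: {date_time} - INFO:\t{message}", "white")


TinyConsole.info("hello from the demo console")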
@ -1,8 +1,10 @@
class CraftyException(Exception):
    pass


class DatabaseException(CraftyException):
    pass


class SchemaError(DatabaseException):
    pass
@ -6,19 +6,17 @@ from zipfile import ZipFile, ZIP_DEFLATED
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class FileHelpers:
|
||||
allowed_quotes = [
|
||||
"\"",
|
||||
"'",
|
||||
"`"
|
||||
]
|
||||
|
||||
def del_dirs(self, path):
|
||||
class FileHelpers:
|
||||
allowed_quotes = ['"', "'", "`"]
|
||||
|
||||
@staticmethod
|
||||
def del_dirs(path):
|
||||
path = pathlib.Path(path)
|
||||
for sub in path.iterdir():
|
||||
if sub.is_dir():
|
||||
# Delete folder if it is a folder
|
||||
self.del_dirs(sub)
|
||||
FileHelpers.del_dirs(sub)
|
||||
else:
|
||||
# Delete file if it is a file:
|
||||
sub.unlink()
|
||||
@ -32,7 +30,7 @@ class FileHelpers:
|
||||
path = pathlib.Path(path)
|
||||
try:
|
||||
logger.debug(f"Deleting file: {path}")
|
||||
#Remove the file
|
||||
# Remove the file
|
||||
os.remove(path)
|
||||
return True
|
||||
except FileNotFoundError:
|
||||
@ -48,54 +46,70 @@ class FileHelpers:
|
||||
def copy_file(src_path, dest_path):
|
||||
shutil.copy(src_path, dest_path)
|
||||
|
||||
def move_dir(self, src_path, dest_path):
|
||||
self.copy_dir(src_path, dest_path)
|
||||
self.del_dirs(src_path)
|
||||
@staticmethod
|
||||
def move_dir(src_path, dest_path):
|
||||
FileHelpers.copy_dir(src_path, dest_path)
|
||||
FileHelpers.del_dirs(src_path)
|
||||
|
||||
def move_file(self, src_path, dest_path):
|
||||
self.copy_file(src_path, dest_path)
|
||||
self.del_file(src_path)
|
||||
@staticmethod
|
||||
def move_file(src_path, dest_path):
|
||||
FileHelpers.copy_file(src_path, dest_path)
|
||||
FileHelpers.del_file(src_path)
|
||||
|
||||
@staticmethod
|
||||
def make_archive(path_to_destination, path_to_zip):
|
||||
# create a ZipFile object
|
||||
path_to_destination += '.zip'
|
||||
with ZipFile(path_to_destination, 'w') as z:
|
||||
path_to_destination += ".zip"
|
||||
with ZipFile(path_to_destination, "w") as zip_file:
|
||||
for root, _dirs, files in os.walk(path_to_zip, topdown=True):
|
||||
ziproot = path_to_zip
|
||||
for file in files:
|
||||
try:
|
||||
logger.info(f"backing up: {os.path.join(root, file)}")
|
||||
if os.name == "nt":
|
||||
z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, ""), file))
|
||||
zip_file.write(
|
||||
os.path.join(root, file),
|
||||
os.path.join(root.replace(ziproot, ""), file),
|
||||
)
|
||||
else:
|
||||
z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, "/"), file))
|
||||
zip_file.write(
|
||||
os.path.join(root, file),
|
||||
os.path.join(root.replace(ziproot, "/"), file),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error backing up: {os.path.join(root, file)}! - Error was: {e}")
|
||||
|
||||
logger.warning(
|
||||
f"Error backing up: {os.path.join(root, file)}!"
|
||||
f" - Error was: {e}"
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def make_compressed_archive(path_to_destination, path_to_zip):
|
||||
# create a ZipFile object
|
||||
path_to_destination += '.zip'
|
||||
with ZipFile(path_to_destination, 'w', ZIP_DEFLATED) as z:
|
||||
path_to_destination += ".zip"
|
||||
with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
|
||||
for root, _dirs, files in os.walk(path_to_zip, topdown=True):
|
||||
ziproot = path_to_zip
|
||||
for file in files:
|
||||
try:
|
||||
logger.info(f"backing up: {os.path.join(root, file)}")
|
||||
if os.name == "nt":
|
||||
z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, ""), file))
|
||||
zip_file.write(
|
||||
os.path.join(root, file),
|
||||
os.path.join(root.replace(ziproot, ""), file),
|
||||
)
|
||||
else:
|
||||
z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, "/"), file))
|
||||
zip_file.write(
|
||||
os.path.join(root, file),
|
||||
os.path.join(root.replace(ziproot, "/"), file),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error backing up: {os.path.join(root, file)}! - Error was: {e}")
|
||||
|
||||
logger.warning(
|
||||
f"Error backing up: {os.path.join(root, file)}!"
|
||||
f" - Error was: {e}"
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
file_helper = FileHelpers()
|
||||
|
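make_archive and make_compressed_archive above walk a directory with os.walk and write each file into a ZipFile with a path relative to the source root. A simplified, self-contained sketch using os.path.relpath instead of the string replace seen above:

import os
from zipfile import ZIP_DEFLATED, ZipFile


def make_compressed_archive(path_to_destination, path_to_zip):
    # Store every file under path_to_zip with an archive name relative to it
    path_to_destination += ".zip"
    with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
        for root, _dirs, files in os.walk(path_to_zip, topdown=True):
            for file in files:
                full_path = os.path.join(root, file)
                zip_file.write(full_path, os.path.relpath(full_path, path_to_zip))
    return path_to_destination


# Example: make_compressed_archive("backups/my_server", "servers/my_server")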
File diff suppressed because it is too large
90  app/classes/shared/import3.py  Normal file
@ -0,0 +1,90 @@
import json
import os
import logging

from app.classes.controllers.users_controller import HelperUsers
from app.classes.shared.console import Console

logger = logging.getLogger(__name__)


class Import3:
    def __init__(self, helper, controller):
        self.helper = helper
        self.controller = controller

    def start_import(self):
        folder = os.path.normpath(
            input(
                "Please input the path to the migrations folder "
                "in your installation of Crafty 3: "
            )
        )
        if not os.path.exists(folder):
            Console.info(
                "Crafty cannot find the path you entered. "
                "Does Crafty's user have permission to access it?"
            )
            Console.info("Please run the import3 command again and enter a valid path.")
        else:
            with open(os.path.join(folder, "users.json"), encoding="utf-8") as f:
                user_json = json.loads(f.read())
            with open(os.path.join(folder, "mc_settings.json"), encoding="utf-8") as f:
                servers_json = json.loads(f.read())
            self.import_users(user_json)
            self.import_servers(servers_json, self.controller)

    def import_users(self, json_data):
        # If there is only one user to import json needs to call the data differently
        if isinstance(json_data, list):
            for user in json_data:
                HelperUsers.add_rawpass_user(user["username"], user["password"])
                Console.info(f"Imported user {user['username']} from Crafty 3")
                logger.info(f"Imported user {user['username']} from Crafty 3")
        else:
            Console.info(
                "There is only one user detected. "
                "Cannot create duplicate Admin account."
            )
            logger.info(
                "There is only one user detected. "
                "Cannot create duplicate Admin account."
            )

    def import_servers(self, json_data, controller):
        # If there is only one server to import json needs to call the data differently
        if isinstance(json_data, list):
            for server in json_data:
                new_server_id = controller.import_jar_server(
                    server_name=server["server_name"],
                    server_path=server["server_path"],
                    server_jar=server["server_jar"],
                    min_mem=(int(server["memory_min"]) / 1000),
                    max_mem=(int(server["memory_max"]) / 1000),
                    port=server["server_port"],
                )
                Console.info(
                    f"Imported server {server['server_name']}[{server['id']}] "
                    f"from Crafty 3 to new server id {new_server_id}"
                )
                logger.info(
                    f"Imported server {server['server_name']}[{server['id']}] "
                    f"from Crafty 3 to new server id {new_server_id}"
                )
        else:
            new_server_id = controller.import_jar_server(
                server_name=json_data["server_name"],
                server_path=json_data["server_path"],
                server_jar=json_data["server_jar"],
                min_mem=(int(json_data["memory_min"]) / 1000),
                max_mem=(int(json_data["memory_max"]) / 1000),
                port=json_data["server_port"],
            )
            Console.info(
                f"Imported server {json_data['server_name']}[{json_data['id']}] "
                f"from Crafty 3 to new server id {new_server_id}"
            )
            logger.info(
                f"Imported server {json_data['server_name']}[{json_data['id']}] "
                f"from Crafty 3 to new server id {new_server_id}"
            )
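Both import_users and import_servers above branch on whether the Crafty 3 export is a JSON list or a single object. One way to express that shape check in isolation (load_crafty3_records is a hypothetical helper, not part of the importer):

import json


def load_crafty3_records(raw):
    # Crafty 3 exports either a list of records or a single object;
    # normalise both shapes to a list so callers can always iterate.
    data = json.loads(raw)
    return data if isinstance(data, list) else [data]


print(load_crafty3_records('[{"username": "a"}, {"username": "b"}]'))
print(load_crafty3_records('{"username": "only"}'))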
@ -1,12 +1,13 @@
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
class install:
|
||||
|
||||
class Install:
|
||||
@staticmethod
|
||||
def is_venv():
|
||||
return (hasattr(sys, 'real_prefix') or
|
||||
(hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
|
||||
return hasattr(sys, "real_prefix") or (
|
||||
hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
|
||||
)
|
||||
|
||||
def do_install(self):
|
||||
|
||||
@ -16,8 +17,11 @@ class install:
|
||||
sys.exit(1)
|
||||
|
||||
# do our pip install
|
||||
subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", 'requirements.txt'])
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "install", "-r", "requirements.txt"]
|
||||
)
|
||||
print("Crafty has installed it's dependencies, please restart Crafty")
|
||||
sys.exit(0)
|
||||
|
||||
installer = install()
|
||||
|
||||
installer = Install()
|
||||
|
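The Install class above refuses to run outside a virtual environment and then shells out to pip. The same two checks as a standalone sketch (function names are illustrative):

import subprocess
import sys


def in_virtualenv():
    # virtualenv sets sys.real_prefix; venv makes base_prefix differ from prefix
    return hasattr(sys, "real_prefix") or (
        hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
    )


def install_requirements(path="requirements.txt"):
    subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", path])


print("virtual environment detected:", in_virtualenv())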
File diff suppressed because it is too large
@ -1,55 +1,48 @@
|
||||
import logging
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from app.classes.models.users import Users, users_helper
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.console import console
|
||||
|
||||
# To disable warning about unused import ; Users is imported from here in other places
|
||||
# pylint: disable=self-assigning-variable
|
||||
Users = Users
|
||||
|
||||
try:
|
||||
# pylint: disable=unused-import
|
||||
from peewee import SqliteDatabase, fn
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
except ModuleNotFoundError as err:
|
||||
helper.auto_installer_fix(err)
|
||||
from app.classes.shared.helpers import Helpers # pylint: disable=unused-import
|
||||
from app.classes.shared.console import Console
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
peewee_logger = logging.getLogger('peewee')
|
||||
peewee_logger.setLevel(logging.INFO)
|
||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
||||
'journal_mode': 'wal',
|
||||
'cache_size': -1024 * 10})
|
||||
|
||||
class db_builder:
|
||||
|
||||
@staticmethod
|
||||
def default_settings():
|
||||
class DatabaseBuilder:
|
||||
def __init__(self, database, helper, users_helper):
|
||||
self.database = database
|
||||
self.helper = helper
|
||||
self.users_helper = users_helper
|
||||
|
||||
def default_settings(self):
|
||||
logger.info("Fresh Install Detected - Creating Default Settings")
|
||||
console.info("Fresh Install Detected - Creating Default Settings")
|
||||
default_data = helper.find_default_password()
|
||||
Console.info("Fresh Install Detected - Creating Default Settings")
|
||||
default_data = self.helper.find_default_password()
|
||||
# Reset this value if the DB has been dumped
|
||||
self.helper.set_setting("apikey_secret", "random")
|
||||
|
||||
username = default_data.get("username", 'admin')
|
||||
password = default_data.get("password", 'crafty')
|
||||
username = default_data.get("username", "admin")
|
||||
password = default_data.get("password", "crafty")
|
||||
|
||||
users_helper.add_user(username=username, password=password, email="default@example.com", superuser=True)
|
||||
self.users_helper.add_user(
|
||||
username=username,
|
||||
password=password,
|
||||
email="default@example.com",
|
||||
superuser=True,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def is_fresh_install():
|
||||
def is_fresh_install(self):
|
||||
try:
|
||||
user = users_helper.get_by_id(1)
|
||||
user = self.users_helper.get_by_id(1)
|
||||
if user:
|
||||
return False
|
||||
except:
|
||||
return True
|
||||
|
||||
class db_shortcuts:
|
||||
|
||||
#************************************************************************************************
|
||||
class DatabaseShortcuts:
|
||||
# **********************************************************************************
|
||||
# Generic Database Methods
|
||||
#************************************************************************************************
|
||||
# **********************************************************************************
|
||||
@staticmethod
|
||||
def return_rows(query):
|
||||
rows = []
|
||||
@ -68,9 +61,6 @@ class db_shortcuts:
|
||||
data = [model_to_dict(row) for row in model]
|
||||
return data
|
||||
|
||||
|
||||
#************************************************************************************************
|
||||
# Static Accessors
|
||||
#************************************************************************************************
|
||||
installer = db_builder()
|
||||
db_helper = db_shortcuts()
|
||||
@staticmethod
|
||||
def get_data_obj(obj):
|
||||
return model_to_dict(obj)
|
||||
|
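DatabaseShortcuts.return_rows and get_data_obj above are thin wrappers around playhouse's model_to_dict. A runnable peewee sketch of the same conversion (the in-memory User model is illustrative, not Crafty's schema):

from peewee import CharField, Model, SqliteDatabase
from playhouse.shortcuts import model_to_dict

database = SqliteDatabase(":memory:")


class User(Model):
    username = CharField()

    class Meta:
        database = database


database.connect()
database.create_tables([User])
User.create(username="admin")

# Same pattern as DatabaseShortcuts: peewee rows converted to plain dicts
rows = [model_to_dict(row) for row in User.select()]
print(rows)  # [{'id': 1, 'username': 'admin'}]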
@ -1,447 +1,483 @@
|
||||
# pylint: skip-file
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import typing as t
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
from functools import wraps
|
||||
from functools import cached_property
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.console import console
|
||||
|
||||
try:
|
||||
import peewee
|
||||
from playhouse.migrate import (
|
||||
SqliteMigrator,
|
||||
Operation, SQL, SqliteDatabase,
|
||||
make_index_name
|
||||
)
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MIGRATE_TABLE = 'migratehistory'
|
||||
MIGRATE_TEMPLATE = '''# Generated by database migrator
|
||||
import peewee
|
||||
|
||||
def migrate(migrator, db):
|
||||
"""
|
||||
Write your migrations here.
|
||||
"""
|
||||
{migrate}
|
||||
|
||||
def rollback(migrator, db):
|
||||
"""
|
||||
Write your rollback migrations here.
|
||||
"""
|
||||
{rollback}'''
|
||||
|
||||
|
||||
class MigrateHistory(peewee.Model):
|
||||
"""
|
||||
Presents the migration history in a database.
|
||||
"""
|
||||
|
||||
name = peewee.CharField(unique=True)
|
||||
migrated_at = peewee.DateTimeField(default=datetime.utcnow)
|
||||
|
||||
# noinspection PyTypeChecker
|
||||
def __unicode__(self) -> str:
|
||||
"""
|
||||
String representation of this migration
|
||||
"""
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
table_name = MIGRATE_TABLE
|
||||
|
||||
|
||||
def get_model(method):
|
||||
"""
|
||||
Convert string to model class.
|
||||
"""
|
||||
|
||||
@wraps(method)
|
||||
def wrapper(migrator, model, *args, **kwargs):
|
||||
if isinstance(model, str):
|
||||
return method(migrator, migrator.table_dict[model], *args, **kwargs)
|
||||
return method(migrator, model, *args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
class Migrator(object):
|
||||
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
|
||||
"""
|
||||
Initializes the migrator
|
||||
"""
|
||||
if isinstance(database, peewee.Proxy):
|
||||
database = database.obj
|
||||
self.database: SqliteDatabase = database
|
||||
self.table_dict: t.Dict[str, peewee.Model] = {}
|
||||
self.operations: t.List[t.Union[Operation, callable]] = []
|
||||
self.migrator = SqliteMigrator(database)
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Runs operations.
|
||||
"""
|
||||
for op in self.operations:
|
||||
if isinstance(op, Operation):
|
||||
op.run()
|
||||
else:
|
||||
op()
|
||||
self.clean()
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Cleans the operations.
|
||||
"""
|
||||
self.operations = list()
|
||||
|
||||
def sql(self, sql: str, *params):
|
||||
"""
|
||||
Executes raw SQL.
|
||||
"""
|
||||
self.operations.append(SQL(sql, *params))
|
||||
|
||||
def create_table(self, model: peewee.Model) -> peewee.Model:
|
||||
"""
|
||||
Creates model and table in database.
|
||||
"""
|
||||
self.table_dict[model._meta.table_name] = model
|
||||
model._meta.database = self.database
|
||||
self.operations.append(model.create_table)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_table(self, model: peewee.Model):
|
||||
"""
|
||||
Drops model and table from database.
|
||||
"""
|
||||
del self.table_dict[model._meta.table_name]
|
||||
self.operations.append(lambda: model.drop_table(cascade=False))
|
||||
|
||||
@get_model
|
||||
def add_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model:
|
||||
"""
|
||||
Creates new fields.
|
||||
"""
|
||||
for name, field in fields.items():
|
||||
model._meta.add_field(name, field)
|
||||
self.operations.append(self.migrator.add_column(
|
||||
model._meta.table_name, field.column_name, field))
|
||||
if field.unique:
|
||||
self.operations.append(self.migrator.add_index(
|
||||
model._meta.table_name, (field.column_name,), unique=True))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_columns(self, model: peewee.Model, names: str) -> peewee.Model:
|
||||
"""
|
||||
Removes fields from model.
|
||||
"""
|
||||
fields = [field for field in model._meta.fields.values()
|
||||
if field.name in names]
|
||||
for field in fields:
|
||||
self.__del_field__(model, field)
|
||||
if field.unique:
|
||||
# Drop unique index
|
||||
index_name = make_index_name(
|
||||
model._meta.table_name, [field.column_name])
|
||||
self.operations.append(self.migrator.drop_index(
|
||||
model._meta.table_name, index_name))
|
||||
self.operations.append(
|
||||
self.migrator.drop_column(
|
||||
model._meta.table_name, field.column_name, cascade=False))
|
||||
return model
|
||||
|
||||
def __del_field__(self, model: peewee.Model, field: peewee.Field):
|
||||
"""
|
||||
Deletes field from model.
|
||||
"""
|
||||
model._meta.remove_field(field.name)
|
||||
delattr(model, field.name)
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
obj_id_name = field.column_name
|
||||
if field.column_name == field.name:
|
||||
obj_id_name += '_id'
|
||||
delattr(model, obj_id_name)
|
||||
delattr(field.rel_model, field.backref)
|
||||
|
||||
@get_model
|
||||
def rename_column(self, model: peewee.Model, old_name: str, new_name: str) -> peewee.Model:
|
||||
"""
|
||||
Renames field in model.
|
||||
"""
|
||||
field = model._meta.fields[old_name]
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
old_name = field.column_name
|
||||
self.__del_field__(model, field)
|
||||
field.name = field.column_name = new_name
|
||||
model._meta.add_field(new_name, field)
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
field.column_name = new_name = field.column_name + '_id'
|
||||
self.operations.append(self.migrator.rename_column(
|
||||
model._meta.table_name, old_name, new_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model:
|
||||
"""
|
||||
Renames table in database.
|
||||
"""
|
||||
old_name = model._meta.table_name
|
||||
del self.table_dict[model._meta.table_name]
|
||||
model._meta.table_name = new_name
|
||||
self.table_dict[model._meta.table_name] = model
|
||||
self.operations.append(self.migrator.rename_table(old_name, new_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_index(self, model: peewee.Model, *columns: str, unique=False) -> peewee.Model:
|
||||
"""Create indexes."""
|
||||
model._meta.indexes.append((columns, unique))
|
||||
columns_ = []
|
||||
for col in columns:
|
||||
field = model._meta.fields.get(col)
|
||||
|
||||
if len(columns) == 1:
|
||||
field.unique = unique
|
||||
field.index = not unique
|
||||
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
col = col + '_id'
|
||||
|
||||
columns_.append(col)
|
||||
self.operations.append(self.migrator.add_index(
|
||||
model._meta.table_name, columns_, unique=unique))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_index(self, model: peewee.Model, *columns: str) -> peewee.Model:
|
||||
"""Drop indexes."""
|
||||
columns_ = []
|
||||
for col in columns:
|
||||
field = model._meta.fields.get(col)
|
||||
if not field:
|
||||
continue
|
||||
|
||||
if len(columns) == 1:
|
||||
field.unique = field.index = False
|
||||
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
col = col + '_id'
|
||||
columns_.append(col)
|
||||
index_name = make_index_name(model._meta.table_name, columns_)
|
||||
model._meta.indexes = [(cols, _) for (
|
||||
cols, _) in model._meta.indexes if columns != cols]
|
||||
self.operations.append(self.migrator.drop_index(
|
||||
model._meta.table_name, index_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
|
||||
"""Add not null."""
|
||||
for name in names:
|
||||
field = model._meta.fields[name]
|
||||
field.null = False
|
||||
self.operations.append(self.migrator.add_not_null(
|
||||
model._meta.table_name, field.column_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
|
||||
"""Drop not null."""
|
||||
for name in names:
|
||||
field = model._meta.fields[name]
|
||||
field.null = True
|
||||
self.operations.append(self.migrator.drop_not_null(
|
||||
model._meta.table_name, field.column_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_default(self, model: peewee.Model, name: str, default: t.Any) -> peewee.Model:
|
||||
"""Add default."""
|
||||
field = model._meta.fields[name]
|
||||
model._meta.defaults[field] = field.default = default
|
||||
self.operations.append(self.migrator.apply_default(
|
||||
model._meta.table_name, name, field))
|
||||
return model
|
||||
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
class MigrationManager(object):
|
||||
filemask = re.compile(r"[\d]+_[^\.]+\.py$")
|
||||
|
||||
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
|
||||
"""
|
||||
Initializes the migration manager.
|
||||
"""
|
||||
if not isinstance(database, (peewee.Database, peewee.Proxy)):
|
||||
raise RuntimeError('Invalid database: {}'.format(database))
|
||||
self.database = database
|
||||
|
||||
@cached_property
|
||||
def model(self) -> t.Type[MigrateHistory]:
|
||||
"""
|
||||
Initialize and cache the MigrationHistory model.
|
||||
"""
|
||||
MigrateHistory._meta.database = self.database
|
||||
MigrateHistory._meta.table_name = 'migratehistory'
|
||||
MigrateHistory._meta.schema = None
|
||||
MigrateHistory.create_table(True)
|
||||
return MigrateHistory
|
||||
|
||||
@property
|
||||
def done(self) -> t.List[str]:
|
||||
"""
|
||||
Scans migrations in the database.
|
||||
"""
|
||||
return [mm.name for mm in self.model.select().order_by(self.model.id)]
|
||||
|
||||
@property
|
||||
def todo(self):
|
||||
"""
|
||||
Scans migrations in the file system.
|
||||
"""
|
||||
if not os.path.exists(helper.migration_dir):
|
||||
logger.warning('Migration directory: {} does not exist.'.format(
|
||||
helper.migration_dir))
|
||||
os.makedirs(helper.migration_dir)
|
||||
return sorted(f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f))
|
||||
|
||||
@property
|
||||
def diff(self) -> t.List[str]:
|
||||
"""
|
||||
Calculates difference between the filesystem and the database.
|
||||
"""
|
||||
done = set(self.done)
|
||||
return [name for name in self.todo if name not in done]
|
||||
|
||||
@cached_property
|
||||
def migrator(self) -> Migrator:
|
||||
"""
|
||||
Create migrator and setup it with fake migrations.
|
||||
"""
|
||||
migrator = Migrator(self.database)
|
||||
for name in self.done:
|
||||
self.up_one(name, migrator, True)
|
||||
return migrator
|
||||
|
||||
def compile(self, name, migrate='', rollback=''):
|
||||
"""
|
||||
Compiles a migration.
|
||||
"""
|
||||
name = datetime.utcnow().strftime('%Y%m%d%H%M%S') + '_' + name
|
||||
filename = name + '.py'
|
||||
path = os.path.join(helper.migration_dir, filename)
|
||||
with open(path, 'w') as f:
|
||||
f.write(MIGRATE_TEMPLATE.format(
|
||||
migrate=migrate, rollback=rollback, name=filename))
|
||||
|
||||
return name
|
||||
|
||||
def create(self, name: str = 'auto', auto: bool = False) -> t.Optional[str]:
|
||||
"""
|
||||
Creates a migration.
|
||||
"""
|
||||
migrate = rollback = ''
|
||||
if auto:
|
||||
raise NotImplementedError
|
||||
|
||||
logger.info('Creating migration "{}"'.format(name))
|
||||
name = self.compile(name, migrate, rollback)
|
||||
logger.info('Migration has been created as "{}"'.format(name))
|
||||
return name
|
||||
|
||||
def clear(self):
|
||||
"""Clear migrations."""
|
||||
self.model.delete().execute()
|
||||
|
||||
def up(self, name: t.Optional[str] = None):
|
||||
"""
|
||||
Runs all unapplied migrations.
|
||||
"""
|
||||
logger.info('Starting migrations')
|
||||
console.info('Starting migrations')
|
||||
|
||||
done = []
|
||||
diff = self.diff
|
||||
if not diff:
|
||||
logger.info('There is nothing to migrate')
|
||||
console.info('There is nothing to migrate')
|
||||
return done
|
||||
|
||||
migrator = self.migrator
|
||||
for mname in diff:
|
||||
done.append(self.up_one(mname, self.migrator))
|
||||
if name and name == mname:
|
||||
break
|
||||
|
||||
return done
|
||||
|
||||
def read(self, name: str):
|
||||
"""
|
||||
Reads a migration from a file.
|
||||
"""
|
||||
call_params = dict()
|
||||
if helper.is_os_windows() and sys.version_info >= (3, 0):
|
||||
# if system is windows - force utf-8 encoding
|
||||
call_params['encoding'] = 'utf-8'
|
||||
with open(os.path.join(helper.migration_dir, name + '.py'), **call_params) as f:
|
||||
code = f.read()
|
||||
scope = {}
|
||||
code = compile(code, '<string>', 'exec', dont_inherit=True)
|
||||
exec(code, scope, None)
|
||||
return scope.get('migrate', lambda m, d: None), scope.get('rollback', lambda m, d: None)
|
||||
|
||||
def up_one(self, name: str, migrator: Migrator,
|
||||
fake: bool = False, rollback: bool = False) -> str:
|
||||
"""
|
||||
Runs a migration with a given name.
|
||||
"""
|
||||
try:
|
||||
migrate_fn, rollback_fn = self.read(name)
|
||||
if fake:
|
||||
migrate_fn(migrator, self.database)
|
||||
migrator.clean()
|
||||
return name
|
||||
with self.database.transaction():
|
||||
if rollback:
|
||||
logger.info('Rolling back "{}"'.format(name))
|
||||
rollback_fn(migrator, self.database)
|
||||
migrator.run()
|
||||
self.model.delete().where(self.model.name == name).execute()
|
||||
else:
|
||||
logger.info('Migrate "{}"'.format(name))
|
||||
migrate_fn(migrator, self.database)
|
||||
migrator.run()
|
||||
if name not in self.done:
|
||||
self.model.create(name=name)
|
||||
|
||||
logger.info('Done "{}"'.format(name))
|
||||
return name
|
||||
|
||||
except Exception:
|
||||
self.database.rollback()
|
||||
operation_name = 'Rollback' if rollback else 'Migration'
|
||||
logger.exception('{} failed: {}'.format(operation_name, name))
|
||||
raise
|
||||
|
||||
def down(self):
|
||||
"""
|
||||
Rolls back migrations.
|
||||
"""
|
||||
if not self.done:
|
||||
raise RuntimeError('No migrations are found.')
|
||||
|
||||
name = self.done[-1]
|
||||
|
||||
migrator = self.migrator
|
||||
self.up_one(name, migrator, False, True)
|
||||
logger.warning('Rolled back migration: {}'.format(name))
|
||||
# pylint: skip-file
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import typing as t
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
from functools import wraps
|
||||
from functools import cached_property
|
||||
import peewee
|
||||
from playhouse.migrate import (
|
||||
SqliteMigrator,
|
||||
Operation,
|
||||
SQL,
|
||||
SqliteDatabase,
|
||||
make_index_name,
|
||||
)
|
||||
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MIGRATE_TABLE = "migratehistory"
|
||||
MIGRATE_TEMPLATE = '''# Generated by database migrator
|
||||
import peewee
|
||||
|
||||
def migrate(migrator, db):
|
||||
"""
|
||||
Write your migrations here.
|
||||
"""
|
||||
{migrate}
|
||||
|
||||
def rollback(migrator, db):
|
||||
"""
|
||||
Write your rollback migrations here.
|
||||
"""
|
||||
{rollback}'''
|
||||
|
||||
|
||||
class MigrateHistory(peewee.Model):
|
||||
"""
|
||||
Presents the migration history in a database.
|
||||
"""
|
||||
|
||||
name = peewee.CharField(unique=True)
|
||||
migrated_at = peewee.DateTimeField(default=datetime.utcnow)
|
||||
|
||||
# noinspection PyTypeChecker
|
||||
def __unicode__(self) -> str:
|
||||
"""
|
||||
String representation of this migration
|
||||
"""
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
table_name = MIGRATE_TABLE
|
||||
|
||||
|
||||
def get_model(method):
|
||||
"""
|
||||
Convert string to model class.
|
||||
"""
|
||||
|
||||
@wraps(method)
|
||||
def wrapper(migrator, model, *args, **kwargs):
|
||||
if isinstance(model, str):
|
||||
return method(migrator, migrator.table_dict[model], *args, **kwargs)
|
||||
return method(migrator, model, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
class Migrator(object):
|
||||
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
|
||||
"""
|
||||
Initializes the migrator
|
||||
"""
|
||||
if isinstance(database, peewee.Proxy):
|
||||
database = database.obj
|
||||
self.database: SqliteDatabase = database
|
||||
self.table_dict: t.Dict[str, peewee.Model] = {}
|
||||
self.operations: t.List[t.Union[Operation, t.Callable]] = []
|
||||
self.migrator = SqliteMigrator(database)
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Runs operations.
|
||||
"""
|
||||
for op in self.operations:
|
||||
if isinstance(op, Operation):
|
||||
op.run()
|
||||
else:
|
||||
op()
|
||||
self.clean()
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Cleans the operations.
|
||||
"""
|
||||
self.operations = list()
|
||||
|
||||
def sql(self, sql: str, *params):
|
||||
"""
|
||||
Executes raw SQL.
|
||||
"""
|
||||
self.operations.append(SQL(sql, *params))
|
||||
|
||||
def create_table(self, model: peewee.Model) -> peewee.Model:
|
||||
"""
|
||||
Creates model and table in database.
|
||||
"""
|
||||
self.table_dict[model._meta.table_name] = model
|
||||
model._meta.database = self.database
|
||||
self.operations.append(model.create_table)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_table(self, model: peewee.Model):
|
||||
"""
|
||||
Drops model and table from database.
|
||||
"""
|
||||
del self.table_dict[model._meta.table_name]
|
||||
self.operations.append(lambda: model.drop_table(cascade=False))
|
||||
|
||||
@get_model
|
||||
def add_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model:
|
||||
"""
|
||||
Creates new fields.
|
||||
"""
|
||||
for name, field in fields.items():
|
||||
model._meta.add_field(name, field)
|
||||
self.operations.append(
|
||||
self.migrator.add_column(
|
||||
model._meta.table_name, field.column_name, field
|
||||
)
|
||||
)
|
||||
if field.unique:
|
||||
self.operations.append(
|
||||
self.migrator.add_index(
|
||||
model._meta.table_name, (field.column_name,), unique=True
|
||||
)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_columns(self, model: peewee.Model, names: str) -> peewee.Model:
|
||||
"""
|
||||
Removes fields from model.
|
||||
"""
|
||||
fields = [field for field in model._meta.fields.values() if field.name in names]
|
||||
for field in fields:
|
||||
self.__del_field__(model, field)
|
||||
if field.unique:
|
||||
# Drop unique index
|
||||
index_name = make_index_name(
|
||||
model._meta.table_name, [field.column_name]
|
||||
)
|
||||
self.operations.append(
|
||||
self.migrator.drop_index(model._meta.table_name, index_name)
|
||||
)
|
||||
self.operations.append(
|
||||
self.migrator.drop_column(
|
||||
model._meta.table_name, field.column_name, cascade=False
|
||||
)
|
||||
)
|
||||
return model
|
||||
|
||||
def __del_field__(self, model: peewee.Model, field: peewee.Field):
|
||||
"""
|
||||
Deletes field from model.
|
||||
"""
|
||||
model._meta.remove_field(field.name)
|
||||
delattr(model, field.name)
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
obj_id_name = field.column_name
|
||||
if field.column_name == field.name:
|
||||
obj_id_name += "_id"
|
||||
delattr(model, obj_id_name)
|
||||
delattr(field.rel_model, field.backref)
|
||||
|
||||
@get_model
|
||||
def rename_column(
|
||||
self, model: peewee.Model, old_name: str, new_name: str
|
||||
) -> peewee.Model:
|
||||
"""
|
||||
Renames field in model.
|
||||
"""
|
||||
field = model._meta.fields[old_name]
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
old_name = field.column_name
|
||||
self.__del_field__(model, field)
|
||||
field.name = field.column_name = new_name
|
||||
model._meta.add_field(new_name, field)
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
field.column_name = new_name = field.column_name + "_id"
|
||||
self.operations.append(
|
||||
self.migrator.rename_column(model._meta.table_name, old_name, new_name)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model:
|
||||
"""
|
||||
Renames table in database.
|
||||
"""
|
||||
old_name = model._meta.table_name
|
||||
del self.table_dict[model._meta.table_name]
|
||||
model._meta.table_name = new_name
|
||||
self.table_dict[model._meta.table_name] = model
|
||||
self.operations.append(self.migrator.rename_table(old_name, new_name))
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_index(
|
||||
self, model: peewee.Model, *columns: str, unique=False
|
||||
) -> peewee.Model:
|
||||
"""Create indexes."""
|
||||
model._meta.indexes.append((columns, unique))
|
||||
columns_ = []
|
||||
for col in columns:
|
||||
field = model._meta.fields.get(col)
|
||||
|
||||
if len(columns) == 1:
|
||||
field.unique = unique
|
||||
field.index = not unique
|
||||
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
col = col + "_id"
|
||||
|
||||
columns_.append(col)
|
||||
self.operations.append(
|
||||
self.migrator.add_index(model._meta.table_name, columns_, unique=unique)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_index(self, model: peewee.Model, *columns: str) -> peewee.Model:
|
||||
"""Drop indexes."""
|
||||
columns_ = []
|
||||
for col in columns:
|
||||
field = model._meta.fields.get(col)
|
||||
if not field:
|
||||
continue
|
||||
|
||||
if len(columns) == 1:
|
||||
field.unique = field.index = False
|
||||
|
||||
if isinstance(field, peewee.ForeignKeyField):
|
||||
col = col + "_id"
|
||||
columns_.append(col)
|
||||
index_name = make_index_name(model._meta.table_name, columns_)
|
||||
model._meta.indexes = [
|
||||
(cols, _) for (cols, _) in model._meta.indexes if columns != cols
|
||||
]
|
||||
self.operations.append(
|
||||
self.migrator.drop_index(model._meta.table_name, index_name)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
|
||||
"""Add not null."""
|
||||
for name in names:
|
||||
field = model._meta.fields[name]
|
||||
field.null = False
|
||||
self.operations.append(
|
||||
self.migrator.add_not_null(model._meta.table_name, field.column_name)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def drop_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
|
||||
"""Drop not null."""
|
||||
for name in names:
|
||||
field = model._meta.fields[name]
|
||||
field.null = True
|
||||
self.operations.append(
|
||||
self.migrator.drop_not_null(model._meta.table_name, field.column_name)
|
||||
)
|
||||
return model
|
||||
|
||||
@get_model
|
||||
def add_default(
|
||||
self, model: peewee.Model, name: str, default: t.Any
|
||||
) -> peewee.Model:
|
||||
"""Add default."""
|
||||
field = model._meta.fields[name]
|
||||
model._meta.defaults[field] = field.default = default
|
||||
self.operations.append(
|
||||
self.migrator.apply_default(model._meta.table_name, name, field)
|
||||
)
|
||||
return model
|
||||
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
class MigrationManager(object):
|
||||
filemask = re.compile(r"[\d]+_[^\.]+\.py$")
|
||||
|
||||
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy], helper):
|
||||
"""
|
||||
Initializes the migration manager.
|
||||
"""
|
||||
if not isinstance(database, (peewee.Database, peewee.Proxy)):
|
||||
raise RuntimeError("Invalid database: {}".format(database))
|
||||
self.database = database
|
||||
self.helper = helper
|
||||
|
||||
@cached_property
|
||||
def model(self) -> t.Type[MigrateHistory]:
|
||||
"""
|
||||
Initialize and cache the MigrationHistory model.
|
||||
"""
|
||||
MigrateHistory._meta.database = self.database
|
||||
MigrateHistory._meta.table_name = "migratehistory"
|
||||
MigrateHistory._meta.schema = None
|
||||
MigrateHistory.create_table(True)
|
||||
return MigrateHistory
|
||||
|
||||
@property
|
||||
def done(self) -> t.List[str]:
|
||||
"""
|
||||
Scans migrations in the database.
|
||||
"""
|
||||
return [mm.name for mm in self.model.select().order_by(self.model.id)]
|
||||
|
||||
@property
|
||||
def todo(self):
|
||||
"""
|
||||
Scans migrations in the file system.
|
||||
"""
|
||||
if not os.path.exists(self.helper.migration_dir):
|
||||
logger.warning(
|
||||
"Migration directory: {} does not exist.".format(
|
||||
self.helper.migration_dir
|
||||
)
|
||||
)
|
||||
os.makedirs(self.helper.migration_dir)
|
||||
return sorted(
|
||||
f[:-3]
|
||||
for f in os.listdir(self.helper.migration_dir)
|
||||
if self.filemask.match(f)
|
||||
)
|
||||
|
||||
@property
|
||||
def diff(self) -> t.List[str]:
|
||||
"""
|
||||
Calculates difference between the filesystem and the database.
|
||||
"""
|
||||
done = set(self.done)
|
||||
return [name for name in self.todo if name not in done]
|
||||
|
||||
@cached_property
|
||||
def migrator(self) -> Migrator:
|
||||
"""
|
||||
Create migrator and setup it with fake migrations.
|
||||
"""
|
||||
migrator = Migrator(self.database)
|
||||
for name in self.done:
|
||||
self.up_one(name, migrator, True)
|
||||
return migrator
|
||||
|
||||
def compile(self, name, migrate="", rollback=""):
|
||||
"""
|
||||
Compiles a migration.
|
||||
"""
|
||||
name = datetime.utcnow().strftime("%Y%m%d%H%M%S") + "_" + name
|
||||
filename = name + ".py"
|
||||
path = os.path.join(self.helper.migration_dir, filename)
|
||||
with open(path, "w") as f:
|
||||
f.write(
|
||||
MIGRATE_TEMPLATE.format(
|
||||
migrate=migrate, rollback=rollback, name=filename
|
||||
)
|
||||
)
|
||||
|
||||
return name
|
||||
|
||||
def create(self, name: str = "auto", auto: bool = False) -> t.Optional[str]:
|
||||
"""
|
||||
Creates a migration.
|
||||
"""
|
||||
migrate = rollback = ""
|
||||
if auto:
|
||||
raise NotImplementedError
|
||||
|
||||
logger.info('Creating migration "{}"'.format(name))
|
||||
name = self.compile(name, migrate, rollback)
|
||||
logger.info('Migration has been created as "{}"'.format(name))
|
||||
return name
|
||||
|
||||
def clear(self):
|
||||
"""Clear migrations."""
|
||||
self.model.delete().execute()
|
||||
|
||||
def up(self, name: t.Optional[str] = None):
|
||||
"""
|
||||
Runs all unapplied migrations.
|
||||
"""
|
||||
logger.info("Starting migrations")
|
||||
Console.info("Starting migrations")
|
||||
|
||||
done = []
|
||||
diff = self.diff
|
||||
if not diff:
|
||||
logger.info("There is nothing to migrate")
|
||||
Console.info("There is nothing to migrate")
|
||||
return done
|
||||
|
||||
migrator = self.migrator
|
||||
for mname in diff:
|
||||
done.append(self.up_one(mname, self.migrator))
|
||||
if name and name == mname:
|
||||
break
|
||||
|
||||
return done
|
||||
|
||||
def read(self, name: str):
|
||||
"""
|
||||
Reads a migration from a file.
|
||||
"""
|
||||
call_params = dict()
|
||||
if Helpers.is_os_windows() and sys.version_info >= (3, 0):
|
||||
# if system is windows - force utf-8 encoding
|
||||
call_params["encoding"] = "utf-8"
|
||||
with open(
|
||||
os.path.join(self.helper.migration_dir, name + ".py"), **call_params
|
||||
) as f:
|
||||
code = f.read()
|
||||
scope = {}
|
||||
code = compile(code, "<string>", "exec", dont_inherit=True)
|
||||
exec(code, scope, None)
|
||||
return scope.get("migrate", lambda m, d: None), scope.get(
|
||||
"rollback", lambda m, d: None
|
||||
)
|
||||
|
||||
def up_one(
|
||||
self, name: str, migrator: Migrator, fake: bool = False, rollback: bool = False
|
||||
) -> str:
|
||||
"""
|
||||
Runs a migration with a given name.
|
||||
"""
|
||||
try:
|
||||
migrate_fn, rollback_fn = self.read(name)
|
||||
if fake:
|
||||
migrate_fn(migrator, self.database)
|
||||
migrator.clean()
|
||||
return name
|
||||
with self.database.transaction():
|
||||
if rollback:
|
||||
logger.info('Rolling back "{}"'.format(name))
|
||||
rollback_fn(migrator, self.database)
|
||||
migrator.run()
|
||||
self.model.delete().where(self.model.name == name).execute()
|
||||
else:
|
||||
logger.info('Migrate "{}"'.format(name))
|
||||
migrate_fn(migrator, self.database)
|
||||
migrator.run()
|
||||
if name not in self.done:
|
||||
self.model.create(name=name)
|
||||
|
||||
logger.info('Done "{}"'.format(name))
|
||||
return name
|
||||
|
||||
except Exception:
|
||||
self.database.rollback()
|
||||
operation_name = "Rollback" if rollback else "Migration"
|
||||
logger.exception("{} failed: {}".format(operation_name, name))
|
||||
raise
|
||||
|
||||
def down(self):
|
||||
"""
|
||||
Rolls back migrations.
|
||||
"""
|
||||
if not self.done:
|
||||
raise RuntimeError("No migrations are found.")
|
||||
|
||||
name = self.done[-1]
|
||||
|
||||
migrator = self.migrator
|
||||
self.up_one(name, migrator, False, True)
|
||||
logger.warning("Rolled back migration: {}".format(name))
|
||||
|
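MigrationManager names migration files with a UTC timestamp prefix and matches them with the filemask regex, so lexical order equals application order. A small sketch of that naming scheme (migration_filename is illustrative):

import re
from datetime import datetime

FILEMASK = re.compile(r"[\d]+_[^\.]+\.py$")  # same pattern as MigrationManager


def migration_filename(name):
    # Timestamp prefix keeps migrations sorted in creation order on disk
    return datetime.utcnow().strftime("%Y%m%d%H%M%S") + "_" + name + ".py"


fname = migration_filename("add_backup_table")
print(fname, bool(FILEMASK.match(fname)))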
12  app/classes/shared/null_writer.py  Normal file
@ -0,0 +1,12 @@
import logging
import os

logger = logging.getLogger(__name__)


class NullWriter:
    def write(self, data):
        if os.environ.get("CRAFTY_LOG_NULLWRITER", "false") == "true":
            logger.debug(data)
        if os.environ.get("CRAFTY_PRINT_NULLWRITER", "false") == "true":
            print(data)
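NullWriter above is a minimal file-like sink whose behaviour is toggled by environment variables. A hypothetical usage sketch (replacing sys.stdout this way is an assumption; the diff only shows the class itself):

import os
import sys


class DemoNullWriter:
    # Same idea as NullWriter: swallow writes unless an env var re-enables them
    def write(self, data):
        if os.environ.get("CRAFTY_PRINT_NULLWRITER", "false") == "true":
            sys.__stdout__.write(data)

    def flush(self):
        # Objects standing in for stdout should provide flush()
        pass


os.environ["CRAFTY_PRINT_NULLWRITER"] = "true"
sys.stdout = DemoNullWriter()
print("this line is forwarded because the env var is set")
sys.stdout = sys.__stdout__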
@ -1,22 +1,27 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class PermissionHelper:
|
||||
@staticmethod
|
||||
def both_have_perm(a: str, b: str, permission_tested: Enum):
|
||||
return permission_helper.combine_perm_bool(a[permission_tested.value], b[permission_tested.value])
|
||||
def both_have_perm(
|
||||
permission_mask_a: str, permission_mask_b: str, permission_tested: Enum
|
||||
):
|
||||
return PermissionHelper.combine_perm_bool(
|
||||
permission_mask_a[permission_tested.value],
|
||||
permission_mask_b[permission_tested.value],
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def combine_perm(a: str, b: str) -> str:
|
||||
return '1' if (a == '1' and b == '1') else '0'
|
||||
def combine_perm(permission_mask_a: str, permission_mask_b: str) -> str:
|
||||
return "1" if (permission_mask_a == "1" and permission_mask_b == "1") else "0"
|
||||
|
||||
@staticmethod
|
||||
def combine_perm_bool(a: str, b: str) -> bool:
|
||||
return a == '1' and b == '1'
|
||||
def combine_perm_bool(permission_mask_a: str, permission_mask_b: str) -> bool:
|
||||
return permission_mask_a == "1" and permission_mask_b == "1"
|
||||
|
||||
@staticmethod
|
||||
def combine_masks(permission_mask_a: str, permission_mask_b: str) -> str:
|
||||
both_masks = zip(list(permission_mask_a), list(permission_mask_b))
|
||||
return ''.join(map(lambda x: permission_helper.combine_perm(x[0], x[1]), both_masks))
|
||||
|
||||
|
||||
permission_helper = PermissionHelper()
|
||||
return "".join(
|
||||
map(lambda x: PermissionHelper.combine_perm(x[0], x[1]), both_masks)
|
||||
)
|
||||
|
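PermissionHelper treats permission masks as strings of '0'/'1' and combine_masks ANDs them position-wise. A standalone sketch of that logic (function names mirror the ones above):

def combine_perm(permission_mask_a, permission_mask_b):
    return "1" if (permission_mask_a == "1" and permission_mask_b == "1") else "0"


def combine_masks(permission_mask_a, permission_mask_b):
    # Position-wise AND of two permission bitmask strings
    both_masks = zip(list(permission_mask_a), list(permission_mask_b))
    return "".join(combine_perm(x, y) for x, y in both_masks)


print(combine_masks("1101", "1011"))  # -> 1001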
File diff suppressed because it is too large
7  app/classes/shared/singleton.py  Normal file
@ -0,0 +1,7 @@
|
||||
class Singleton(type):
|
||||
_instances = {}
|
||||
|
||||
def __call__(cls, *args, **kwargs):
|
||||
if cls not in cls._instances:
|
||||
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
|
||||
return cls._instances[cls]
|
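A short sketch of what the Singleton metaclass above guarantees; AppConfig is a made-up class used only for illustration:

class AppConfig(metaclass=Singleton):
    def __init__(self):
        self.loaded = True


first = AppConfig()
second = AppConfig()
# The metaclass caches the first instance, so both names point at one object
assert first is second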
@ -4,71 +4,78 @@ import logging
|
||||
import threading
|
||||
import asyncio
|
||||
import datetime
|
||||
from tzlocal import get_localzone
|
||||
from apscheduler.events import EVENT_JOB_EXECUTED
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from app.classes.controllers.users_controller import UsersController
|
||||
|
||||
from app.classes.controllers.users_controller import Users_Controller
|
||||
from app.classes.minecraft.serverjars import server_jar_obj
|
||||
from app.classes.models.management import management_helper
|
||||
from app.classes.models.users import users_helper
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.models.management import HelpersManagement
|
||||
from app.classes.models.users import HelperUsers
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.main_controller import Controller
|
||||
from app.classes.web.tornado_handler import Webserver
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
|
||||
try:
|
||||
from tzlocal import get_localzone
|
||||
from apscheduler.events import EVENT_JOB_EXECUTED
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
logger = logging.getLogger("apscheduler")
|
||||
scheduler_intervals = {
|
||||
"seconds",
|
||||
"minutes",
|
||||
"hours",
|
||||
"days",
|
||||
"weeks",
|
||||
"monday",
|
||||
"tuesday",
|
||||
"wednesday",
|
||||
"thursday",
|
||||
"friday",
|
||||
"saturday",
|
||||
"sunday",
|
||||
}
|
||||
|
||||
except ModuleNotFoundError as err:
|
||||
helper.auto_installer_fix(err)
|
||||
|
||||
logger = logging.getLogger('apscheduler')
|
||||
scheduler_intervals = { 'seconds',
|
||||
'minutes',
|
||||
'hours',
|
||||
'days',
|
||||
'weeks',
|
||||
'monday',
|
||||
'tuesday',
|
||||
'wednesday',
|
||||
'thursday',
|
||||
'friday',
|
||||
'saturday',
|
||||
'sunday'
|
||||
}
|
||||
|
||||
class TasksManager:
|
||||
controller: Controller
|
||||
|
||||
def __init__(self, controller):
|
||||
self.controller = controller
|
||||
self.tornado = Webserver(controller, self)
|
||||
def __init__(self, helper, controller):
|
||||
self.helper: Helpers = helper
|
||||
self.controller: Controller = controller
|
||||
self.tornado: Webserver = Webserver(helper, controller, self)
|
||||
|
||||
self.tz = get_localzone()
|
||||
self.scheduler = BackgroundScheduler(timezone=str(self.tz))
|
||||
|
||||
self.users_controller = Users_Controller()
|
||||
self.users_controller: UsersController = self.controller.users
|
||||
|
||||
self.webserver_thread = threading.Thread(target=self.tornado.run_tornado, daemon=True, name='tornado_thread')
|
||||
self.webserver_thread = threading.Thread(
|
||||
target=self.tornado.run_tornado, daemon=True, name="tornado_thread"
|
||||
)
|
||||
|
||||
self.main_thread_exiting = False
|
||||
|
||||
self.schedule_thread = threading.Thread(target=self.scheduler_thread, daemon=True, name="scheduler")
|
||||
self.schedule_thread = threading.Thread(
|
||||
target=self.scheduler_thread, daemon=True, name="scheduler"
|
||||
)
|
||||
|
||||
self.log_watcher_thread = threading.Thread(target=self.log_watcher, daemon=True, name="log_watcher")
|
||||
self.log_watcher_thread = threading.Thread(
|
||||
target=self.log_watcher, daemon=True, name="log_watcher"
|
||||
)
|
||||
|
||||
self.command_thread = threading.Thread(target=self.command_watcher, daemon=True, name="command_watcher")
|
||||
self.command_thread = threading.Thread(
|
||||
target=self.command_watcher, daemon=True, name="command_watcher"
|
||||
)
|
||||
|
||||
self.realtime_thread = threading.Thread(target=self.realtime, daemon=True, name="realtime")
|
||||
self.realtime_thread = threading.Thread(
|
||||
target=self.realtime, daemon=True, name="realtime"
|
||||
)
|
||||
|
||||
self.reload_schedule_from_db()
|
||||
|
||||
|
||||
def get_main_thread_run_status(self):
|
||||
return self.main_thread_exiting
|
||||
|
||||
def reload_schedule_from_db(self):
|
||||
jobs = management_helper.get_schedules_enabled()
|
||||
jobs = HelpersManagement.get_schedules_enabled()
|
||||
logger.info("Reload from DB called. Current enabled schedules: ")
|
||||
for item in jobs:
|
||||
logger.info(f"JOB: {item}")
|
||||
@ -76,26 +83,43 @@ class TasksManager:
|
||||
def command_watcher(self):
|
||||
while True:
|
||||
# select any commands waiting to be processed
|
||||
commands = management_helper.get_unactioned_commands()
|
||||
for c in commands:
|
||||
commands = HelpersManagement.get_unactioned_commands()
|
||||
for cmd in commands:
|
||||
try:
|
||||
svr = self.controller.get_server_obj(c.server_id)
|
||||
svr = self.controller.servers.get_server_instance_by_id(
|
||||
cmd.server_id.server_id
|
||||
)
|
||||
except:
|
||||
logger.error("Server value requested does note exist purging item from waiting commands.")
|
||||
management_helper.mark_command_complete(c.command_id)
|
||||
logger.error(
|
||||
"Server value requested does not exist! "
|
||||
"Purging item from waiting commands."
|
||||
)
|
||||
HelpersManagement.mark_command_complete(cmd.command_id)
|
||||
continue
|
||||
|
||||
user_id = c.user_id
|
||||
command = c.command
|
||||
user_id = cmd.user_id
|
||||
command = cmd.command
|
||||
|
||||
if command == 'start_server':
|
||||
if command == "start_server":
|
||||
svr.run_threaded_server(user_id)
|
||||
|
||||
elif command == 'stop_server':
|
||||
elif command == "stop_server":
|
||||
svr.stop_threaded_server()
|
||||
|
||||
elif command == "restart_server":
|
||||
svr.restart_threaded_server(user_id)
|
||||
|
||||
elif command == "kill_server":
|
||||
try:
|
||||
svr.kill()
|
||||
time.sleep(5)
|
||||
svr.cleanup_server_object()
|
||||
svr.record_server_stats()
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Could not find PID for requested termsig. Full error: {e}"
|
||||
)
|
||||
|
||||
elif command == "backup_server":
|
||||
svr.backup_server()
|
||||
|
||||
@ -103,19 +127,20 @@ class TasksManager:
|
||||
svr.jar_update()
|
||||
else:
|
||||
svr.send_command(command)
|
||||
management_helper.mark_command_complete(c.command_id)
|
||||
|
||||
HelpersManagement.mark_command_complete(cmd.command_id)
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
def _main_graceful_exit(self):
|
||||
try:
|
||||
os.remove(helper.session_file)
|
||||
self.controller.stop_all_servers()
|
||||
os.remove(self.helper.session_file)
|
||||
self.controller.servers.stop_all_servers()
|
||||
except:
|
||||
logger.info("Caught error during shutdown", exc_info=True)
|
||||
|
||||
logger.info("***** Crafty Shutting Down *****\n\n")
|
||||
console.info("***** Crafty Shutting Down *****\n\n")
|
||||
Console.info("***** Crafty Shutting Down *****\n\n")
|
||||
self.main_thread_exiting = True
|
||||
|
||||
def start_webserver(self):
|
||||
@ -123,9 +148,11 @@ class TasksManager:
|
||||
|
||||
def reload_webserver(self):
|
||||
self.tornado.stop_web_server()
|
||||
console.info("Waiting 3 seconds")
|
||||
Console.info("Waiting 3 seconds")
|
||||
time.sleep(3)
|
||||
self.webserver_thread = threading.Thread(target=self.tornado.run_tornado, daemon=True, name='tornado_thread')
|
||||
self.webserver_thread = threading.Thread(
|
||||
target=self.tornado.run_tornado, daemon=True, name="tornado_thread"
|
||||
)
|
||||
self.start_webserver()
|
||||
|
||||
def stop_webserver(self):
|
||||
@ -133,79 +160,96 @@ class TasksManager:
|
||||
|
||||
def start_scheduler(self):
|
||||
logger.info("Launching Scheduler Thread...")
|
||||
console.info("Launching Scheduler Thread...")
|
||||
Console.info("Launching Scheduler Thread...")
|
||||
self.schedule_thread.start()
|
||||
logger.info("Launching command thread...")
|
||||
console.info("Launching command thread...")
|
||||
Console.info("Launching command thread...")
|
||||
self.command_thread.start()
|
||||
logger.info("Launching log watcher...")
|
||||
console.info("Launching log watcher...")
|
||||
Console.info("Launching log watcher...")
|
||||
self.log_watcher_thread.start()
|
||||
logger.info("Launching realtime thread...")
|
||||
console.info("Launching realtime thread...")
|
||||
Console.info("Launching realtime thread...")
|
||||
self.realtime_thread.start()
|
||||
|
||||
def scheduler_thread(self):
|
||||
schedules = management_helper.get_schedules_enabled()
|
||||
schedules = HelpersManagement.get_schedules_enabled()
|
||||
self.scheduler.add_listener(self.schedule_watcher, mask=EVENT_JOB_EXECUTED)
|
||||
#self.scheduler.add_job(self.scheduler.print_jobs, 'interval', seconds=10, id='-1')
|
||||
# self.scheduler.add_job(
|
||||
# self.scheduler.print_jobs, "interval", seconds=10, id="-1"
|
||||
# )
|
||||
|
||||
#load schedules from DB
|
||||
# load schedules from DB
|
||||
for schedule in schedules:
|
||||
if schedule.interval != 'reaction':
|
||||
if schedule.interval != "reaction":
|
||||
if schedule.cron_string != "":
|
||||
try:
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
CronTrigger.from_crontab(schedule.cron_string,
|
||||
timezone=str(self.tz)),
|
||||
id = str(schedule.schedule_id),
|
||||
args = [schedule.server_id,
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
schedule.command]
|
||||
)
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
CronTrigger.from_crontab(
|
||||
schedule.cron_string, timezone=str(self.tz)
|
||||
),
|
||||
id=str(schedule.schedule_id),
|
||||
args=[
|
||||
schedule.server_id,
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
schedule.command,
|
||||
],
|
||||
)
|
||||
except Exception as e:
|
||||
console.error(f"Failed to schedule task with error: {e}.")
|
||||
console.warning("Removing failed task from DB.")
|
||||
Console.error(f"Failed to schedule task with error: {e}.")
|
||||
Console.warning("Removing failed task from DB.")
|
||||
logger.error(f"Failed to schedule task with error: {e}.")
|
||||
logger.warning("Removing failed task from DB.")
|
||||
#remove items from DB if task fails to add to apscheduler
|
||||
management_helper.delete_scheduled_task(schedule.schedule_id)
|
||||
# remove items from DB if task fails to add to apscheduler
|
||||
self.controller.management_helper.delete_scheduled_task(
|
||||
schedule.schedule_id
|
||||
)
|
||||
else:
|
||||
if schedule.interval_type == 'hours':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = 0,
|
||||
hour = '*/'+str(schedule.interval),
|
||||
id = str(schedule.schedule_id),
|
||||
args = [schedule.server_id,
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
schedule.command]
|
||||
)
|
||||
elif schedule.interval_type == 'minutes':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = '*/'+str(schedule.interval),
|
||||
id = str(schedule.schedule_id),
|
||||
args = [schedule.server_id,
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
schedule.command]
|
||||
)
|
||||
elif schedule.interval_type == 'days':
|
||||
curr_time = schedule.start_time.split(':')
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
day = '*/'+str(schedule.interval),
|
||||
hour=curr_time[0],
|
||||
minute=curr_time[1],
|
||||
id=str(schedule.schedule_id),
|
||||
args=[schedule.server_id,
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
schedule.command]
|
||||
)
|
||||
if schedule.interval_type == "hours":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute=0,
|
||||
hour="*/" + str(schedule.interval),
|
||||
id=str(schedule.schedule_id),
|
||||
args=[
|
||||
schedule.server_id,
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
schedule.command,
|
||||
],
|
||||
)
|
||||
elif schedule.interval_type == "minutes":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute="*/" + str(schedule.interval),
|
||||
id=str(schedule.schedule_id),
|
||||
args=[
|
||||
schedule.server_id,
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
schedule.command,
|
||||
],
|
||||
)
|
||||
elif schedule.interval_type == "days":
|
||||
curr_time = schedule.start_time.split(":")
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
day="*/" + str(schedule.interval),
|
||||
hour=curr_time[0],
|
||||
minute=curr_time[1],
|
||||
id=str(schedule.schedule_id),
|
||||
args=[
|
||||
schedule.server_id,
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
schedule.command,
|
||||
],
|
||||
)
|
||||
self.scheduler.start()
|
||||
jobs = self.scheduler.get_jobs()
|
||||
logger.info("Loaded schedules. Current enabled schedules: ")
|
||||
@ -213,240 +257,332 @@ class TasksManager:
|
||||
logger.info(f"JOB: {item}")
|
||||
|
||||
def schedule_job(self, job_data):
|
||||
sch_id = management_helper.create_scheduled_task(
|
||||
job_data['server_id'],
|
||||
job_data['action'],
|
||||
job_data['interval'],
|
||||
job_data['interval_type'],
|
||||
job_data['start_time'],
|
||||
job_data['command'],
|
||||
sch_id = HelpersManagement.create_scheduled_task(
|
||||
job_data["server_id"],
|
||||
job_data["action"],
|
||||
job_data["interval"],
|
||||
job_data["interval_type"],
|
||||
job_data["start_time"],
|
||||
job_data["command"],
|
||||
"None",
|
||||
job_data['enabled'],
|
||||
job_data['one_time'],
|
||||
job_data['cron_string'],
|
||||
job_data['parent'],
|
||||
job_data['delay'])
|
||||
#Checks to make sure some doofus didn't actually make the newly created task a child of itself.
|
||||
if str(job_data['parent']) == str(sch_id):
|
||||
management_helper.update_scheduled_task(sch_id, {'parent':None})
|
||||
#Check to see if it's enabled and is not a chain reaction.
|
||||
if job_data['enabled'] and job_data['interval_type'] != 'reaction':
|
||||
if job_data['cron_string'] != "":
|
||||
job_data["enabled"],
|
||||
job_data["one_time"],
|
||||
job_data["cron_string"],
|
||||
job_data["parent"],
|
||||
job_data["delay"],
|
||||
)
|
||||
# Checks to make sure some doofus didn't actually make the newly
|
||||
# created task a child of itself.
|
||||
if str(job_data["parent"]) == str(sch_id):
|
||||
HelpersManagement.update_scheduled_task(sch_id, {"parent": None})
|
||||
# Check to see if it's enabled and is not a chain reaction.
|
||||
if job_data["enabled"] and job_data["interval_type"] != "reaction":
|
||||
if job_data["cron_string"] != "":
|
||||
try:
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
CronTrigger.from_crontab(job_data['cron_string'],
|
||||
timezone=str(self.tz)),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
CronTrigger.from_crontab(
|
||||
job_data["cron_string"], timezone=str(self.tz)
|
||||
),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
except Exception as e:
|
||||
console.error(f"Failed to schedule task with error: {e}.")
|
||||
console.warning("Removing failed task from DB.")
|
||||
Console.error(f"Failed to schedule task with error: {e}.")
|
||||
Console.warning("Removing failed task from DB.")
|
||||
logger.error(f"Failed to schedule task with error: {e}.")
|
||||
logger.warning("Removing failed task from DB.")
|
||||
#remove items from DB if task fails to add to apscheduler
|
||||
management_helper.delete_scheduled_task(sch_id)
|
||||
# remove items from DB if task fails to add to apscheduler
|
||||
self.controller.management_helper.delete_scheduled_task(sch_id)
|
||||
else:
|
||||
if job_data['interval_type'] == 'hours':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = 0,
|
||||
hour = '*/'+str(job_data['interval']),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
elif job_data['interval_type'] == 'minutes':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = '*/'+str(job_data['interval']),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
elif job_data['interval_type'] == 'days':
|
||||
curr_time = job_data['start_time'].split(':')
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
day = '*/'+str(job_data['interval']),
|
||||
hour = curr_time[0],
|
||||
minute = curr_time[1],
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']],
|
||||
)
|
||||
if job_data["interval_type"] == "hours":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute=0,
|
||||
hour="*/" + str(job_data["interval"]),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
elif job_data["interval_type"] == "minutes":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute="*/" + str(job_data["interval"]),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
elif job_data["interval_type"] == "days":
|
||||
curr_time = job_data["start_time"].split(":")
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
day="*/" + str(job_data["interval"]),
|
||||
hour=curr_time[0],
|
||||
minute=curr_time[1],
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
logger.info("Added job. Current enabled schedules: ")
|
||||
jobs = self.scheduler.get_jobs()
|
||||
for item in jobs:
|
||||
logger.info(f"JOB: {item}")
|
||||
|
||||
def remove_all_server_tasks(self, server_id):
|
||||
schedules = management_helper.get_schedules_by_server(server_id)
|
||||
schedules = HelpersManagement.get_schedules_by_server(server_id)
|
||||
for schedule in schedules:
|
||||
if schedule.interval != 'reaction':
|
||||
if schedule.interval != "reaction":
|
||||
self.remove_job(schedule.schedule_id)
|
||||
|
||||
def remove_job(self, sch_id):
|
||||
job = management_helper.get_scheduled_task_model(sch_id)
|
||||
for schedule in management_helper.get_child_schedules(sch_id):
|
||||
management_helper.update_scheduled_task(schedule.schedule_id, {'parent':None})
|
||||
management_helper.delete_scheduled_task(sch_id)
|
||||
if job.enabled and job.interval_type != 'reaction':
|
||||
job = HelpersManagement.get_scheduled_task_model(sch_id)
|
||||
for schedule in HelpersManagement.get_child_schedules(sch_id):
|
||||
self.controller.management_helper.update_scheduled_task(
|
||||
schedule.schedule_id, {"parent": None}
|
||||
)
|
||||
self.controller.management_helper.delete_scheduled_task(sch_id)
|
||||
if job.enabled and job.interval_type != "reaction":
|
||||
self.scheduler.remove_job(str(sch_id))
|
||||
logger.info(f"Job with ID {sch_id} was deleted.")
|
||||
else:
|
||||
logger.info(f"Job with ID {sch_id} was deleted from DB, but was not enabled."
|
||||
+ "Not going to try removing something that doesn't exist from active schedules.")
|
||||
logger.info(
|
||||
f"Job with ID {sch_id} was deleted from DB, but was not enabled."
|
||||
f"Not going to try removing something "
|
||||
f"that doesn't exist from active schedules."
|
||||
)
|
||||
|
||||
def update_job(self, sch_id, job_data):
|
||||
management_helper.update_scheduled_task(sch_id, job_data)
|
||||
#Checks to make sure some doofus didn't actually make the newly created task a child of itself.
|
||||
if str(job_data['parent']) == str(sch_id):
|
||||
management_helper.update_scheduled_task(sch_id, {'parent':None})
|
||||
HelpersManagement.update_scheduled_task(sch_id, job_data)
|
||||
# Checks to make sure some doofus didn't actually make the newly
|
||||
# created task a child of itself.
|
||||
if str(job_data["parent"]) == str(sch_id):
|
||||
HelpersManagement.update_scheduled_task(sch_id, {"parent": None})
|
||||
try:
|
||||
if job_data['interval'] != 'reaction':
|
||||
if job_data["interval"] != "reaction":
|
||||
self.scheduler.remove_job(str(sch_id))
|
||||
except:
|
||||
logger.info("No job found in update job. Assuming it was previously disabled. Starting new job.")
|
||||
logger.info(
|
||||
"No job found in update job. "
|
||||
"Assuming it was previously disabled. Starting new job."
|
||||
)
|
||||
|
||||
if job_data['enabled']:
|
||||
if job_data['interval'] != 'reaction':
|
||||
if job_data['cron_string'] != "":
|
||||
if job_data["enabled"]:
|
||||
if job_data["interval"] != "reaction":
|
||||
if job_data["cron_string"] != "":
|
||||
try:
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
CronTrigger.from_crontab(job_data['cron_string'],
|
||||
timezone=str(self.tz)),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
CronTrigger.from_crontab(
|
||||
job_data["cron_string"], timezone=str(self.tz)
|
||||
),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
except Exception as e:
|
||||
console.error(f"Failed to schedule task with error: {e}.")
|
||||
console.info("Removing failed task from DB.")
|
||||
management_helper.delete_scheduled_task(sch_id)
|
||||
Console.error(f"Failed to schedule task with error: {e}.")
|
||||
Console.info("Removing failed task from DB.")
|
||||
self.controller.management_helper.delete_scheduled_task(sch_id)
|
||||
else:
|
||||
if job_data['interval_type'] == 'hours':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = 0,
|
||||
hour = '*/'+str(job_data['interval']),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
elif job_data['interval_type'] == 'minutes':
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
minute = '*/'+str(job_data['interval']),
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
elif job_data['interval_type'] == 'days':
|
||||
curr_time = job_data['start_time'].split(':')
|
||||
self.scheduler.add_job(management_helper.add_command,
|
||||
'cron',
|
||||
day = '*/'+str(job_data['interval']),
|
||||
hour = curr_time[0],
|
||||
minute = curr_time[1],
|
||||
id=str(sch_id),
|
||||
args=[job_data['server_id'],
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
job_data['command']]
|
||||
)
|
||||
if job_data["interval_type"] == "hours":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute=0,
|
||||
hour="*/" + str(job_data["interval"]),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
elif job_data["interval_type"] == "minutes":
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
minute="*/" + str(job_data["interval"]),
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
elif job_data["interval_type"] == "days":
|
||||
curr_time = job_data["start_time"].split(":")
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"cron",
|
||||
day="*/" + str(job_data["interval"]),
|
||||
hour=curr_time[0],
|
||||
minute=curr_time[1],
|
||||
id=str(sch_id),
|
||||
args=[
|
||||
job_data["server_id"],
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
job_data["command"],
|
||||
],
|
||||
)
|
||||
else:
|
||||
try:
|
||||
self.scheduler.get_job(str(sch_id))
|
||||
self.scheduler.remove_job(str(sch_id))
|
||||
except:
|
||||
logger.info(f"APScheduler found no scheduled job on schedule update for schedule with id: {sch_id} Assuming it was already disabled.")
|
||||
logger.info(
|
||||
f"APScheduler found no scheduled job on schedule update for "
|
||||
f"schedule with id: {sch_id} Assuming it was already disabled."
|
||||
)
|
||||
|
||||
def schedule_watcher(self, event):
|
||||
if not event.exception:
|
||||
if str(event.job_id).isnumeric():
|
||||
task = management_helper.get_scheduled_task_model(int(event.job_id))
|
||||
management_helper.add_to_audit_log_raw('system', users_helper.get_user_id_by_name('system'), task.server_id,
|
||||
f"Task with id {task.schedule_id} completed successfully", '127.0.0.1')
|
||||
#check if the task is a single run.
|
||||
task = self.controller.management.get_scheduled_task_model(
|
||||
int(event.job_id)
|
||||
)
|
||||
self.controller.management.add_to_audit_log_raw(
|
||||
"system",
|
||||
HelperUsers.get_user_id_by_name("system"),
|
||||
task.server_id,
|
||||
f"Task with id {task.schedule_id} completed successfully",
|
||||
"127.0.0.1",
|
||||
)
|
||||
# check if the task is a single run.
|
||||
if task.one_time:
|
||||
self.remove_job(task.schedule_id)
|
||||
logger.info("one time task detected. Deleting...")
|
||||
#check for any child tasks for this. It's kind of backward, but this makes DB management a lot easier. One to one instead of one to many.
|
||||
for schedule in management_helper.get_child_schedules_by_server(task.schedule_id, task.server_id):
|
||||
#event job ID's are strings so we need to look at this as the same data type.
|
||||
# check for any child tasks for this. It's kind of backward,
|
||||
# but this makes DB management a lot easier. One to one
|
||||
# instead of one to many.
|
||||
for schedule in HelpersManagement.get_child_schedules_by_server(
|
||||
task.schedule_id, task.server_id
|
||||
):
|
||||
# event job ID's are strings so we need to look at
|
||||
# this as the same data type.
|
||||
if str(schedule.parent) == str(event.job_id):
|
||||
if schedule.enabled:
|
||||
delaytime = datetime.datetime.now() + datetime.timedelta(seconds=schedule.delay)
|
||||
self.scheduler.add_job(management_helper.add_command, 'date', run_date=delaytime, id=str(schedule.schedule_id),
|
||||
args=[schedule.server_id,
|
||||
self.users_controller.get_id_by_name('system'),
|
||||
'127.0.0.1',
|
||||
schedule.command])
|
||||
delaytime = datetime.datetime.now() + datetime.timedelta(
|
||||
seconds=schedule.delay
|
||||
)
|
||||
self.scheduler.add_job(
|
||||
HelpersManagement.add_command,
|
||||
"date",
|
||||
run_date=delaytime,
|
||||
id=str(schedule.schedule_id),
|
||||
args=[
|
||||
schedule.server_id,
|
||||
self.users_controller.get_id_by_name("system"),
|
||||
"127.0.0.1",
|
||||
schedule.command,
|
||||
],
|
||||
)
|
||||
else:
|
||||
logger.info("Event job ID is not numerical. Assuming it's stats - not stored in DB. Moving on.")
|
||||
logger.info(
|
||||
"Event job ID is not numerical. Assuming it's stats "
|
||||
"- not stored in DB. Moving on."
|
||||
)
|
||||
else:
|
||||
logger.error(f"Task failed with error: {event.exception}")
|
||||
|
||||
def start_stats_recording(self):
|
||||
stats_update_frequency = helper.get_setting('stats_update_frequency')
|
||||
logger.info(f"Stats collection frequency set to {stats_update_frequency} seconds")
|
||||
console.info(f"Stats collection frequency set to {stats_update_frequency} seconds")
|
||||
stats_update_frequency = self.helper.get_setting("stats_update_frequency")
|
||||
logger.info(
|
||||
f"Stats collection frequency set to {stats_update_frequency} seconds"
|
||||
)
|
||||
Console.info(
|
||||
f"Stats collection frequency set to {stats_update_frequency} seconds"
|
||||
)
|
||||
|
||||
# one for now,
|
||||
self.controller.stats.record_stats()
|
||||
self.controller.servers.stats.record_stats()
|
||||
# one for later
|
||||
self.scheduler.add_job(self.controller.stats.record_stats, 'interval', seconds=stats_update_frequency, id="stats")
|
||||
|
||||
self.scheduler.add_job(
|
||||
self.controller.servers.stats.record_stats,
|
||||
"interval",
|
||||
seconds=stats_update_frequency,
|
||||
id="stats",
|
||||
)
|
||||
|
||||
def serverjar_cache_refresher(self):
|
||||
logger.info("Refreshing serverjars.com cache on start")
|
||||
server_jar_obj.refresh_cache()
|
||||
self.controller.server_jars.refresh_cache()
|
||||
|
||||
logger.info("Scheduling Serverjars.com cache refresh service every 12 hours")
|
||||
self.scheduler.add_job(server_jar_obj.refresh_cache, 'interval', hours=12, id="serverjars")
|
||||
self.scheduler.add_job(
|
||||
self.controller.server_jars.refresh_cache,
|
||||
"interval",
|
||||
hours=12,
|
||||
id="serverjars",
|
||||
)
|
||||
|
||||
def realtime(self):
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
host_stats = management_helper.get_latest_hosts_stats()
|
||||
host_stats = HelpersManagement.get_latest_hosts_stats()
|
||||
|
||||
while True:
|
||||
|
||||
if host_stats.get('cpu_usage') != \
|
||||
management_helper.get_latest_hosts_stats().get('cpu_usage') or \
|
||||
host_stats.get('mem_percent') != \
|
||||
management_helper.get_latest_hosts_stats().get('mem_percent'):
|
||||
if host_stats.get(
|
||||
"cpu_usage"
|
||||
) != HelpersManagement.get_latest_hosts_stats().get(
|
||||
"cpu_usage"
|
||||
) or host_stats.get(
|
||||
"mem_percent"
|
||||
) != HelpersManagement.get_latest_hosts_stats().get(
|
||||
"mem_percent"
|
||||
):
|
||||
# Stats are different
|
||||
|
||||
host_stats = management_helper.get_latest_hosts_stats()
|
||||
if len(websocket_helper.clients) > 0:
|
||||
host_stats = HelpersManagement.get_latest_hosts_stats()
|
||||
if len(self.helper.websocket_helper.clients) > 0:
|
||||
# There are clients
|
||||
websocket_helper.broadcast_page('/panel/dashboard', 'update_host_stats', {
|
||||
'cpu_usage': host_stats.get('cpu_usage'),
|
||||
'cpu_cores': host_stats.get('cpu_cores'),
|
||||
'cpu_cur_freq': host_stats.get('cpu_cur_freq'),
|
||||
'cpu_max_freq': host_stats.get('cpu_max_freq'),
|
||||
'mem_percent': host_stats.get('mem_percent'),
|
||||
'mem_usage': host_stats.get('mem_usage')
|
||||
})
|
||||
self.helper.websocket_helper.broadcast_page(
|
||||
"/panel/dashboard",
|
||||
"update_host_stats",
|
||||
{
|
||||
"cpu_usage": host_stats.get("cpu_usage"),
|
||||
"cpu_cores": host_stats.get("cpu_cores"),
|
||||
"cpu_cur_freq": host_stats.get("cpu_cur_freq"),
|
||||
"cpu_max_freq": host_stats.get("cpu_max_freq"),
|
||||
"mem_percent": host_stats.get("mem_percent"),
|
||||
"mem_usage": host_stats.get("mem_usage"),
|
||||
},
|
||||
)
|
||||
time.sleep(1)
|
||||
|
||||
def log_watcher(self):
|
||||
self.controller.servers.check_for_old_logs()
|
||||
self.scheduler.add_job(self.controller.servers.check_for_old_logs, 'interval', hours=6, id="log-mgmt")
|
||||
self.scheduler.add_job(
|
||||
self.controller.servers.check_for_old_logs,
|
||||
"interval",
|
||||
hours=6,
|
||||
id="log-mgmt",
|
||||
)
|
||||
|
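The tasks.py changes above all lean on the same APScheduler pattern; below is a condensed, standalone sketch of the cron-string path, with a stand-in for HelpersManagement.add_command and made-up argument values:

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger


def add_command(server_id, user_id, source_ip, command):
    # Stand-in for HelpersManagement.add_command: queue a command for the
    # command_watcher loop to pick up later.
    print(f"queued {command!r} for server {server_id}")


scheduler = BackgroundScheduler(timezone="UTC")
scheduler.add_job(
    add_command,
    # "0 2 * * *" fires daily at 02:00, mirroring how the diff builds triggers
    # from schedule.cron_string
    CronTrigger.from_crontab("0 2 * * *", timezone="UTC"),
    id="1",
    args=[1, 1, "127.0.0.1", "backup_server"],
)
scheduler.start()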
@ -1,75 +1,91 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import typing as t
|
||||
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.helpers import helper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Translation:
|
||||
def __init__(self):
|
||||
self.translations_path = os.path.join(helper.root_dir, 'app', 'translations')
|
||||
self.cached_translation = None
|
||||
self.cached_translation_lang = None
|
||||
|
||||
def get_language_file(self, language: str):
|
||||
return os.path.join(self.translations_path, str(language) + '.json')
|
||||
|
||||
def translate(self, page, word, language):
|
||||
fallback_language = 'en_EN'
|
||||
|
||||
translated_word = self.translate_inner(page, word, language)
|
||||
if translated_word is None:
|
||||
translated_word = self.translate_inner(page, word, fallback_language)
|
||||
|
||||
if translated_word:
|
||||
if isinstance(translated_word, dict):
|
||||
# JSON objects
|
||||
return json.dumps(translated_word)
|
||||
elif isinstance(translated_word, str):
|
||||
# Basic strings
|
||||
return translated_word
|
||||
elif hasattr(translated_word, '__iter__'):
|
||||
# Multiline strings
|
||||
return '\n'.join(translated_word)
|
||||
return 'Error while getting translation'
|
||||
|
||||
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
|
||||
language_file = self.get_language_file(language)
|
||||
try:
|
||||
if not self.cached_translation:
|
||||
with open(language_file, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
self.cached_translation = data
|
||||
elif self.cached_translation_lang != language:
|
||||
with open(language_file, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
self.cached_translation = data
|
||||
self.cached_translation_lang = language
|
||||
else:
|
||||
data = self.cached_translation
|
||||
|
||||
try:
|
||||
translated_page = data[page]
|
||||
except KeyError:
|
||||
logger.error(f'Translation File Error: page {page} does not exist for lang {language}')
|
||||
console.error(f'Translation File Error: page {page} does not exist for lang {language}')
|
||||
return None
|
||||
|
||||
try:
|
||||
translated_word = translated_page[word]
|
||||
return translated_word
|
||||
except KeyError:
|
||||
logger.error(f'Translation File Error: word {word} does not exist on page {page} for lang {language}')
|
||||
console.error(f'Translation File Error: word {word} does not exist on page {page} for lang {language}')
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.critical(f'Translation File Error: Unable to read {language_file} due to {e}')
|
||||
console.critical(f'Translation File Error: Unable to read {language_file} due to {e}')
|
||||
return None
|
||||
|
||||
|
||||
translation = Translation()
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import typing as t
|
||||
|
||||
from app.classes.shared.console import Console
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Translation:
|
||||
def __init__(self, helper):
|
||||
self.helper = helper
|
||||
self.translations_path = os.path.join(
|
||||
self.helper.root_dir, "app", "translations"
|
||||
)
|
||||
self.cached_translation = None
|
||||
self.cached_translation_lang = None
|
||||
|
||||
def get_language_file(self, language: str):
|
||||
return os.path.join(self.translations_path, str(language) + ".json")
|
||||
|
||||
def translate(self, page, word, language):
|
||||
fallback_language = "en_EN"
|
||||
|
||||
translated_word = self.translate_inner(page, word, language)
|
||||
if translated_word is None:
|
||||
translated_word = self.translate_inner(page, word, fallback_language)
|
||||
|
||||
if translated_word:
|
||||
if isinstance(translated_word, dict):
|
||||
# JSON objects
|
||||
return json.dumps(translated_word)
|
||||
elif isinstance(translated_word, str):
|
||||
# Basic strings
|
||||
return translated_word
|
||||
elif hasattr(translated_word, "__iter__"):
|
||||
# Multiline strings
|
||||
return "\n".join(translated_word)
|
||||
return "Error while getting translation"
|
||||
|
||||
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
|
||||
language_file = self.get_language_file(language)
|
||||
try:
|
||||
if not self.cached_translation:
|
||||
with open(language_file, "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
self.cached_translation = data
|
||||
elif self.cached_translation_lang != language:
|
||||
with open(language_file, "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
self.cached_translation = data
|
||||
self.cached_translation_lang = language
|
||||
else:
|
||||
data = self.cached_translation
|
||||
|
||||
try:
|
||||
translated_page = data[page]
|
||||
except KeyError:
|
||||
logger.error(
|
||||
f"Translation File Error: page {page} "
|
||||
f"does not exist for lang {language}"
|
||||
)
|
||||
Console.error(
|
||||
f"Translation File Error: page {page} "
|
||||
f"does not exist for lang {language}"
|
||||
)
|
||||
return None
|
||||
|
||||
try:
|
||||
translated_word = translated_page[word]
|
||||
return translated_word
|
||||
except KeyError:
|
||||
logger.error(
|
||||
f"Translation File Error: word {word} does not exist on page "
|
||||
f"{page} for lang {language}"
|
||||
)
|
||||
Console.error(
|
||||
f"Translation File Error: word {word} does not exist on page "
|
||||
f"{page} for lang {language}"
|
||||
)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.critical(
|
||||
f"Translation File Error: Unable to read {language_file} due to {e}"
|
||||
)
|
||||
Console.critical(
|
||||
f"Translation File Error: Unable to read {language_file} due to {e}"
|
||||
)
|
||||
return None
|
||||
|
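The Translation class above reduces to: look the key up in the requested language file, fall back to en_EN, and cache the last file read. A stripped-down sketch of that flow, omitting the caching, with an assumed translations path:

import json
import os

TRANSLATIONS_PATH = os.path.join("app", "translations")  # assumed location
FALLBACK_LANGUAGE = "en_EN"


def lookup(page, word, language):
    for lang in (language, FALLBACK_LANGUAGE):
        language_file = os.path.join(TRANSLATIONS_PATH, f"{lang}.json")
        try:
            with open(language_file, "r", encoding="utf-8") as f:
                data = json.load(f)
            return data[page][word]
        except (OSError, KeyError):
            continue  # missing file or key: try the fallback language
    return "Error while getting translation"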
@ -3,27 +3,20 @@ import html
|
||||
import re
|
||||
import logging
|
||||
import time
|
||||
import bleach
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
|
||||
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.translation import translation
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.server import ServerOutBuf
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import bleach
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
|
||||
except ModuleNotFoundError as ex:
|
||||
helper.auto_installer_fix(ex)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class AjaxHandler(BaseHandler):
|
||||
|
||||
class AjaxHandler(BaseHandler):
|
||||
def render_page(self, template, page_data):
|
||||
self.render(
|
||||
template,
|
||||
@ -34,22 +27,19 @@ class AjaxHandler(BaseHandler):
|
||||
@tornado.web.authenticated
|
||||
def get(self, page):
|
||||
_, _, exec_user = self.current_user
|
||||
error = bleach.clean(self.get_argument('error', "WTF Error!"))
|
||||
error = bleach.clean(self.get_argument("error", "WTF Error!"))
|
||||
|
||||
template = "panel/denied.html"
|
||||
|
||||
page_data = {
|
||||
'user_data': exec_user,
|
||||
'error': error
|
||||
}
|
||||
page_data = {"user_data": exec_user, "error": error}
|
||||
|
||||
if page == "error":
|
||||
template = "public/error.html"
|
||||
self.render_page(template, page_data)
|
||||
|
||||
elif page == 'server_log':
|
||||
server_id = self.get_argument('id', None)
|
||||
full_log = self.get_argument('full', False)
|
||||
elif page == "server_log":
|
||||
server_id = self.get_argument("id", None)
|
||||
full_log = self.get_argument("full", False)
|
||||
|
||||
if server_id is None:
|
||||
logger.warning("Server ID not found in server_log ajax call")
|
||||
@ -64,50 +54,59 @@ class AjaxHandler(BaseHandler):
|
||||
self.redirect("/panel/error?error=Server ID Not Found")
|
||||
return
|
||||
|
||||
if not server_data['log_path']:
|
||||
logger.warning(f"Log path not found in server_log ajax call ({server_id})")
|
||||
if not server_data["log_path"]:
|
||||
logger.warning(
|
||||
f"Log path not found in server_log ajax call ({server_id})"
|
||||
)
|
||||
|
||||
if full_log:
|
||||
log_lines = helper.get_setting('max_log_lines')
|
||||
data = helper.tail_file(helper.get_os_understandable_path(server_data['log_path']), log_lines)
|
||||
log_lines = self.helper.get_setting("max_log_lines")
|
||||
data = Helpers.tail_file(
|
||||
Helpers.get_os_understandable_path(server_data["log_path"]),
|
||||
log_lines,
|
||||
)
|
||||
else:
|
||||
data = ServerOutBuf.lines.get(server_id, [])
|
||||
|
||||
|
||||
for d in data:
|
||||
for line in data:
|
||||
try:
|
||||
d = re.sub('(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )', '', d)
|
||||
d = re.sub('[A-z]{2}\b\b', '', d)
|
||||
line = helper.log_colors(html.escape(d))
|
||||
self.write(f'{line}<br />')
|
||||
line = re.sub("(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)", "", line)
|
||||
line = re.sub("[A-z]{2}\b\b", "", line)
|
||||
line = self.helper.log_colors(html.escape(line))
|
||||
self.write(f"{line}<br />")
|
||||
# self.write(d.encode("utf-8"))
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Skipping Log Line due to error: {e}")
|
||||
|
||||
elif page == "announcements":
|
||||
data = helper.get_announcements()
|
||||
page_data['notify_data'] = data
|
||||
self.render_page('ajax/notify.html', page_data)
|
||||
|
||||
data = Helpers.get_announcements()
|
||||
page_data["notify_data"] = data
|
||||
self.render_page("ajax/notify.html", page_data)
|
||||
|
||||
elif page == "get_zip_tree":
|
||||
path = self.get_argument('path', None)
|
||||
path = self.get_argument("path", None)
|
||||
|
||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||
helper.generate_zip_tree(path))
|
||||
self.write(
|
||||
Helpers.get_os_understandable_path(path)
|
||||
+ "\n"
|
||||
+ Helpers.generate_zip_tree(path)
|
||||
)
|
||||
self.finish()
|
||||
|
||||
elif page == "get_zip_dir":
|
||||
path = self.get_argument('path', None)
|
||||
path = self.get_argument("path", None)
|
||||
|
||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||
helper.generate_zip_dir(path))
|
||||
self.write(
|
||||
Helpers.get_os_understandable_path(path)
|
||||
+ "\n"
|
||||
+ Helpers.generate_zip_dir(path)
|
||||
)
|
||||
self.finish()
|
||||
|
||||
elif page == "get_backup_tree":
|
||||
server_id = self.get_argument('id', None)
|
||||
folder = self.get_argument('path', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
folder = self.get_argument("path", None)
|
||||
|
||||
output = ""
|
||||
|
||||
@ -119,31 +118,31 @@ class AjaxHandler(BaseHandler):
|
||||
dir_list.append(item)
|
||||
else:
|
||||
unsorted_files.append(item)
|
||||
file_list = sorted(dir_list, key=str.casefold) + sorted(unsorted_files, key=str.casefold)
|
||||
output += \
|
||||
f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
|
||||
|
||||
file_list = sorted(dir_list, key=str.casefold) + sorted(
|
||||
unsorted_files, key=str.casefold
|
||||
)
|
||||
output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""
|
||||
for raw_filename in file_list:
|
||||
filename = html.escape(raw_filename)
|
||||
rel = os.path.join(folder, raw_filename)
|
||||
dpath = os.path.join(folder, filename)
|
||||
if str(dpath) in self.controller.management.get_excluded_backup_dirs(server_id):
|
||||
if str(dpath) in self.controller.management.get_excluded_backup_dirs(
|
||||
server_id
|
||||
):
|
||||
if os.path.isdir(rel):
|
||||
output += \
|
||||
f"""<li class="tree-item" data-path="{dpath}">
|
||||
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||
<input type="checkbox" class="checkBoxClass" name="root_path" value="{dpath}" checked>
|
||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||
<i class="far fa-folder"></i>
|
||||
<i class="far fa-folder-open"></i>
|
||||
<i style="color: #8862e0;" class="far fa-folder"></i>
|
||||
<i style="color: #8862e0;" class="far fa-folder-open"></i>
|
||||
<strong>{filename}</strong>
|
||||
</span>
|
||||
</input></div><li>
|
||||
\n"""\
|
||||
|
||||
\n"""
|
||||
else:
|
||||
output += f"""<li
|
||||
class="tree-nested d-block tree-ctx-item tree-file"
|
||||
class="d-block tree-ctx-item tree-file"
|
||||
data-path="{dpath}"
|
||||
data-name="{filename}"
|
||||
onclick=""><input type='checkbox' class="checkBoxClass" name='root_path' value="{dpath}" checked><span style="margin-right: 6px;">
|
||||
@ -151,32 +150,30 @@ class AjaxHandler(BaseHandler):
|
||||
|
||||
else:
|
||||
if os.path.isdir(rel):
|
||||
output += \
|
||||
f"""<li class="tree-item" data-path="{dpath}">
|
||||
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||
<input type="checkbox" class="checkBoxClass" name="root_path" value="{dpath}">
|
||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||
<i class="far fa-folder"></i>
|
||||
<i class="far fa-folder-open"></i>
|
||||
<i style="color: #8862e0;" class="far fa-folder"></i>
|
||||
<i style="color: #8862e0;" class="far fa-folder-open"></i>
|
||||
<strong>{filename}</strong>
|
||||
</span>
|
||||
</input></div><li>
|
||||
\n"""\
|
||||
|
||||
\n"""
|
||||
else:
|
||||
output += f"""<li
|
||||
class="tree-nested d-block tree-ctx-item tree-file"
|
||||
class="d-block tree-ctx-item tree-file"
|
||||
data-path="{dpath}"
|
||||
data-name="{filename}"
|
||||
onclick=""><input type='checkbox' class="checkBoxClass" name='root_path' value="{dpath}">
|
||||
<span style="margin-right: 6px;"><i class="far fa-file"></i></span></input>{filename}</li>"""
|
||||
self.write(helper.get_os_understandable_path(folder) + '\n' +
|
||||
output)
|
||||
<span style="margin-right: 6px;"><i class="far fa-file">
|
||||
</i></span></input>{filename}</li>"""
|
||||
self.write(Helpers.get_os_understandable_path(folder) + "\n" + output)
|
||||
self.finish()
|
||||
|
||||
elif page == "get_backup_dir":
|
||||
server_id = self.get_argument('id', None)
|
||||
folder = self.get_argument('path', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
folder = self.get_argument("path", None)
|
||||
output = ""
|
||||
|
||||
dir_list = []
|
||||
@ -187,39 +184,38 @@ class AjaxHandler(BaseHandler):
|
||||
dir_list.append(item)
|
||||
else:
|
||||
unsorted_files.append(item)
|
||||
file_list = sorted(dir_list, key=str.casefold) + sorted(unsorted_files, key=str.casefold)
|
||||
output += \
|
||||
f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
|
||||
|
||||
file_list = sorted(dir_list, key=str.casefold) + sorted(
|
||||
unsorted_files, key=str.casefold
|
||||
)
|
||||
output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""
|
||||
for raw_filename in file_list:
|
||||
filename = html.escape(raw_filename)
|
||||
rel = os.path.join(folder, raw_filename)
|
||||
dpath = os.path.join(folder, filename)
|
||||
if str(dpath) in self.controller.management.get_excluded_backup_dirs(server_id):
|
||||
if str(dpath) in self.controller.management.get_excluded_backup_dirs(
|
||||
server_id
|
||||
):
|
||||
if os.path.isdir(rel):
|
||||
output += \
|
||||
f"""<li class="tree-item" data-path="{dpath}">
|
||||
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||
<input type="checkbox" name="root_path" value="{dpath}">
|
||||
<input type="checkbox" name="root_path" value="{dpath}" checked>
|
||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||
<i class="far fa-folder"></i>
|
||||
<i class="far fa-folder-open"></i>
|
||||
<strong>{filename}</strong>
|
||||
</span>
|
||||
</input></div><li>"""\
|
||||
|
||||
</input></div><li>"""
|
||||
else:
|
||||
output += f"""<li
|
||||
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
||||
data-path="{dpath}"
|
||||
data-name="{filename}"
|
||||
onclick=""><input type='checkbox' name='root_path' value='{dpath}'><span style="margin-right: 6px;">
|
||||
onclick=""><input type='checkbox' name='root_path' value='{dpath}' checked><span style="margin-right: 6px;">
|
||||
<i class="far fa-file"></i></span></input>{filename}</li>"""
|
||||
|
||||
else:
|
||||
if os.path.isdir(rel):
|
||||
output += \
|
||||
f"""<li class="tree-item" data-path="{dpath}">
|
||||
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||
<input type="checkbox" name="root_path" value="{dpath}">
|
||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||
@ -227,272 +223,369 @@ class AjaxHandler(BaseHandler):
|
||||
<i class="far fa-folder-open"></i>
|
||||
<strong>{filename}</strong>
|
||||
</span>
|
||||
</input></div><li>"""\
|
||||
|
||||
</input></div><li>"""
|
||||
else:
|
||||
output += f"""<li
|
||||
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
||||
data-path="{dpath}"
|
||||
data-name="{filename}"
|
||||
onclick=""><input type='checkbox' name='root_path' value='{dpath}'>
|
||||
<span style="margin-right: 6px;"><i class="far fa-file"></i></span></input>{filename}</li>"""
|
||||
<span style="margin-right: 6px;"><i class="far fa-file">
|
||||
</i></span></input>{filename}</li>"""
|
||||
|
||||
self.write(helper.get_os_understandable_path(folder) + '\n' +
|
||||
output)
|
||||
self.write(Helpers.get_os_understandable_path(folder) + "\n" + output)
|
||||
self.finish()
|
||||
|
||||
elif page == "get_dir":
|
||||
server_id = self.get_argument('id', None)
|
||||
path = self.get_argument('path', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
path = self.get_argument("path", None)
|
||||
|
||||
if not self.check_server_id(server_id, 'get_tree'):
|
||||
if not self.check_server_id(server_id, "get_tree"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||
helper.generate_dir(path))
|
||||
if Helpers.validate_traversal(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||
):
|
||||
self.write(
|
||||
Helpers.get_os_understandable_path(path)
|
||||
+ "\n"
|
||||
+ Helpers.generate_dir(path)
|
||||
)
|
||||
self.finish()
|
||||
|
||||
@tornado.web.authenticated
|
||||
def post(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
|
||||
if page == "send_command":
|
||||
command = self.get_body_argument('command', default=None, strip=True)
|
||||
server_id = self.get_argument('id', None)
|
||||
command = self.get_body_argument("command", default=None, strip=True)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
if server_id is None:
|
||||
logger.warning("Server ID not found in send_command ajax call")
|
||||
console.warning("Server ID not found in send_command ajax call")
|
||||
Console.warning("Server ID not found in send_command ajax call")
|
||||
|
||||
srv_obj = self.controller.get_server_obj(server_id)
|
||||
srv_obj = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
|
||||
if command == srv_obj.settings['stop_command']:
|
||||
logger.info("Stop command detected as terminal input - intercepting." +
|
||||
f"Starting Crafty's stop process for server with id: {server_id}")
|
||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'stop_server')
|
||||
if command == srv_obj.settings["stop_command"]:
|
||||
logger.info(
|
||||
"Stop command detected as terminal input - intercepting."
|
||||
+ f"Starting Crafty's stop process for server with id: {server_id}"
|
||||
)
|
||||
self.controller.management.send_command(
|
||||
exec_user["user_id"], server_id, self.get_remote_ip(), "stop_server"
|
||||
)
|
||||
command = None
|
||||
elif command == 'restart':
|
||||
logger.info("Restart command detected as terminal input - intercepting." +
|
||||
f"Starting Crafty's stop process for server with id: {server_id}")
|
||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'restart_server')
|
||||
elif command == "restart":
|
||||
logger.info(
|
||||
"Restart command detected as terminal input - intercepting."
|
||||
+ f"Starting Crafty's stop process for server with id: {server_id}"
|
||||
)
|
||||
self.controller.management.send_command(
|
||||
exec_user["user_id"],
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
"restart_server",
|
||||
)
|
||||
command = None
|
||||
if command:
|
||||
if srv_obj.check_running():
|
||||
srv_obj.send_command(command)
|
||||
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"Sent command to {self.controller.servers.get_server_friendly_name(server_id)} terminal: {command}",
|
||||
server_id,
|
||||
self.get_remote_ip())
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f"Sent command to "
|
||||
f"{self.controller.servers.get_server_friendly_name(server_id)} "
|
||||
f"terminal: {command}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
elif page == "send_order":
|
||||
self.controller.users.update_server_order(exec_user['user_id'], bleach.clean(self.get_argument('order')))
|
||||
self.controller.users.update_server_order(
|
||||
exec_user["user_id"], bleach.clean(self.get_argument("order"))
|
||||
)
|
||||
return
|
||||
|
||||
elif page == "backup_now":
|
||||
server_id = self.get_argument("id", None)
|
||||
if server_id is None:
|
||||
logger.error("Server ID is none. Canceling backup!")
|
||||
return
|
||||
|
||||
server = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
self.controller.management.add_to_audit_log_raw(
|
||||
self.controller.users.get_user_by_id(exec_user["user_id"])["username"],
|
||||
exec_user["user_id"],
|
||||
server_id,
|
||||
f"Backup now executed for server {server_id} ",
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
server.backup_server()
|
||||
|
||||
elif page == "clear_comms":
|
||||
if exec_user['superuser']:
|
||||
if exec_user["superuser"]:
|
||||
self.controller.clear_unexecuted_commands()
|
||||
return
|
||||
|
||||
elif page == "kill":
|
||||
if not permissions['Commands'] in user_perms:
|
||||
if not permissions["Commands"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Commands")
|
||||
return
|
||||
server_id = self.get_argument('id', None)
|
||||
svr = self.controller.get_server_obj(server_id)
|
||||
server_id = self.get_argument("id", None)
|
||||
svr = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
try:
|
||||
svr.kill()
|
||||
time.sleep(5)
|
||||
svr.cleanup_server_object()
|
||||
svr.record_server_stats()
|
||||
except Exception as e:
|
||||
logger.error(f"Could not find PID for requested termsig. Full error: {e}")
|
||||
logger.error(
|
||||
f"Could not find PID for requested termsig. Full error: {e}"
|
||||
)
|
||||
return
|
||||
elif page == "eula":
|
||||
server_id = self.get_argument('id', None)
|
||||
svr = self.controller.get_server_obj(server_id)
|
||||
svr.agree_eula(exec_user['user_id'])
|
||||
server_id = self.get_argument("id", None)
|
||||
svr = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
svr.agree_eula(exec_user["user_id"])
|
||||
|
||||
elif page == "restore_backup":
|
||||
if not permissions['Backup'] in user_perms:
|
||||
if not permissions["Backup"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
||||
return
|
||||
server_id = bleach.clean(self.get_argument('id', None))
|
||||
zip_name = bleach.clean(self.get_argument('zip_file', None))
|
||||
server_id = bleach.clean(self.get_argument("id", None))
|
||||
zip_name = bleach.clean(self.get_argument("zip_file", None))
|
||||
svr_obj = self.controller.servers.get_server_obj(server_id)
|
||||
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
||||
if server_data['type'] == 'minecraft-java':
|
||||
if server_data["type"] == "minecraft-java":
|
||||
backup_path = svr_obj.backup_path
|
||||
if helper.validate_traversal(backup_path, zip_name):
|
||||
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
||||
new_server = self.controller.import_zip_server(svr_obj.server_name,
|
||||
tempDir,
|
||||
server_data['executable'],
|
||||
'1', '2',
|
||||
server_data['server_port'])
|
||||
if Helpers.validate_traversal(backup_path, zip_name):
|
||||
temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
|
||||
new_server = self.controller.import_zip_server(
|
||||
svr_obj.server_name,
|
||||
temp_dir,
|
||||
server_data["executable"],
|
||||
"1",
|
||||
"2",
|
||||
server_data["server_port"],
|
||||
)
|
||||
new_server_id = new_server
|
||||
new_server = self.controller.get_server_data(new_server)
|
||||
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
||||
new_server = self.controller.servers.get_server_data(new_server)
|
||||
self.controller.rename_backup_dir(
|
||||
server_id, new_server_id, new_server["server_uuid"]
|
||||
)
|
||||
try:
|
||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||
except:
|
||||
logger.info("No active tasks found for server")
|
||||
self.controller.remove_server(server_id, True)
|
||||
self.redirect('/panel/dashboard')
|
||||
self.redirect("/panel/dashboard")
|
||||
|
||||
else:
|
||||
backup_path = svr_obj.backup_path
|
||||
if helper.validate_traversal(backup_path, zip_name):
|
||||
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
||||
new_server = self.controller.import_bedrock_zip_server(svr_obj.server_name,
|
||||
tempDir,
|
||||
server_data['executable'],
|
||||
server_data['server_port'])
|
||||
if Helpers.validate_traversal(backup_path, zip_name):
|
||||
temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
|
||||
new_server = self.controller.import_bedrock_zip_server(
|
||||
svr_obj.server_name,
|
||||
temp_dir,
|
||||
server_data["executable"],
|
||||
server_data["server_port"],
|
||||
)
|
||||
new_server_id = new_server
|
||||
new_server = self.controller.get_server_data(new_server)
|
||||
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
||||
new_server = self.controller.servers.get_server_data(new_server)
|
||||
self.controller.rename_backup_dir(
|
||||
server_id, new_server_id, new_server["server_uuid"]
|
||||
)
|
||||
try:
|
||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||
except:
|
||||
logger.info("No active tasks found for server")
|
||||
self.controller.remove_server(server_id, True)
|
||||
self.redirect('/panel/dashboard')
|
||||
self.redirect("/panel/dashboard")
|
||||
|
||||
elif page == "unzip_server":
|
||||
path = self.get_argument('path', None)
|
||||
if helper.check_file_exists(path):
|
||||
helper.unzipServer(path, exec_user['user_id'])
|
||||
path = self.get_argument("path", None)
|
||||
if Helpers.check_file_exists(path):
|
||||
self.helper.unzip_server(path, exec_user["user_id"])
|
||||
else:
|
||||
user_id = exec_user['user_id']
|
||||
user_id = exec_user["user_id"]
|
||||
if user_id:
|
||||
time.sleep(5)
|
||||
user_lang = self.controller.users.get_user_lang_by_id(user_id)
|
||||
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
||||
'error': translation.translate('error', 'no-file', user_lang)
|
||||
})
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user_id,
|
||||
"send_start_error",
|
||||
{
|
||||
"error": self.helper.translation.translate(
|
||||
"error", "no-file", user_lang
|
||||
)
|
||||
},
|
||||
)
|
||||
return
|
||||
|
||||
elif page == "backup_select":
|
||||
path = self.get_argument('path', None)
|
||||
helper.backup_select(path, exec_user['user_id'])
|
||||
path = self.get_argument("path", None)
|
||||
self.helper.backup_select(path, exec_user["user_id"])
|
||||
return
|
||||
|
||||
|
||||
@tornado.web.authenticated
|
||||
def delete(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
|
||||
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
if page == "del_task":
|
||||
if not permissions['Schedule'] in user_perms:
|
||||
if not permissions["Schedule"] in user_perms:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Tasks")
|
||||
else:
|
||||
sch_id = self.get_argument('schedule_id', '-404')
|
||||
sch_id = self.get_argument("schedule_id", "-404")
|
||||
self.tasks_manager.remove_job(sch_id)
|
||||
|
||||
if page == "del_backup":
|
||||
if not permissions['Backup'] in user_perms:
|
||||
if not permissions["Backup"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
||||
return
|
||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||
server_id = self.get_argument('id', None)
|
||||
file_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("file_path", default=None, strip=True)
|
||||
)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
console.warning(f"Delete {file_path} for server {server_id}")
|
||||
Console.warning(f"Delete {file_path} for server {server_id}")
|
||||
|
||||
if not self.check_server_id(server_id, 'del_backup'):
|
||||
if not self.check_server_id(server_id, "del_backup"):
|
||||
return
|
||||
else: server_id = bleach.clean(server_id)
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
||||
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||
if not (
|
||||
Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(server_info["path"]), file_path
|
||||
)
|
||||
or Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(server_info["backup_path"]),
|
||||
file_path,
|
||||
)
|
||||
) or not Helpers.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
||||
console.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
||||
Console.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
||||
return
|
||||
|
||||
# Delete the file
|
||||
if helper.validate_traversal(helper.get_os_understandable_path(server_info['backup_path']), file_path):
|
||||
if Helpers.validate_traversal(
|
||||
Helpers.get_os_understandable_path(server_info["backup_path"]),
|
||||
file_path,
|
||||
):
|
||||
os.remove(file_path)
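The del_backup branch above only removes the file after it passes the in_path containment checks and a validate_traversal check against the server's backup directory. As a rough illustration of that containment idea only (not Crafty's actual Helpers implementation; the directory and file names below are made up):

import os
from pathlib import Path


def is_within_directory(base_dir: str, target_path: str) -> bool:
    """Return True only if target_path resolves to a location inside base_dir."""
    base = Path(base_dir).resolve()
    target = Path(target_path).resolve()
    try:
        # commonpath raises ValueError on mismatched drives; treat that as "outside"
        return os.path.commonpath([str(base), str(target)]) == str(base)
    except ValueError:
        return False


# Hypothetical usage mirroring the del_backup flow
backup_dir = "/var/opt/crafty/backups/srv-1"              # assumed path
requested = os.path.join(backup_dir, "backup-2022.zip")   # user-supplied name joined in
escape = os.path.join(backup_dir, "../../etc/passwd")     # traversal attempt

print(is_within_directory(backup_dir, requested))  # True
print(is_within_directory(backup_dir, escape))     # False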
|
||||
|
||||
elif page == "delete_server":
|
||||
if not permissions['Config'] in user_perms:
|
||||
if not permissions["Config"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||
return
|
||||
server_id = self.get_argument('id', None)
|
||||
logger.info(f"Removing server from panel for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
||||
server_id = self.get_argument("id", None)
|
||||
logger.info(
|
||||
f"Removing server from panel for server: "
|
||||
f"{self.controller.servers.get_server_friendly_name(server_id)}"
|
||||
)
|
||||
|
||||
server_data = self.controller.get_server_data(server_id)
|
||||
server_name = server_data['server_name']
|
||||
server_data = self.controller.servers.get_server_data(server_id)
|
||||
server_name = server_data["server_name"]
|
||||
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"Deleted server {server_id} named {server_name}",
|
||||
server_id,
|
||||
self.get_remote_ip())
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f"Deleted server {server_id} named {server_name}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||
self.controller.remove_server(server_id, False)
|
||||
|
||||
elif page == "delete_server_files":
|
||||
if not permissions['Config'] in user_perms:
|
||||
if not permissions["Config"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||
return
|
||||
server_id = self.get_argument('id', None)
|
||||
logger.info(f"Removing server and all associated files for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
||||
server_id = self.get_argument("id", None)
|
||||
logger.info(
|
||||
f"Removing server and all associated files for server: "
|
||||
f"{self.controller.servers.get_server_friendly_name(server_id)}"
|
||||
)
|
||||
|
||||
server_data = self.controller.get_server_data(server_id)
|
||||
server_name = server_data['server_name']
|
||||
server_data = self.controller.servers.get_server_data(server_id)
|
||||
server_name = server_data["server_name"]
|
||||
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"Deleted server {server_id} named {server_name}",
|
||||
server_id,
|
||||
self.get_remote_ip())
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f"Deleted server {server_id} named {server_name}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||
self.controller.remove_server(server_id, True)
|
||||
|
||||
def check_server_id(self, server_id, page_name):
|
||||
if server_id is None:
|
||||
logger.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
||||
console.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
||||
logger.warning(
|
||||
f"Server ID not defined in {page_name} ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Server ID not defined in {page_name} ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
# does this server id exist?
|
||||
if not self.controller.servers.server_id_exists(server_id):
|
||||
logger.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
||||
console.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
||||
logger.warning(
|
||||
f"Server ID not found in {page_name} ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Server ID not found in {page_name} ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
return True
|
||||
|
@ -1,55 +1,94 @@
from datetime import datetime
import logging
import re

from app.classes.controllers.crafty_perms_controller import EnumPermissionsCrafty
from app.classes.controllers.server_perms_controller import EnumPermissionsServer
from app.classes.web.base_handler import BaseHandler
from app.classes.models.management import DatabaseShortcuts

logger = logging.getLogger(__name__)
bearer_pattern = re.compile(r'^Bearer', flags=re.IGNORECASE)
bearer_pattern = re.compile(r"^Bearer", flags=re.IGNORECASE)


class ApiHandler(BaseHandler):

def return_response(self, status: int, data: dict):
# Define a standardized response
self.set_status(status)
self.write(data)

def access_denied(self, user, reason=''):
def check_xsrf_cookie(self):
# Disable CSRF protection on API routes
pass

def access_denied(self, user, reason=""):
|
||||
if reason:
|
||||
reason = ' because ' + reason
|
||||
logger.info("User %s from IP %s was denied access to the API route " + self.request.path + reason, user, self.get_remote_ip())
|
||||
self.finish(self.return_response(403, {
|
||||
'error':'ACCESS_DENIED',
|
||||
'info':'You were denied access to the requested resource'
|
||||
}))
|
||||
reason = " because " + reason
|
||||
logger.info(
|
||||
"User %s from IP %s was denied access to the API route "
|
||||
+ self.request.path
|
||||
+ reason,
|
||||
user,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
self.finish(
|
||||
self.return_response(
|
||||
403,
|
||||
{
|
||||
"error": "ACCESS_DENIED",
|
||||
"info": "You were denied access to the requested resource",
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
def authenticate_user(self) -> bool:
|
||||
self.permissions = {
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
"Server_Creation": EnumPermissionsCrafty.SERVER_CREATION,
|
||||
"User_Config": EnumPermissionsCrafty.USER_CONFIG,
|
||||
"Roles_Config": EnumPermissionsCrafty.ROLES_CONFIG,
|
||||
}
|
||||
try:
|
||||
logger.debug("Searching for specified token")
|
||||
|
||||
api_token = self.get_argument('token', '')
|
||||
if api_token is None and self.request.headers.get('Authorization'):
|
||||
api_token = bearer_pattern.sub('', self.request.headers.get('Authorization'))
|
||||
api_token = self.get_argument("token", "")
|
||||
self.api_token = api_token
|
||||
if api_token is None and self.request.headers.get("Authorization"):
|
||||
api_token = bearer_pattern.sub(
|
||||
"", self.request.headers.get("Authorization")
|
||||
)
|
||||
elif api_token is None:
|
||||
api_token = self.get_cookie('token')
|
||||
api_token = self.get_cookie("token")
|
||||
user_data = self.controller.users.get_user_by_api_token(api_token)
|
||||
|
||||
logger.debug("Checking results")
|
||||
if user_data:
|
||||
# Login successful! Check perms
|
||||
logger.info(f"User {user_data['username']} has authenticated to API")
|
||||
# TODO: Role check
|
||||
|
||||
return True # This is to set the "authenticated"
|
||||
return True # This is to set the "authenticated"
|
||||
else:
|
||||
logging.debug("Auth unsuccessful")
|
||||
self.access_denied("unknown", "the user provided an invalid token")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.warning("An error occured while authenticating an API user: %s", e)
|
||||
self.finish(self.return_response(403, {
|
||||
'error':'ACCESS_DENIED',
|
||||
'info':'An error occured while authenticating the user'
|
||||
}))
|
||||
self.finish(
|
||||
self.return_response(
|
||||
403,
|
||||
{
|
||||
"error": "ACCESS_DENIED",
|
||||
"info": "An error occured while authenticating the user",
|
||||
},
|
||||
)
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
@ -57,12 +96,28 @@ class ServersStats(ApiHandler):
def get(self):
"""Get details about all servers"""
authenticated = self.authenticate_user()
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
if not authenticated:
return
if user_obj["superuser"]:
raw_stats = self.controller.servers.get_all_servers_stats()
else:
raw_stats = self.controller.servers.get_authorized_servers_stats(
user_obj["user_id"]
)
stats = []
for rs in raw_stats:
s = {}
for k, v in rs["server_data"].items():
if isinstance(v, datetime):
s[k] = v.timestamp()
else:
s[k] = v
stats.append(s)

# Get server stats
# TODO Check perms
self.finish(self.write({"servers": self.controller.stats.get_servers_stats()}))
self.finish(self.write({"servers": stats}))
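The loop above exists because datetime objects are not JSON-serializable, so each server_data value is converted to a POSIX timestamp before being written out. A small standalone sketch of the same conversion, using made-up stats data:

from datetime import datetime, timezone
import json


def jsonable_stats(raw_stats: list) -> list:
    """Copy each record's server_data, converting datetime values to POSIX timestamps."""
    stats = []
    for record in raw_stats:
        entry = {}
        for key, value in record["server_data"].items():
            entry[key] = value.timestamp() if isinstance(value, datetime) else value
        stats.append(entry)
    return stats


# Hypothetical input shaped like the raw_stats rows used above
raw = [{"server_data": {"name": "lobby", "created": datetime(2022, 3, 1, tzinfo=timezone.utc)}}]
print(json.dumps({"servers": jsonable_stats(raw)}))
# {"servers": [{"name": "lobby", "created": 1646092800.0}]}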

class NodeStats(ApiHandler):
@ -73,6 +128,319 @@ class NodeStats(ApiHandler):
return

# Get node stats
node_stats = self.controller.stats.get_node_stats()
node_stats.pop("servers")
self.finish(self.write(node_stats))
node_stats = self.controller.servers.stats.get_node_stats()
self.return_response(200, {"code": node_stats["node_stats"]})

class SendCommand(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
server_id = self.get_argument("id")
|
||||
|
||||
if (
|
||||
not user_obj["user_id"]
|
||||
in self.controller.server_perms.get_server_user_list(server_id)
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if not self.permissions[
|
||||
"Commands"
|
||||
] in self.controller.server_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token), server_id
|
||||
):
|
||||
self.access_denied(user)
|
||||
return
|
||||
|
||||
command = self.get_argument("command", default=None, strip=True)
|
||||
server_id = self.get_argument("id")
|
||||
if command:
|
||||
server = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
if server.check_running:
|
||||
server.send_command(command)
|
||||
self.return_response(200, {"run": True})
|
||||
else:
|
||||
self.return_response(200, {"error": "SER_NOT_RUNNING"})
|
||||
else:
|
||||
self.return_response(200, {"error": "NO_COMMAND"})
|
||||
|
||||
|
||||
class ServerBackup(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
server_id = self.get_argument("id")
|
||||
|
||||
if (
|
||||
not user_obj["user_id"]
|
||||
in self.controller.server_perms.get_server_user_list(server_id)
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if not self.permissions[
|
||||
"Backup"
|
||||
] in self.controller.server_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token), server_id
|
||||
):
|
||||
self.access_denied(user)
|
||||
return
|
||||
|
||||
server = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
|
||||
server.backup_server()
|
||||
|
||||
self.return_response(200, {"code": "SER_BAK_CALLED"})
|
||||
|
||||
|
||||
class StartServer(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
remote_ip = self.get_remote_ip()
|
||||
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
server_id = self.get_argument("id")
|
||||
|
||||
if (
|
||||
not user_obj["user_id"]
|
||||
in self.controller.server_perms.get_server_user_list(server_id)
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
elif not self.permissions[
|
||||
"Commands"
|
||||
] in self.controller.server_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token), server_id
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
server = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
|
||||
if not server.check_running():
|
||||
self.controller.management.send_command(
|
||||
user_obj["user_id"], server_id, remote_ip, "start_server"
|
||||
)
|
||||
self.return_response(200, {"code": "SER_START_CALLED"})
|
||||
else:
|
||||
self.return_response(500, {"error": "SER_RUNNING"})
|
||||
|
||||
|
||||
class StopServer(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
remote_ip = self.get_remote_ip()
|
||||
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
server_id = self.get_argument("id")
|
||||
|
||||
if (
|
||||
not user_obj["user_id"]
|
||||
in self.controller.server_perms.get_server_user_list(server_id)
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
|
||||
if not self.permissions[
|
||||
"Commands"
|
||||
] in self.controller.server_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token), server_id
|
||||
):
|
||||
self.access_denied(user)
|
||||
return
|
||||
|
||||
server = self.controller.servers.get_server_instance_by_id(server_id)
|
||||
|
||||
if server.check_running():
|
||||
self.controller.management.send_command(
|
||||
user, server_id, remote_ip, "stop_server"
|
||||
)
|
||||
|
||||
self.return_response(200, {"code": "SER_STOP_CALLED"})
|
||||
else:
|
||||
self.return_response(500, {"error": "SER_NOT_RUNNING"})
|
||||
|
||||
|
||||
class RestartServer(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
remote_ip = self.get_remote_ip()
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
server_id = self.get_argument("id")
|
||||
|
||||
if not user_obj["user_id"] in self.controller.server_perms.get_server_user_list(
|
||||
server_id
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
|
||||
if not self.permissions[
|
||||
"Commands"
|
||||
] in self.controller.server_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token), server_id
|
||||
):
|
||||
self.access_denied(user)
|
||||
|
||||
self.controller.management.send_command(
|
||||
user, server_id, remote_ip, "restart_server"
|
||||
)
|
||||
self.return_response(200, {"code": "SER_RESTART_CALLED"})
|
||||
|
||||
|
||||
class CreateUser(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
user_perms = self.controller.crafty_perms.get_crafty_permissions_list(
|
||||
user_obj["user_id"]
|
||||
)
|
||||
if (
|
||||
not self.permissions["User_Config"] in user_perms
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if not self.permissions[
|
||||
"User_Config"
|
||||
] in self.controller.crafty_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token)
|
||||
):
|
||||
self.access_denied(user)
|
||||
return
|
||||
|
||||
new_username = self.get_argument("username")
|
||||
new_pass = self.get_argument("password")
|
||||
|
||||
if new_username:
|
||||
self.controller.users.add_user(
|
||||
new_username, new_pass, "default@example.com", True, False
|
||||
)
|
||||
|
||||
self.return_response(
|
||||
200,
|
||||
{
|
||||
"code": "COMPLETE",
|
||||
"username": new_username,
|
||||
"password": new_pass,
|
||||
},
|
||||
)
|
||||
else:
|
||||
self.return_response(
|
||||
500,
|
||||
{
|
||||
"error": "MISSING_PARAMS",
|
||||
"info": "Some paramaters failed validation",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class DeleteUser(ApiHandler):
|
||||
def post(self):
|
||||
user = self.authenticate_user()
|
||||
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
user_perms = self.controller.crafty_perms.get_crafty_permissions_list(
|
||||
user_obj["user_id"]
|
||||
)
|
||||
|
||||
if (
|
||||
not self.permissions["User_Config"] in user_perms
|
||||
and not user_obj["superuser"]
|
||||
):
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if not self.permissions[
|
||||
"User_Config"
|
||||
] in self.controller.crafty_perms.get_api_key_permissions_list(
|
||||
self.controller.users.get_api_key_by_token(self.api_token)
|
||||
):
|
||||
self.access_denied(user)
|
||||
return
|
||||
|
||||
user_id = self.get_argument("user_id", None, True)
|
||||
user_to_del = self.controller.users.get_user_by_id(user_id)
|
||||
|
||||
if user_to_del["superuser"]:
|
||||
self.return_response(
|
||||
500,
|
||||
{"error": "NOT_ALLOWED", "info": "You cannot delete a super user"},
|
||||
)
|
||||
else:
|
||||
if user_id:
|
||||
self.controller.users.remove_user(user_id)
|
||||
self.return_response(200, {"code": "COMPLETED"})
|
||||
|
||||
|
||||
class ListServers(ApiHandler):
|
||||
def get(self):
|
||||
user = self.authenticate_user()
|
||||
user_obj = self.controller.users.get_user_by_api_token(self.api_token)
|
||||
|
||||
if user is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if self.api_token is None:
|
||||
self.access_denied("unknown")
|
||||
return
|
||||
|
||||
if user_obj["superuser"]:
|
||||
servers = self.controller.servers.get_all_defined_servers()
|
||||
servers = [str(i) for i in servers]
|
||||
else:
|
||||
servers = self.controller.servers.get_authorized_servers(
|
||||
user_obj["user_id"]
|
||||
)
|
||||
page_servers = []
|
||||
for server in servers:
|
||||
if server not in page_servers:
|
||||
page_servers.append(
|
||||
DatabaseShortcuts.get_data_obj(server.server_object)
|
||||
)
|
||||
servers = page_servers
|
||||
servers = [str(i) for i in servers]
|
||||
|
||||
self.return_response(
|
||||
200,
|
||||
{
|
||||
"code": "COMPLETED",
|
||||
"servers": servers,
|
||||
},
|
||||
)
|
||||
|
30
app/classes/web/base_api_handler.py
Normal file
@ -0,0 +1,30 @@
from typing import Awaitable, Callable, Optional
from app.classes.web.base_handler import BaseHandler


class BaseApiHandler(BaseHandler):
# {{{ Disable XSRF protection on API routes
def check_xsrf_cookie(self) -> None:
pass

# }}}

# {{{ 405 Method Not Allowed as JSON
def _unimplemented_method(self, *_args: str, **_kwargs: str) -> None:
self.finish_json(405, {"status": "error", "error": "METHOD_NOT_ALLOWED"})

head = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
get = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
post = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
delete = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
patch = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
put = _unimplemented_method # type: Callable[..., Optional[Awaitable[None]]]
# }}}

def options(self, *_, **__):
"""
Fix CORS
"""
# no body
self.set_status(204)
self.finish()
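The new BaseApiHandler maps every HTTP verb to _unimplemented_method so unsupported verbs come back as JSON rather than Tornado's HTML error page, and answers OPTIONS with an empty 204 for CORS preflight. A minimal self-contained sketch of the same pattern (plain Tornado, without Crafty's finish_json helper; the route and port are arbitrary):

import json

import tornado.ioloop
import tornado.web


class JsonApiHandler(tornado.web.RequestHandler):
    """Every verb answers 405 as JSON unless a subclass overrides it."""

    def check_xsrf_cookie(self) -> None:
        pass  # token-based APIs skip the XSRF cookie check

    def _unimplemented_method(self, *_args, **_kwargs) -> None:
        self.set_status(405)
        self.set_header("Content-Type", "application/json")
        self.finish(json.dumps({"status": "error", "error": "METHOD_NOT_ALLOWED"}))

    head = get = post = delete = patch = put = _unimplemented_method

    def options(self, *_, **__):
        self.set_status(204)  # preflight: headers only, no body
        self.finish()


class PingHandler(JsonApiHandler):
    def get(self):
        self.finish({"status": "ok"})  # only GET is implemented; POST etc. return 405


if __name__ == "__main__":
    tornado.web.Application([(r"/api/ping", PingHandler)]).listen(8888)
    tornado.ioloop.IOLoop.current().start()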
@ -1,44 +1,87 @@
|
||||
import logging
|
||||
from typing import (
|
||||
Union,
|
||||
List,
|
||||
Optional, Tuple, Dict, Any
|
||||
)
|
||||
import re
|
||||
import typing as t
|
||||
import orjson
|
||||
import bleach
|
||||
import tornado.web
|
||||
|
||||
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
|
||||
from app.classes.models.users import ApiKeys
|
||||
from app.classes.shared.authentication import authentication
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.main_controller import Controller
|
||||
from app.classes.shared.helpers import helper
|
||||
|
||||
try:
|
||||
import tornado.web
|
||||
import bleach
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
from app.classes.shared.translation import Translation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
bearer_pattern = re.compile(r"^Bearer ", flags=re.IGNORECASE)
|
||||
|
||||
|
||||
class BaseHandler(tornado.web.RequestHandler):
def set_default_headers(self) -> None:
"""
Fix CORS
"""
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header(
"Access-Control-Allow-Headers",
"Content-Type, x-requested-with, Authorization",
)
self.set_header(
"Access-Control-Allow-Methods", "POST, GET, PUT, DELETE, OPTIONS"
)

def options(self, *_, **__):
"""
Fix CORS
"""
# no body
self.set_status(204)
self.finish()
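set_default_headers plus the empty-bodied options handler is what lets browsers pass CORS preflight against the panel. A sketch of how that behaviour could be exercised with Tornado's test client; the handler name and route are illustrative only, not part of Crafty:

import tornado.testing
import tornado.web


class CorsHandler(tornado.web.RequestHandler):
    def set_default_headers(self) -> None:
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header(
            "Access-Control-Allow-Headers",
            "Content-Type, x-requested-with, Authorization",
        )
        self.set_header("Access-Control-Allow-Methods", "POST, GET, PUT, DELETE, OPTIONS")

    def options(self, *_, **__):
        self.set_status(204)  # preflight succeeds with an empty body
        self.finish()


class CorsPreflightTest(tornado.testing.AsyncHTTPTestCase):
    def get_app(self):
        return tornado.web.Application([(r"/api/anything", CorsHandler)])

    def test_preflight(self):
        response = self.fetch("/api/anything", method="OPTIONS")
        self.assertEqual(response.code, 204)
        self.assertEqual(response.headers["Access-Control-Allow-Origin"], "*")


if __name__ == "__main__":
    tornado.testing.main()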
|
||||
|
||||
nobleach = {bool, type(None)}
|
||||
redactables = ("pass", "api")
|
||||
|
||||
helper: Helpers
|
||||
controller: Controller
|
||||
translator: Translation
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
def initialize(self, controller: Controller = None, tasks_manager=None, translator=None):
|
||||
def initialize(
|
||||
self, helper=None, controller=None, tasks_manager=None, translator=None
|
||||
):
|
||||
self.helper = helper
|
||||
self.controller = controller
|
||||
self.tasks_manager = tasks_manager
|
||||
self.translator = translator
|
||||
|
||||
def get_remote_ip(self):
|
||||
remote_ip = self.request.headers.get("X-Real-IP") or \
|
||||
self.request.headers.get("X-Forwarded-For") or \
|
||||
self.request.remote_ip
|
||||
remote_ip = (
|
||||
self.request.headers.get("X-Real-IP")
|
||||
or self.request.headers.get("X-Forwarded-For")
|
||||
or self.request.remote_ip
|
||||
)
|
||||
return remote_ip
|
||||
|
||||
current_user: Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]
|
||||
def get_current_user(self) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
|
||||
return authentication.check(self.get_cookie("token"))
|
||||
current_user: t.Tuple[t.Optional[ApiKeys], t.Dict[str, t.Any], t.Dict[str, t.Any]]
|
||||
"""
|
||||
A variable that contains the current user's data. Please see
|
||||
Please only use this with routes using the `@tornado.web.authenticated` decorator.
|
||||
"""
|
||||
|
||||
def get_current_user(
|
||||
self,
|
||||
) -> t.Optional[
|
||||
t.Tuple[t.Optional[ApiKeys], t.Dict[str, t.Any], t.Dict[str, t.Any]]
|
||||
]:
|
||||
"""
|
||||
Get the token's API key, the token's payload and user data.
|
||||
|
||||
Returns:
|
||||
t.Optional[ApiKeys]: The API key of the token.
|
||||
t.Dict[str, t.Any]: The token's payload.
|
||||
t.Dict[str, t.Any]: The user's data from the database.
|
||||
"""
|
||||
return self.controller.authentication.check(self.get_cookie("token"))
|
||||
|
||||
def autobleach(self, name, text):
|
||||
for r in self.redactables:
|
||||
@ -54,15 +97,17 @@ class BaseHandler(tornado.web.RequestHandler):
|
||||
return bleach.clean(text)
|
||||
|
||||
def get_argument(
|
||||
self,
|
||||
name: str,
|
||||
default: Union[None, str, tornado.web._ArgDefaultMarker] = tornado.web._ARG_DEFAULT,
|
||||
strip: bool = True,
|
||||
) -> Optional[str]:
|
||||
self,
|
||||
name: str,
|
||||
default: t.Union[
|
||||
None, str, tornado.web._ArgDefaultMarker
|
||||
] = tornado.web._ARG_DEFAULT,
|
||||
strip: bool = True,
|
||||
) -> t.Optional[str]:
|
||||
arg = self._get_argument(name, default, self.request.arguments, strip)
|
||||
return self.autobleach(name, arg)
|
||||
|
||||
def get_arguments(self, name: str, strip: bool = True) -> List[str]:
|
||||
def get_arguments(self, name: str, strip: bool = True) -> t.List[str]:
|
||||
if not isinstance(strip, bool):
|
||||
raise AssertionError
|
||||
args = self._get_arguments(name, self.request.arguments, strip)
|
||||
@ -70,3 +115,127 @@ class BaseHandler(tornado.web.RequestHandler):
|
||||
for arg in args:
|
||||
args_ret += self.autobleach(name, arg)
|
||||
return args_ret
|
||||
|
||||
def access_denied(self, user: t.Optional[str], reason: t.Optional[str]):
|
||||
ip = self.get_remote_ip()
|
||||
route = self.request.path
|
||||
if user is not None:
|
||||
user_data = f"User {user} from IP {ip}"
|
||||
else:
|
||||
user_data = f"An unknown user from IP {ip}"
|
||||
if reason:
|
||||
ending = f"to the API route {route} because {reason}"
|
||||
else:
|
||||
ending = f"to the API route {route}"
|
||||
logger.info(f"{user_data} was denied access {ending}")
|
||||
self.finish_json(
|
||||
403,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "ACCESS_DENIED",
|
||||
"info": "You were denied access to the requested resource",
|
||||
},
|
||||
)
|
||||
|
||||
def _auth_get_api_token(self) -> t.Optional[str]:
"""Get an API token from the request

The API token is searched in the following order:
1. The `token` query parameter
2. The `Authorization` header
3. The `token` cookie

Returns:
t.Optional[str]: The API token or None if no token was found.
"""
logger.debug("Searching for specified token")
api_token = self.get_query_argument("token", None)
if api_token is None and self.request.headers.get("Authorization"):
api_token = bearer_pattern.sub(
"", self.request.headers.get("Authorization")
)
elif api_token is None:
api_token = self.get_cookie("token")
return api_token
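Per the docstring above, _auth_get_api_token accepts the token as a query parameter, a Bearer header, or a cookie, in that order. A hedged client-side sketch of the three options using requests; the base URL, route, and token value are placeholders, not documented Crafty endpoints:

import requests

BASE = "https://crafty.example.com:8443"   # placeholder host
TOKEN = "crafty-api-token-value"           # placeholder token

# 1. token as a query parameter
r1 = requests.get(f"{BASE}/api/stats/servers", params={"token": TOKEN})

# 2. token in the Authorization header (the server strips the "Bearer " prefix)
r2 = requests.get(
    f"{BASE}/api/stats/servers",
    headers={"Authorization": f"Bearer {TOKEN}"},
)

# 3. token as a cookie (what the web panel itself relies on)
r3 = requests.get(f"{BASE}/api/stats/servers", cookies={"token": TOKEN})

print(r1.status_code, r2.status_code, r3.status_code)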
|
||||
|
||||
def authenticate_user(
|
||||
self,
|
||||
) -> t.Optional[
|
||||
t.Tuple[
|
||||
t.List,
|
||||
t.List[EnumPermissionsCrafty],
|
||||
t.List[str],
|
||||
bool,
|
||||
t.Dict[str, t.Any],
|
||||
]
|
||||
]:
|
||||
try:
|
||||
api_key, _token_data, user = self.controller.authentication.check_err(
|
||||
self._auth_get_api_token()
|
||||
)
|
||||
|
||||
superuser = user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
exec_user_role = set()
|
||||
if superuser:
|
||||
authorized_servers = self.controller.servers.get_all_defined_servers()
|
||||
exec_user_role.add("Super User")
|
||||
exec_user_crafty_permissions = (
|
||||
self.controller.crafty_perms.list_defined_crafty_permissions()
|
||||
)
|
||||
|
||||
else:
|
||||
if api_key is not None:
|
||||
exec_user_crafty_permissions = (
|
||||
self.controller.crafty_perms.get_api_key_permissions_list(
|
||||
api_key
|
||||
)
|
||||
)
|
||||
else:
|
||||
exec_user_crafty_permissions = (
|
||||
self.controller.crafty_perms.get_crafty_permissions_list(
|
||||
user["user_id"]
|
||||
)
|
||||
)
|
||||
logger.debug(user["roles"])
|
||||
for r in user["roles"]:
|
||||
role = self.controller.roles.get_role(r)
|
||||
exec_user_role.add(role["role_name"])
|
||||
authorized_servers = self.controller.servers.get_authorized_servers(
|
||||
user["user_id"] # TODO: API key authorized servers?
|
||||
)
|
||||
|
||||
logger.debug("Checking results")
|
||||
if user:
|
||||
return (
|
||||
authorized_servers,
|
||||
exec_user_crafty_permissions,
|
||||
exec_user_role,
|
||||
superuser,
|
||||
user,
|
||||
)
|
||||
else:
|
||||
logging.debug("Auth unsuccessful")
|
||||
self.access_denied(None, "the user provided an invalid token")
|
||||
return None
|
||||
except Exception as auth_exception:
|
||||
logger.debug(
|
||||
"An error occured while authenticating an API user:",
|
||||
exc_info=auth_exception,
|
||||
)
|
||||
self.finish_json(
|
||||
403,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "ACCESS_DENIED",
|
||||
"info": "An error occured while authenticating the user",
|
||||
},
|
||||
)
|
||||
return None
|
||||
|
||||
def finish_json(self, status: int, data: t.Dict[str, t.Any]):
self.set_status(status)
self.set_header("Content-Type", "application/json")
self.finish(orjson.dumps(data))
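finish_json hands Tornado the raw bytes from orjson.dumps, which serializes faster than the standard library and natively handles types such as datetime. A tiny illustration, standalone and outside any handler:

from datetime import datetime, timezone

import orjson

payload = {
    "status": "ok",
    "queried_at": datetime(2022, 5, 1, tzinfo=timezone.utc),  # serialized as RFC 3339 text
}

body = orjson.dumps(payload)          # returns bytes, ready for RequestHandler.finish()
print(body)                           # b'{"status":"ok","queried_at":"2022-05-01T00:00:00+00:00"}'
print(orjson.loads(body)["status"])   # ok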
|
||||
|
@ -4,11 +4,11 @@ from app.classes.web.base_handler import BaseHandler

logger = logging.getLogger(__name__)


class DefaultHandler(BaseHandler):

# Override prepare() instead of get() to cover all possible HTTP methods.
# pylint: disable=arguments-differ
def prepare(self, page=None):
def prepare(self, page=None): # pylint: disable=arguments-differ
if page is not None:
self.set_status(404)
self.render(
@ -18,5 +18,5 @@ class DefaultHandler(BaseHandler):
else:
self.redirect(
"/public/login",
#translate=self.translator.translate,
# translate=self.translator.translate,
)

@ -1,24 +1,19 @@
|
||||
import os
|
||||
import logging
|
||||
import bleach
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
|
||||
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.file_helpers import file_helper
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.file_helpers import FileHelpers
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import bleach
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class FileHandler(BaseHandler):
|
||||
|
||||
class FileHandler(BaseHandler):
|
||||
def render_page(self, template, page_data):
|
||||
self.render(
|
||||
template,
|
||||
@ -29,370 +24,480 @@ class FileHandler(BaseHandler):
|
||||
@tornado.web.authenticated
|
||||
def get(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
|
||||
if page == "get_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
file_path = helper.get_os_understandable_path(self.get_argument('file_path', None))
|
||||
file_path = Helpers.get_os_understandable_path(
|
||||
self.get_argument("file_path", None)
|
||||
)
|
||||
|
||||
if not self.check_server_id(server_id, 'get_file'):
|
||||
if not self.check_server_id(server_id, "get_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in get_file file file ajax call ({file_path})")
|
||||
console.warning(f"Invalid path in get_file file file ajax call ({file_path})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
file_path,
|
||||
) or not Helpers.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(
|
||||
f"Invalid path in get_file file file ajax call ({file_path})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid path in get_file file file ajax call ({file_path})"
|
||||
)
|
||||
return
|
||||
|
||||
|
||||
error = None
|
||||
|
||||
try:
|
||||
with open(file_path, encoding='utf-8') as file:
|
||||
with open(file_path, encoding="utf-8") as file:
|
||||
file_contents = file.read()
|
||||
except UnicodeDecodeError:
|
||||
file_contents = ''
|
||||
error = 'UnicodeDecodeError'
|
||||
file_contents = ""
|
||||
error = "UnicodeDecodeError"
|
||||
|
||||
self.write({
|
||||
'content': file_contents,
|
||||
'error': error
|
||||
})
|
||||
self.write({"content": file_contents, "error": error})
|
||||
self.finish()
|
||||
|
||||
elif page == "get_tree":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
path = self.get_argument('path', None)
|
||||
path = self.get_argument("path", None)
|
||||
|
||||
if not self.check_server_id(server_id, 'get_tree'):
|
||||
if not self.check_server_id(server_id, "get_tree"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||
helper.generate_tree(path))
|
||||
if Helpers.validate_traversal(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||
):
|
||||
self.write(
|
||||
Helpers.get_os_understandable_path(path)
|
||||
+ "\n"
|
||||
+ Helpers.generate_tree(path)
|
||||
)
|
||||
self.finish()
|
||||
|
||||
elif page == "get_dir":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
path = self.get_argument('path', None)
|
||||
path = self.get_argument("path", None)
|
||||
|
||||
if not self.check_server_id(server_id, 'get_tree'):
|
||||
if not self.check_server_id(server_id, "get_tree"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||
helper.generate_dir(path))
|
||||
if Helpers.validate_traversal(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||
):
|
||||
self.write(
|
||||
Helpers.get_os_understandable_path(path)
|
||||
+ "\n"
|
||||
+ Helpers.generate_dir(path)
|
||||
)
|
||||
self.finish()
|
||||
|
||||
@tornado.web.authenticated
|
||||
def post(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
|
||||
if page == "create_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
file_parent = helper.get_os_understandable_path(self.get_body_argument('file_parent', default=None, strip=True))
|
||||
file_name = self.get_body_argument('file_name', default=None, strip=True)
|
||||
file_parent = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("file_parent", default=None, strip=True)
|
||||
)
|
||||
file_name = self.get_body_argument("file_name", default=None, strip=True)
|
||||
file_path = os.path.join(file_parent, file_name)
|
||||
|
||||
if not self.check_server_id(server_id, 'create_file'):
|
||||
if not self.check_server_id(server_id, "create_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path) \
|
||||
or helper.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in create_file file ajax call ({file_path})")
|
||||
console.warning(f"Invalid path in create_file file ajax call ({file_path})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
file_path,
|
||||
) or Helpers.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(
|
||||
f"Invalid path in create_file file ajax call ({file_path})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid path in create_file file ajax call ({file_path})"
|
||||
)
|
||||
return
|
||||
|
||||
# Create the file by opening it
|
||||
with open(file_path, 'w', encoding='utf-8') as file_object:
|
||||
with open(file_path, "w", encoding="utf-8") as file_object:
|
||||
file_object.close()
|
||||
|
||||
elif page == "create_dir":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
dir_parent = helper.get_os_understandable_path(self.get_body_argument('dir_parent', default=None, strip=True))
|
||||
dir_name = self.get_body_argument('dir_name', default=None, strip=True)
|
||||
dir_parent = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("dir_parent", default=None, strip=True)
|
||||
)
|
||||
dir_name = self.get_body_argument("dir_name", default=None, strip=True)
|
||||
dir_path = os.path.join(dir_parent, dir_name)
|
||||
|
||||
if not self.check_server_id(server_id, 'create_dir'):
|
||||
if not self.check_server_id(server_id, "create_dir"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), dir_path) \
|
||||
or helper.check_path_exists(os.path.abspath(dir_path)):
|
||||
logger.warning(f"Invalid path in create_dir file ajax call ({dir_path})")
|
||||
console.warning(f"Invalid path in create_dir file ajax call ({dir_path})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
dir_path,
|
||||
) or Helpers.check_path_exists(os.path.abspath(dir_path)):
|
||||
logger.warning(
|
||||
f"Invalid path in create_dir file ajax call ({dir_path})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid path in create_dir file ajax call ({dir_path})"
|
||||
)
|
||||
return
|
||||
# Create the directory
|
||||
os.mkdir(dir_path)
|
||||
|
||||
elif page == "unzip_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
path = helper.get_os_understandable_path(self.get_argument('path', None))
|
||||
helper.unzipFile(path)
|
||||
path = Helpers.get_os_understandable_path(self.get_argument("path", None))
|
||||
Helpers.unzip_file(path)
|
||||
self.redirect(f"/panel/server_detail?id={server_id}&subpage=files")
|
||||
return
|
||||
|
||||
|
||||
@tornado.web.authenticated
|
||||
def delete(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
if page == "del_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||
file_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("file_path", default=None, strip=True)
|
||||
)
|
||||
|
||||
console.warning(f"Delete {file_path} for server {server_id}")
|
||||
Console.warning(f"Delete {file_path} for server {server_id}")
|
||||
|
||||
if not self.check_server_id(server_id, 'del_file'):
|
||||
return
|
||||
else: server_id = bleach.clean(server_id)
|
||||
|
||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
||||
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in del_file file ajax call ({file_path})")
|
||||
console.warning(f"Invalid path in del_file file ajax call ({file_path})")
|
||||
return
|
||||
|
||||
# Delete the file
|
||||
file_helper.del_file(file_path)
|
||||
|
||||
elif page == "del_dir":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
dir_path = helper.get_os_understandable_path(self.get_body_argument('dir_path', default=None, strip=True))
|
||||
|
||||
console.warning(f"Delete {dir_path} for server {server_id}")
|
||||
|
||||
if not self.check_server_id(server_id, 'del_dir'):
|
||||
if not self.check_server_id(server_id, "del_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||
if not helper.in_path(helper.get_os_understandable_path(server_info['path']), dir_path) \
|
||||
or not helper.check_path_exists(os.path.abspath(dir_path)):
|
||||
if not (
|
||||
Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(server_info["path"]), file_path
|
||||
)
|
||||
or Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(server_info["backup_path"]),
|
||||
file_path,
|
||||
)
|
||||
) or not Helpers.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in del_file file ajax call ({file_path})")
|
||||
Console.warning(
|
||||
f"Invalid path in del_file file ajax call ({file_path})"
|
||||
)
|
||||
return
|
||||
|
||||
# Delete the file
|
||||
FileHelpers.del_file(file_path)
|
||||
|
||||
elif page == "del_dir":
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
dir_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("dir_path", default=None, strip=True)
|
||||
)
|
||||
|
||||
Console.warning(f"Delete {dir_path} for server {server_id}")
|
||||
|
||||
if not self.check_server_id(server_id, "del_dir"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(server_info["path"]), dir_path
|
||||
) or not Helpers.check_path_exists(os.path.abspath(dir_path)):
|
||||
logger.warning(f"Invalid path in del_file file ajax call ({dir_path})")
|
||||
console.warning(f"Invalid path in del_file file ajax call ({dir_path})")
|
||||
Console.warning(f"Invalid path in del_file file ajax call ({dir_path})")
|
||||
return
|
||||
|
||||
# Delete the directory
|
||||
# os.rmdir(dir_path) # Would only remove empty directories
|
||||
if helper.validate_traversal(helper.get_os_understandable_path(server_info['path']), dir_path):
|
||||
if Helpers.validate_traversal(
|
||||
Helpers.get_os_understandable_path(server_info["path"]), dir_path
|
||||
):
|
||||
# Removes also when there are contents
|
||||
file_helper.del_dirs(dir_path)
|
||||
FileHelpers.del_dirs(dir_path)
|
||||
|
||||
@tornado.web.authenticated
|
||||
def put(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
if page == "save_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
file_contents = self.get_body_argument('file_contents', default=None, strip=True)
|
||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||
file_contents = self.get_body_argument(
|
||||
"file_contents", default=None, strip=True
|
||||
)
|
||||
file_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("file_path", default=None, strip=True)
|
||||
)
|
||||
|
||||
if not self.check_server_id(server_id, 'save_file'):
|
||||
if not self.check_server_id(server_id, "save_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(f"Invalid path in save_file file ajax call ({file_path})")
|
||||
console.warning(f"Invalid path in save_file file ajax call ({file_path})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
file_path,
|
||||
) or not Helpers.check_file_exists(os.path.abspath(file_path)):
|
||||
logger.warning(
|
||||
f"Invalid path in save_file file ajax call ({file_path})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid path in save_file file ajax call ({file_path})"
|
||||
)
|
||||
return
|
||||
|
||||
# Open the file in write mode and store the content in file_object
|
||||
with open(file_path, 'w', encoding='utf-8') as file_object:
|
||||
with open(file_path, "w", encoding="utf-8") as file_object:
|
||||
file_object.write(file_contents)
|
||||
|
||||
elif page == "rename_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
item_path = helper.get_os_understandable_path(self.get_body_argument('item_path', default=None, strip=True))
|
||||
new_item_name = self.get_body_argument('new_item_name', default=None, strip=True)
|
||||
item_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("item_path", default=None, strip=True)
|
||||
)
|
||||
new_item_name = self.get_body_argument(
|
||||
"new_item_name", default=None, strip=True
|
||||
)
|
||||
|
||||
if not self.check_server_id(server_id, 'rename_file'):
|
||||
if not self.check_server_id(server_id, "rename_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if item_path is None or new_item_name is None:
|
||||
logger.warning("Invalid path(s) in rename_file file ajax call")
|
||||
console.warning("Invalid path(s) in rename_file file ajax call")
|
||||
Console.warning("Invalid path(s) in rename_file file ajax call")
|
||||
return
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), item_path) \
|
||||
or not helper.check_path_exists(os.path.abspath(item_path)):
|
||||
logger.warning(f"Invalid old name path in rename_file file ajax call ({server_id})")
|
||||
console.warning(f"Invalid old name path in rename_file file ajax call ({server_id})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
item_path,
|
||||
) or not Helpers.check_path_exists(os.path.abspath(item_path)):
|
||||
logger.warning(
|
||||
f"Invalid old name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid old name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
|
||||
new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']),
|
||||
new_item_path) \
|
||||
or helper.check_path_exists(os.path.abspath(new_item_path)):
|
||||
logger.warning(f"Invalid new name path in rename_file file ajax call ({server_id})")
|
||||
console.warning(f"Invalid new name path in rename_file file ajax call ({server_id})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
new_item_path,
|
||||
) or Helpers.check_path_exists(os.path.abspath(new_item_path)):
|
||||
logger.warning(
|
||||
f"Invalid new name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid new name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
|
||||
# RENAME
|
||||
os.rename(item_path, new_item_path)
|
||||
|
||||
|
||||
@tornado.web.authenticated
|
||||
def patch(self, page):
|
||||
api_key, _, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
server_id = self.get_argument('id', None)
|
||||
server_id = self.get_argument("id", None)
|
||||
permissions = {
|
||||
'Commands': Enum_Permissions_Server.Commands,
|
||||
'Terminal': Enum_Permissions_Server.Terminal,
|
||||
'Logs': Enum_Permissions_Server.Logs,
|
||||
'Schedule': Enum_Permissions_Server.Schedule,
|
||||
'Backup': Enum_Permissions_Server.Backup,
|
||||
'Files': Enum_Permissions_Server.Files,
|
||||
'Config': Enum_Permissions_Server.Config,
|
||||
'Players': Enum_Permissions_Server.Players,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||
"Commands": EnumPermissionsServer.COMMANDS,
|
||||
"Terminal": EnumPermissionsServer.TERMINAL,
|
||||
"Logs": EnumPermissionsServer.LOGS,
|
||||
"Schedule": EnumPermissionsServer.SCHEDULE,
|
||||
"Backup": EnumPermissionsServer.BACKUP,
|
||||
"Files": EnumPermissionsServer.FILES,
|
||||
"Config": EnumPermissionsServer.CONFIG,
|
||||
"Players": EnumPermissionsServer.PLAYERS,
|
||||
}
|
||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
if page == "rename_file":
|
||||
if not permissions['Files'] in user_perms:
|
||||
if not permissions["Files"] in user_perms:
|
||||
if not superuser:
|
||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||
return
|
||||
item_path = helper.get_os_understandable_path(self.get_body_argument('item_path', default=None, strip=True))
|
||||
new_item_name = self.get_body_argument('new_item_name', default=None, strip=True)
|
||||
item_path = Helpers.get_os_understandable_path(
|
||||
self.get_body_argument("item_path", default=None, strip=True)
|
||||
)
|
||||
new_item_name = self.get_body_argument(
|
||||
"new_item_name", default=None, strip=True
|
||||
)
|
||||
|
||||
if not self.check_server_id(server_id, 'rename_file'):
|
||||
if not self.check_server_id(server_id, "rename_file"):
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
if item_path is None or new_item_name is None:
|
||||
logger.warning("Invalid path(s) in rename_file file ajax call")
|
||||
console.warning("Invalid path(s) in rename_file file ajax call")
|
||||
Console.warning("Invalid path(s) in rename_file file ajax call")
|
||||
return
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), item_path) \
|
||||
or not helper.check_path_exists(os.path.abspath(item_path)):
|
||||
logger.warning(f"Invalid old name path in rename_file file ajax call ({server_id})")
|
||||
console.warning(f"Invalid old name path in rename_file file ajax call ({server_id})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
item_path,
|
||||
) or not Helpers.check_path_exists(os.path.abspath(item_path)):
|
||||
logger.warning(
|
||||
f"Invalid old name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid old name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
|
||||
new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']),
|
||||
new_item_path) \
|
||||
or helper.check_path_exists(os.path.abspath(new_item_path)):
|
||||
logger.warning(f"Invalid new name path in rename_file file ajax call ({server_id})")
|
||||
console.warning(f"Invalid new name path in rename_file file ajax call ({server_id})")
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
new_item_path,
|
||||
) or Helpers.check_path_exists(os.path.abspath(new_item_path)):
|
||||
logger.warning(
|
||||
f"Invalid new name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Invalid new name path in rename_file file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
|
||||
# RENAME
|
||||
@ -400,15 +505,23 @@ class FileHandler(BaseHandler):
|
||||
|
||||
def check_server_id(self, server_id, page_name):
|
||||
if server_id is None:
|
||||
logger.warning(f"Server ID not defined in {page_name} file ajax call ({server_id})")
|
||||
console.warning(f"Server ID not defined in {page_name} file ajax call ({server_id})")
|
||||
logger.warning(
|
||||
f"Server ID not defined in {page_name} file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Server ID not defined in {page_name} file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
else:
|
||||
server_id = bleach.clean(server_id)
|
||||
|
||||
# does this server id exist?
|
||||
if not self.controller.servers.server_id_exists(server_id):
|
||||
logger.warning(f"Server ID not found in {page_name} file ajax call ({server_id})")
|
||||
console.warning(f"Server ID not found in {page_name} file ajax call ({server_id})")
|
||||
logger.warning(
|
||||
f"Server ID not found in {page_name} file ajax call ({server_id})"
|
||||
)
|
||||
Console.warning(
|
||||
f"Server ID not found in {page_name} file ajax call ({server_id})"
|
||||
)
|
||||
return
|
||||
return True
|
||||
|
@ -1,32 +1,27 @@
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import requests
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HTTPHandler(BaseHandler):
|
||||
def get(self):
|
||||
url = str(self.request.host)
|
||||
port = 443
|
||||
url_list = url.split(":")
|
||||
if url_list[0] != "":
|
||||
url = 'https://' + url_list[0]
|
||||
url = "https://" + url_list[0]
|
||||
else:
|
||||
url = 'https://' + url
|
||||
db_port = helper.get_setting('https_port')
|
||||
url = "https://" + url
|
||||
db_port = self.helper.get_setting("https_port")
|
||||
try:
|
||||
resp = requests.get(url + ":" + str(port))
|
||||
resp.raise_for_status()
|
||||
except Exception:
|
||||
port = db_port
|
||||
self.redirect(url+":"+str(port))
|
||||
self.redirect(url + ":" + str(port))
|
||||
|
||||
|
||||
class HTTPHandlerPage(BaseHandler):
|
||||
@ -35,13 +30,13 @@ class HTTPHandlerPage(BaseHandler):
|
||||
port = 443
|
||||
url_list = url.split(":")
|
||||
if url_list[0] != "":
|
||||
url = 'https://' + url_list[0]
|
||||
url = "https://" + url_list[0]
|
||||
else:
|
||||
url = 'https://' + url
|
||||
db_port = helper.get_setting('https_port')
|
||||
url = "https://" + url
|
||||
db_port = self.helper.get_setting("https_port")
|
||||
try:
|
||||
resp = requests.get(url + ":" + str(port))
|
||||
resp.raise_for_status()
|
||||
except Exception:
|
||||
port = db_port
|
||||
self.redirect(url+":"+str(port))
|
||||
self.redirect(url + ":" + str(port))
|
||||
|
@ -1,26 +1,28 @@
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HTTPHandlerPage(BaseHandler):
|
||||
def get(self):
|
||||
url = self.request.full_url
|
||||
port = 443
|
||||
if url[len(url)-1] == '/':
|
||||
url = url.strip(url[len(url)-1])
|
||||
url_list = url.split('/')
|
||||
if url[len(url) - 1] == "/":
|
||||
url = url.strip(url[len(url) - 1])
|
||||
url_list = url.split("/")
|
||||
if url_list[0] != "":
|
||||
primary_url = url_list[0] + ":"+str(port)+"/"
|
||||
backup_url = url_list[0] + ":" +str(helper.get_setting("https_port")) +"/"
|
||||
for i in range(len(url_list)-1):
|
||||
primary_url += url_list[i+1]
|
||||
backup_url += url_list[i+1]
|
||||
primary_url = url_list[0] + ":" + str(port) + "/"
|
||||
backup_url = (
|
||||
url_list[0] + ":" + str(self.helper.get_setting("https_port")) + "/"
|
||||
)
|
||||
for i in range(len(url_list) - 1):
|
||||
primary_url += url_list[i + 1]
|
||||
backup_url += url_list[i + 1]
|
||||
else:
|
||||
primary_url = url + str(port)
|
||||
backup_url = url + str(helper.get_setting('https_port'))
|
||||
backup_url = url + str(self.helper.get_setting("https_port"))
|
||||
|
||||
try:
|
||||
resp = requests.get(primary_url)
|
||||
@ -28,4 +30,4 @@ class HTTPHandlerPage(BaseHandler):
|
||||
url = primary_url
|
||||
except Exception:
|
||||
url = backup_url
|
||||
self.redirect('https://'+url+':'+ str(port))
|
||||
self.redirect("https://" + url + ":" + str(port))
|
||||
|
File diff suppressed because it is too large
@ -1,44 +1,51 @@
|
||||
import logging
|
||||
import bleach
|
||||
|
||||
from app.classes.models.users import Users
|
||||
from app.classes.shared.authentication import authentication
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.main_models import fn
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.models.users import HelperUsers
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import bleach
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class PublicHandler(BaseHandler):
|
||||
|
||||
class PublicHandler(BaseHandler):
|
||||
def set_current_user(self, user_id: str = None):
|
||||
|
||||
expire_days = helper.get_setting('cookie_expire')
|
||||
expire_days = self.helper.get_setting("cookie_expire")
|
||||
|
||||
# if helper comes back with false
|
||||
if not expire_days:
|
||||
expire_days = "5"
|
||||
|
||||
if user_id is not None:
|
||||
self.set_cookie("token", authentication.generate(user_id), expires_days=int(expire_days))
|
||||
self.set_cookie(
|
||||
"token",
|
||||
self.controller.authentication.generate(user_id),
|
||||
expires_days=int(expire_days),
|
||||
)
|
||||
else:
|
||||
self.clear_cookie("user")
|
||||
self.clear_cookie("token")
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
|
||||
def get(self, page=None):
|
||||
|
||||
error = bleach.clean(self.get_argument('error', "Invalid Login!"))
|
||||
error_msg = bleach.clean(self.get_argument('error_msg', ''))
|
||||
|
||||
page_data = {
|
||||
'version': helper.get_version_string(),
|
||||
'error': error, 'lang': helper.get_setting('language'),
|
||||
'lang_page': helper.getLangPage(helper.get_setting('language'))
|
||||
}
|
||||
error = bleach.clean(self.get_argument("error", "Invalid Login!"))
|
||||
error_msg = bleach.clean(self.get_argument("error_msg", ""))
|
||||
try:
|
||||
page_data = {
|
||||
"version": self.helper.get_version_string(),
|
||||
"error": error,
|
||||
"lang": self.helper.get_setting("language"),
|
||||
"lang_page": self.helper.get_lang_page(
|
||||
self.helper.get_setting("language")
|
||||
),
|
||||
"query": "",
|
||||
}
|
||||
except:
|
||||
self.redirect("/public/login.html")
|
||||
if self.request.query:
|
||||
page_data["query"] = self.request.query
|
||||
|
||||
# sensible defaults
|
||||
template = "public/404.html"
|
||||
@ -53,75 +60,141 @@ class PublicHandler(BaseHandler):
|
||||
template = "public/error.html"
|
||||
|
||||
elif page == "logout":
|
||||
self.clear_cookie("user")
|
||||
self.clear_cookie("user_data")
|
||||
self.redirect('/public/login')
|
||||
self.clear_cookie("token")
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
self.redirect("/public/login")
|
||||
return
|
||||
|
||||
# if we have no page, let's go to login
|
||||
else:
|
||||
self.redirect('/public/login')
|
||||
if self.request.query:
|
||||
self.redirect("/public/login?" + self.request.query)
|
||||
else:
|
||||
self.redirect("/public/login")
|
||||
return
|
||||
|
||||
self.render(
|
||||
template,
|
||||
data=page_data,
|
||||
translate=self.translator.translate,
|
||||
error_msg = error_msg
|
||||
error_msg=error_msg,
|
||||
)
|
||||
|
||||
def post(self, page=None):
|
||||
|
||||
if page == 'login':
|
||||
next_page = "/public/login"
|
||||
error = bleach.clean(self.get_argument("error", "Invalid Login!"))
|
||||
error_msg = bleach.clean(self.get_argument("error_msg", ""))
|
||||
|
||||
entered_username = bleach.clean(self.get_argument('username'))
|
||||
entered_password = bleach.clean(self.get_argument('password'))
|
||||
page_data = {
|
||||
"version": self.helper.get_version_string(),
|
||||
"error": error,
|
||||
"lang": self.helper.get_setting("language"),
|
||||
"lang_page": self.helper.get_lang_page(self.helper.get_setting("language")),
|
||||
"query": "",
|
||||
}
|
||||
if self.request.query:
|
||||
page_data["query"] = self.request.query
|
||||
|
||||
if page == "login":
|
||||
|
||||
next_page = "/public/login"
|
||||
if self.request.query:
|
||||
next_page = "/public/login?" + self.request.query
|
||||
|
||||
entered_username = bleach.clean(self.get_argument("username"))
|
||||
entered_password = bleach.clean(self.get_argument("password"))
|
||||
|
||||
# pylint: disable=no-member
|
||||
user_data = Users.get_or_none(fn.Lower(Users.username) == entered_username.lower())
|
||||
|
||||
try:
|
||||
user_id = HelperUsers.get_user_id_by_name(entered_username.lower())
|
||||
user_data = HelperUsers.get_user_model(user_id)
|
||||
except:
|
||||
error_msg = "Incorrect username or password. Please try again."
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
self.clear_cookie("token")
|
||||
if self.request.query:
|
||||
self.redirect(
|
||||
f"/public/login?error_msg={error_msg}&{self.request.query}"
|
||||
)
|
||||
else:
|
||||
self.redirect(f"/public/login?error_msg={error_msg}")
|
||||
return
|
||||
|
||||
# if we don't have a user
|
||||
if not user_data:
|
||||
error_msg = "Incorrect username or password. Please try again."
|
||||
self.clear_cookie("user")
|
||||
self.clear_cookie("user_data")
|
||||
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
self.clear_cookie("token")
|
||||
if self.request.query:
|
||||
self.redirect(
|
||||
f"/public/login?error_msg={error_msg}&{self.request.query}"
|
||||
)
|
||||
else:
|
||||
self.redirect(f"/public/login?error_msg={error_msg}")
|
||||
return
|
||||
|
||||
# if they are disabled
|
||||
if not user_data.enabled:
|
||||
error_msg = "User account disabled. Please contact your system administrator for more info."
|
||||
self.clear_cookie("user")
|
||||
self.clear_cookie("user_data")
|
||||
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||
error_msg = (
|
||||
"User account disabled. Please contact "
|
||||
"your system administrator for more info."
|
||||
)
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
self.clear_cookie("token")
|
||||
if self.request.query:
|
||||
self.redirect(
|
||||
f"/public/login?error_msg={error_msg}&{self.request.query}"
|
||||
)
|
||||
else:
|
||||
self.redirect(f"/public/login?error_msg={error_msg}")
|
||||
return
|
||||
|
||||
login_result = helper.verify_pass(entered_password, user_data.password)
|
||||
login_result = self.helper.verify_pass(entered_password, user_data.password)
|
||||
|
||||
# Valid Login
|
||||
if login_result:
|
||||
self.set_current_user(user_data.user_id)
|
||||
logger.info(f"User: {user_data} Logged in from IP: {self.get_remote_ip()}")
|
||||
logger.info(
|
||||
f"User: {user_data} Logged in from IP: {self.get_remote_ip()}"
|
||||
)
|
||||
|
||||
# record this login
|
||||
q = Users.select().where(Users.username == entered_username.lower()).get()
|
||||
q.last_ip = self.get_remote_ip()
|
||||
q.last_login = helper.get_time_as_string()
|
||||
q.save()
|
||||
user_data.last_ip = self.get_remote_ip()
|
||||
user_data.last_login = Helpers.get_time_as_string()
|
||||
user_data.save()
|
||||
|
||||
# log this login
|
||||
self.controller.management.add_to_audit_log(user_data.user_id, "Logged in", 0, self.get_remote_ip())
|
||||
self.controller.management.add_to_audit_log(
|
||||
user_data.user_id, "Logged in", 0, self.get_remote_ip()
|
||||
)
|
||||
|
||||
if self.request.query_arguments.get("next"):
|
||||
next_page = self.request.query_arguments.get("next")[0].decode()
|
||||
else:
|
||||
next_page = "/panel/dashboard"
|
||||
|
||||
next_page = "/panel/dashboard"
|
||||
self.redirect(next_page)
|
||||
else:
|
||||
self.clear_cookie("user")
|
||||
self.clear_cookie("user_data")
|
||||
# self.clear_cookie("user")
|
||||
# self.clear_cookie("user_data")
|
||||
self.clear_cookie("token")
|
||||
error_msg = "Incorrect username or password. Please try again."
|
||||
# log this failed login attempt
|
||||
self.controller.management.add_to_audit_log(user_data.user_id, "Tried to log in", 0, self.get_remote_ip())
|
||||
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||
self.controller.management.add_to_audit_log(
|
||||
user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
|
||||
)
|
||||
if self.request.query:
|
||||
self.redirect(
|
||||
f"/public/login?error_msg={error_msg}&{self.request.query}"
|
||||
)
|
||||
else:
|
||||
self.redirect(f"/public/login?error_msg={error_msg}")
|
||||
else:
|
||||
self.redirect("/public/login")
|
||||
if self.request.query:
|
||||
self.redirect("/public/login?" + self.request.query)
|
||||
else:
|
||||
self.redirect("/public/login")
|
||||
|
176 app/classes/web/routes/api/api_handlers.py Normal file
@ -0,0 +1,176 @@
|
||||
from app.classes.web.routes.api.index_handler import ApiIndexHandler
|
||||
from app.classes.web.routes.api.jsonschema import (
|
||||
ApiJsonSchemaHandler,
|
||||
ApiJsonSchemaListHandler,
|
||||
)
|
||||
from app.classes.web.routes.api.not_found import ApiNotFoundHandler
|
||||
from app.classes.web.routes.api.auth.invalidate_tokens import (
|
||||
ApiAuthInvalidateTokensHandler,
|
||||
)
|
||||
from app.classes.web.routes.api.auth.login import ApiAuthLoginHandler
|
||||
from app.classes.web.routes.api.roles.index import ApiRolesIndexHandler
|
||||
from app.classes.web.routes.api.roles.role.index import ApiRolesRoleIndexHandler
|
||||
from app.classes.web.routes.api.roles.role.servers import ApiRolesRoleServersHandler
|
||||
from app.classes.web.routes.api.roles.role.users import ApiRolesRoleUsersHandler
|
||||
from app.classes.web.routes.api.servers.index import ApiServersIndexHandler
|
||||
from app.classes.web.routes.api.servers.server.action import (
|
||||
ApiServersServerActionHandler,
|
||||
)
|
||||
from app.classes.web.routes.api.servers.server.index import ApiServersServerIndexHandler
|
||||
from app.classes.web.routes.api.servers.server.logs import ApiServersServerLogsHandler
|
||||
from app.classes.web.routes.api.servers.server.public import (
|
||||
ApiServersServerPublicHandler,
|
||||
)
|
||||
from app.classes.web.routes.api.servers.server.stats import ApiServersServerStatsHandler
|
||||
from app.classes.web.routes.api.servers.server.stdin import ApiServersServerStdinHandler
|
||||
from app.classes.web.routes.api.servers.server.users import ApiServersServerUsersHandler
|
||||
from app.classes.web.routes.api.users.index import ApiUsersIndexHandler
|
||||
from app.classes.web.routes.api.users.user.index import ApiUsersUserIndexHandler
|
||||
from app.classes.web.routes.api.users.user.permissions import (
|
||||
ApiUsersUserPermissionsHandler,
|
||||
)
|
||||
from app.classes.web.routes.api.users.user.pfp import ApiUsersUserPfpHandler
|
||||
from app.classes.web.routes.api.users.user.public import ApiUsersUserPublicHandler
|
||||
|
||||
|
||||
def api_handlers(handler_args):
|
||||
return [
|
||||
# Auth routes
|
||||
(
|
||||
r"/api/v2/auth/login/?",
|
||||
ApiAuthLoginHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/auth/invalidate_tokens/?",
|
||||
ApiAuthInvalidateTokensHandler,
|
||||
handler_args,
|
||||
),
|
||||
# User routes
|
||||
(
|
||||
r"/api/v2/users/?",
|
||||
ApiUsersIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/([0-9]+)/?",
|
||||
ApiUsersUserIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/(@me)/?",
|
||||
ApiUsersUserIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/([0-9]+)/permissions/?",
|
||||
ApiUsersUserPermissionsHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/(@me)/permissions/?",
|
||||
ApiUsersUserPermissionsHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/([0-9]+)/pfp/?",
|
||||
ApiUsersUserPfpHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/(@me)/pfp/?",
|
||||
ApiUsersUserPfpHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/([0-9]+)/public/?",
|
||||
ApiUsersUserPublicHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/users/(@me)/public/?",
|
||||
ApiUsersUserPublicHandler,
|
||||
handler_args,
|
||||
),
|
||||
# Server routes
|
||||
(
|
||||
r"/api/v2/servers/?",
|
||||
ApiServersIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/?",
|
||||
ApiServersServerIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/stats/?",
|
||||
ApiServersServerStatsHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/action/([a-z_]+)/?",
|
||||
ApiServersServerActionHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/logs/?",
|
||||
ApiServersServerLogsHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/users/?",
|
||||
ApiServersServerUsersHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/public/?",
|
||||
ApiServersServerPublicHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/servers/([0-9]+)/stdin/?",
|
||||
ApiServersServerStdinHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/roles/?",
|
||||
ApiRolesIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/roles/([0-9]+)/?",
|
||||
ApiRolesRoleIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/roles/([0-9]+)/servers/?",
|
||||
ApiRolesRoleServersHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/roles/([0-9]+)/users/?",
|
||||
ApiRolesRoleUsersHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/jsonschema/?",
|
||||
ApiJsonSchemaListHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/jsonschema/([a-z0-9_]+)/?",
|
||||
ApiJsonSchemaHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/?",
|
||||
ApiIndexHandler,
|
||||
handler_args,
|
||||
),
|
||||
(
|
||||
r"/api/v2/(.*)",
|
||||
ApiNotFoundHandler,
|
||||
handler_args,
|
||||
),
|
||||
]
|
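For orientation, here is a minimal sketch (not part of this merge) of how a route table shaped like api_handlers(handler_args) plugs into a Tornado application. The PingHandler class, the placeholder handler_args values, and the port are hypothetical stand-ins, not Crafty code.

import tornado.ioloop
import tornado.web


class PingHandler(tornado.web.RequestHandler):
    def initialize(self, helper=None, controller=None):
        # Tornado passes each route's handler_args dict as keyword arguments.
        self.helper = helper
        self.controller = controller

    def get(self):
        # Writing a dict serialises it to JSON automatically.
        self.write({"status": "ok"})


def make_app():
    handler_args = {"helper": None, "controller": None}  # placeholder objects
    routes = [(r"/api/v2/ping/?", PingHandler, handler_args)]
    return tornado.web.Application(routes)


if __name__ == "__main__":
    make_app().listen(8000)  # hypothetical port
    tornado.ioloop.IOLoop.current().start()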
19 app/classes/web/routes/api/auth/invalidate_tokens.py Normal file
@ -0,0 +1,19 @@
|
||||
import datetime
|
||||
import logging
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiAuthInvalidateTokensHandler(BaseApiHandler):
|
||||
def post(self):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
|
||||
self.controller.users.raw_update_user(
|
||||
auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
|
||||
)
|
||||
|
||||
self.finish_json(200, {"status": "ok"})
|
104 app/classes/web/routes/api/auth/login.py Normal file
@ -0,0 +1,104 @@
|
||||
import logging
|
||||
import json
|
||||
from jsonschema import validate
|
||||
from jsonschema.exceptions import ValidationError
|
||||
from app.classes.models.users import Users
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
login_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"minLength": 4,
|
||||
"pattern": "^[a-z0-9_]+$",
|
||||
},
|
||||
"password": {"type": "string", "maxLength": 20, "minLength": 4},
|
||||
},
|
||||
"required": ["username", "password"],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
|
||||
|
||||
class ApiAuthLoginHandler(BaseApiHandler):
|
||||
def post(self):
|
||||
|
||||
try:
|
||||
data = json.loads(self.request.body)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, login_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
username = data["username"]
|
||||
password = data["password"]
|
||||
|
||||
# pylint: disable=no-member
|
||||
user_data = Users.get_or_none(Users.username == username)
|
||||
|
||||
if user_data is None:
|
||||
return self.finish_json(
|
||||
401,
|
||||
{"status": "error", "error": "INCORRECT_CREDENTIALS", "token": None},
|
||||
)
|
||||
|
||||
if not user_data.enabled:
|
||||
self.finish_json(
|
||||
403, {"status": "error", "error": "ACCOUNT_DISABLED", "token": None}
|
||||
)
|
||||
return
|
||||
|
||||
login_result = self.helper.verify_pass(password, user_data.password)
|
||||
|
||||
# Valid Login
|
||||
if login_result:
|
||||
logger.info(f"User: {user_data} Logged in from IP: {self.get_remote_ip()}")
|
||||
|
||||
# record this login
|
||||
query = Users.select().where(Users.username == username.lower()).get()
|
||||
query.last_ip = self.get_remote_ip()
|
||||
query.last_login = Helpers.get_time_as_string()
|
||||
query.save()
|
||||
|
||||
# log this login
|
||||
self.controller.management.add_to_audit_log(
|
||||
user_data.user_id, "logged in via the API", 0, self.get_remote_ip()
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"token": self.controller.authentication.generate(
|
||||
user_data.user_id
|
||||
),
|
||||
"user_id": str(user_data.user_id),
|
||||
},
|
||||
},
|
||||
)
|
||||
else:
|
||||
# log this failed login attempt
|
||||
self.controller.management.add_to_audit_log(
|
||||
user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
|
||||
)
|
||||
self.finish_json(
|
||||
401,
|
||||
{"status": "error", "error": "INCORRECT_CREDENTIALS"},
|
||||
)
|
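As a hedged usage sketch (not from the merge itself), the login_schema above can be used client-side to sanity-check a body before POSTing it to /api/v2/auth/login. The host, port, and credentials are placeholders, and the import assumes the app package is on the Python path.

import json

import requests
from jsonschema import validate

from app.classes.web.routes.api.auth.login import login_schema

body = {"username": "admin", "password": "crafty123"}  # placeholder credentials
validate(body, login_schema)  # raises ValidationError if the body is malformed

resp = requests.post(
    "http://localhost:8000/api/v2/auth/login",  # placeholder host/port
    data=json.dumps(body),
)
# On a successful login the handler returns {"status": "ok", "data": {"token": ..., "user_id": ...}}
token = resp.json()["data"]["token"]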
2 app/classes/web/routes/api/auth/register.py Normal file
@ -0,0 +1,2 @@
|
||||
# nothing here yet
|
||||
# sometime implement configurable self service account creation?
|
17 app/classes/web/routes/api/index_handler.py Normal file
@ -0,0 +1,17 @@
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
WIKI_API_LINK = "https://wiki.craftycontrol.com/en/4/docs/API V2"
|
||||
|
||||
|
||||
class ApiIndexHandler(BaseApiHandler):
|
||||
def get(self):
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"version": self.controller.helper.get_version_string(),
|
||||
"message": f"Please see the API documentation at {WIKI_API_LINK}",
|
||||
},
|
||||
},
|
||||
)
|
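A quick illustrative request against the unauthenticated index route above (host and port are placeholders):

import requests

resp = requests.get("http://localhost:8000/api/v2")  # placeholder host/port
info = resp.json()["data"]
print(info["version"], "-", info["message"])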
107 app/classes/web/routes/api/jsonschema.py Normal file
@ -0,0 +1,107 @@
|
||||
import typing as t
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
from app.classes.web.routes.api.auth.login import login_schema
|
||||
from app.classes.web.routes.api.roles.role.index import modify_role_schema
|
||||
from app.classes.web.routes.api.roles.index import create_role_schema
|
||||
from app.classes.web.routes.api.servers.server.index import server_patch_schema
|
||||
from app.classes.web.routes.api.servers.index import new_server_schema
|
||||
|
||||
SCHEMA_LIST: t.Final = [
|
||||
"login",
|
||||
"modify_role",
|
||||
"create_role",
|
||||
"server_patch",
|
||||
"new_server",
|
||||
"user_patch",
|
||||
"new_user",
|
||||
]
|
||||
|
||||
|
||||
class ApiJsonSchemaListHandler(BaseApiHandler):
|
||||
def get(self):
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": SCHEMA_LIST},
|
||||
)
|
||||
|
||||
|
||||
class ApiJsonSchemaHandler(BaseApiHandler):
|
||||
def get(self, schema_name: str):
|
||||
if schema_name == "login":
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": login_schema},
|
||||
)
|
||||
elif schema_name == "modify_role":
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": modify_role_schema},
|
||||
)
|
||||
elif schema_name == "create_role":
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": create_role_schema},
|
||||
)
|
||||
elif schema_name == "server_patch":
|
||||
self.finish_json(200, server_patch_schema)
|
||||
elif schema_name == "new_server":
|
||||
self.finish_json(
|
||||
200,
|
||||
new_server_schema,
|
||||
)
|
||||
elif schema_name == "user_patch":
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
**self.controller.users.user_jsonschema_props,
|
||||
},
|
||||
"anyOf": [
|
||||
# Require at least one property
|
||||
{"required": [name]}
|
||||
for name in [
|
||||
"username",
|
||||
"password",
|
||||
"email",
|
||||
"enabled",
|
||||
"lang",
|
||||
"superuser",
|
||||
"permissions",
|
||||
"roles",
|
||||
"hints",
|
||||
]
|
||||
],
|
||||
"additionalProperties": False,
|
||||
},
|
||||
},
|
||||
)
|
||||
elif schema_name == "new_user":
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
**self.controller.users.user_jsonschema_props,
|
||||
},
|
||||
"required": ["username", "password"],
|
||||
"additionalProperties": False,
|
||||
},
|
||||
},
|
||||
)
|
||||
else:
|
||||
self.finish_json(
|
||||
404,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "UNKNOWN_JSON_SCHEMA",
|
||||
"info": (
|
||||
f"Unknown JSON schema: {schema_name}."
|
||||
f" Here's a list of all the valid schema names: {SCHEMA_LIST}"
|
||||
),
|
||||
},
|
||||
)
|
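Illustrative only: the two schema routes above can be used to discover the published schemas at runtime (these handlers do not call authenticate_user, so no token is needed); the base URL is a placeholder.

import requests

BASE = "http://localhost:8000/api/v2"  # placeholder host/port

schema_names = requests.get(f"{BASE}/jsonschema").json()["data"]
for name in schema_names:
    # Note: most schemas come wrapped in {"status": "ok", "data": ...}, while
    # server_patch and new_server are returned as the bare schema object.
    schema = requests.get(f"{BASE}/jsonschema/{name}").json()
    print(name, "->", list(schema.keys()))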
18 app/classes/web/routes/api/not_found.py Normal file
@ -0,0 +1,18 @@
|
||||
from typing import Awaitable, Callable, Optional
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
class ApiNotFoundHandler(BaseApiHandler):
|
||||
def _not_found(self, page: str) -> None:
|
||||
self.finish_json(
|
||||
404,
|
||||
{"status": "error", "error": "API_HANDLER_NOT_FOUND", "page": page},
|
||||
)
|
||||
|
||||
head = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
get = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
post = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
delete = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
patch = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
put = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
||||
options = _not_found # type: Callable[..., Optional[Awaitable[None]]]
|
131 app/classes/web/routes/api/roles/index.py Normal file
@ -0,0 +1,131 @@
|
||||
import typing as t
|
||||
from jsonschema import ValidationError, validate
|
||||
import orjson
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
create_role_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
},
|
||||
"servers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"server_id": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
},
|
||||
"permissions": {
|
||||
"type": "string",
|
||||
"pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
|
||||
},
|
||||
},
|
||||
"required": ["server_id", "permissions"],
|
||||
},
|
||||
},
|
||||
},
|
||||
"required": ["name"],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
|
||||
|
||||
class ApiRolesIndexHandler(BaseApiHandler):
|
||||
def get(self):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
_,
|
||||
) = auth_data
|
||||
|
||||
# GET /api/v2/roles?ids=true
|
||||
get_only_ids = self.get_query_argument("ids", None) == "true"
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": self.controller.roles.get_all_role_ids()
|
||||
if get_only_ids
|
||||
else [model_to_dict(r) for r in self.controller.roles.get_all_roles()],
|
||||
},
|
||||
)
|
||||
|
||||
def post(self):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
try:
|
||||
data = orjson.loads(self.request.body)
|
||||
except orjson.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, create_role_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
role_name = data["name"]
|
||||
|
||||
# Get the servers
|
||||
servers_dict = {server["server_id"]: server for server in data["servers"]}
|
||||
server_ids = (
|
||||
(
|
||||
{server["server_id"] for server in data["servers"]}
|
||||
& set(self.controller.servers.get_all_server_ids())
|
||||
) # Only allow existing servers
|
||||
if "servers" in data
|
||||
else set()
|
||||
)
|
||||
servers: t.List[dict] = [servers_dict[server_id] for server_id in server_ids]
|
||||
|
||||
if self.controller.roles.get_roleid_by_name(role_name) is not None:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "ROLE_NAME_ALREADY_EXISTS"}
|
||||
)
|
||||
|
||||
role_id = self.controller.roles.add_role_advanced(role_name, servers)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
f"created role {role_name} (RID:{role_id})",
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": {"role_id": role_id}},
|
||||
)
|
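For reference, a sketch of a POST body that satisfies create_role_schema above; the server_id is a placeholder and the import assumes the app package is importable.

from jsonschema import validate

from app.classes.web.routes.api.roles.index import create_role_schema

new_role = {
    "name": "Moderators",
    "servers": [{"server_id": 1, "permissions": "01000000"}],  # 8 permission bits
}
validate(new_role, create_role_schema)  # passes: "name" present, bits match ^[01]{8}$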
143 app/classes/web/routes/api/roles/role/index.py Normal file
@ -0,0 +1,143 @@
|
||||
from jsonschema import ValidationError, validate
|
||||
import orjson
|
||||
from peewee import DoesNotExist
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
modify_role_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
},
|
||||
"servers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"server_id": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
},
|
||||
"permissions": {
|
||||
"type": "string",
|
||||
"pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
|
||||
},
|
||||
},
|
||||
"required": ["server_id", "permissions"],
|
||||
},
|
||||
},
|
||||
},
|
||||
"anyOf": [
|
||||
{"required": ["name"]},
|
||||
{"required": ["servers"]},
|
||||
],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
|
||||
|
||||
class ApiRolesRoleIndexHandler(BaseApiHandler):
|
||||
def get(self, role_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
_,
|
||||
) = auth_data
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
try:
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": self.controller.roles.get_role(role_id)},
|
||||
)
|
||||
except DoesNotExist:
|
||||
self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
|
||||
|
||||
def delete(self, role_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
self.controller.roles.remove_role(role_id)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": role_id},
|
||||
)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
f"deleted role with ID {role_id}",
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
def patch(self, role_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
try:
|
||||
data = orjson.loads(self.request.body)
|
||||
except orjson.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, modify_role_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
try:
|
||||
self.controller.roles.update_role_advanced(
|
||||
role_id, data.get("role_name", None), data.get("servers", None)
|
||||
)
|
||||
except DoesNotExist:
|
||||
return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
f"modified role with ID {role_id}",
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok"},
|
||||
)
|
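Similarly, a minimal PATCH body accepted by modify_role_schema above, which only requires at least one of "name" or "servers" (sketch; the server_id is a placeholder):

from jsonschema import validate

from app.classes.web.routes.api.roles.role.index import modify_role_schema

patch_body = {"servers": [{"server_id": 1, "permissions": "11111111"}]}
validate(patch_body, modify_role_schema)  # valid: "servers" alone satisfies anyOf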
32 app/classes/web/routes/api/roles/role/servers.py Normal file
@ -0,0 +1,32 @@
|
||||
from app.classes.models.server_permissions import PermissionsServers
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
class ApiRolesRoleServersHandler(BaseApiHandler):
|
||||
def get(self, role_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
_,
|
||||
) = auth_data
|
||||
|
||||
# GET /api/v2/roles/role/servers?ids=true
|
||||
get_only_ids = self.get_query_argument("ids", None) == "true"
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": PermissionsServers.get_server_ids_from_role(role_id)
|
||||
if get_only_ids
|
||||
else self.controller.roles.get_server_ids_and_perms_from_role(role_id),
|
||||
},
|
||||
)
|
36 app/classes/web/routes/api/roles/role/users.py Normal file
@ -0,0 +1,36 @@
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
class ApiRolesRoleUsersHandler(BaseApiHandler):
|
||||
def get(self, role_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
superuser,
|
||||
_,
|
||||
) = auth_data
|
||||
|
||||
if not superuser:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
all_user_ids = self.controller.users.get_all_user_ids()
|
||||
|
||||
user_roles = {}
|
||||
for user_id in all_user_ids:
|
||||
user_roles_list = self.controller.users.get_user_roles_names(user_id)
|
||||
user_roles[user_id] = user_roles_list
|
||||
|
||||
role = self.controller.roles.get_role(role_id)
|
||||
|
||||
user_ids = []
|
||||
|
||||
for user_id in all_user_ids:
|
||||
for role_user in user_roles[user_id]:
|
||||
if role_user == role["role_name"]:
|
||||
user_ids.append(user_id)
|
||||
|
||||
self.finish_json(200, {"status": "ok", "data": user_ids})
|
695 app/classes/web/routes/api/servers/index.py Normal file
@ -0,0 +1,695 @@
|
||||
import logging
|
||||
|
||||
from jsonschema import ValidationError, validate
|
||||
import orjson
|
||||
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
new_server_schema = {
|
||||
"definitions": {},
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Root",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"monitoring_type",
|
||||
"create_type",
|
||||
],
|
||||
"properties": {
|
||||
"name": {
|
||||
"title": "Name",
|
||||
"type": "string",
|
||||
"examples": ["My Server"],
|
||||
"minLength": 2,
|
||||
},
|
||||
"stop_command": {
|
||||
"title": "Stop command",
|
||||
"description": '"" means the default for the server creation type.',
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"examples": ["stop", "end"],
|
||||
},
|
||||
"log_location": {
|
||||
"title": "Log file",
|
||||
"description": '"" means the default for the server creation type.',
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"examples": ["./logs/latest.log", "./proxy.log.0"],
|
||||
},
|
||||
"crashdetection": {
|
||||
"title": "Crash detection",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
"autostart": {
|
||||
"title": "Autostart",
|
||||
"description": "If true, the server will be started"
|
||||
+ " automatically when Crafty is launched.",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
"autostart_delay": {
|
||||
"title": "Autostart delay",
|
||||
"description": "Delay in seconds before autostarting. (If enabled)",
|
||||
"type": "number",
|
||||
"default": 10,
|
||||
"minimum": 0,
|
||||
},
|
||||
"monitoring_type": {
|
||||
"title": "Server monitoring type",
|
||||
"type": "string",
|
||||
"default": "minecraft_java",
|
||||
"enum": ["minecraft_java", "minecraft_bedrock", "none"],
|
||||
# TODO: SteamCMD, RakNet, etc.
|
||||
},
|
||||
"minecraft_java_monitoring_data": {
|
||||
"title": "Minecraft Java monitoring data",
|
||||
"type": "object",
|
||||
"required": ["host", "port"],
|
||||
"properties": {
|
||||
"host": {
|
||||
"title": "Host",
|
||||
"type": "string",
|
||||
"default": "127.0.0.1",
|
||||
"examples": ["127.0.0.1"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"port": {
|
||||
"title": "Port",
|
||||
"type": "integer",
|
||||
"examples": [25565],
|
||||
"default": 25565,
|
||||
"minimum": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"minecraft_bedrock_monitoring_data": {
|
||||
"title": "Minecraft Bedrock monitoring data",
|
||||
"type": "object",
|
||||
"required": ["host", "port"],
|
||||
"properties": {
|
||||
"host": {
|
||||
"title": "Host",
|
||||
"type": "string",
|
||||
"default": "127.0.0.1",
|
||||
"examples": ["127.0.0.1"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"port": {
|
||||
"title": "Port",
|
||||
"type": "integer",
|
||||
"examples": [19132],
|
||||
"default": 19132,
|
||||
"minimum": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
"create_type": {
|
||||
# This is only used for creation, this is not saved in the db
|
||||
"title": "Server creation type",
|
||||
"type": "string",
|
||||
"default": "minecraft_java",
|
||||
"enum": ["minecraft_java", "minecraft_bedrock", "custom"],
|
||||
},
|
||||
"minecraft_java_create_data": {
|
||||
"title": "Java creation data",
|
||||
"type": "object",
|
||||
"required": ["create_type"],
|
||||
"properties": {
|
||||
"create_type": {
|
||||
"title": "Creation type",
|
||||
"type": "string",
|
||||
"default": "download_jar",
|
||||
"enum": ["download_jar", "import_server", "import_zip"],
|
||||
},
|
||||
"download_jar_create_data": {
|
||||
"title": "JAR download data",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"type",
|
||||
"version",
|
||||
"mem_min",
|
||||
"mem_max",
|
||||
"server_properties_port",
|
||||
"agree_to_eula",
|
||||
],
|
||||
"properties": {
|
||||
"type": {
|
||||
"title": "Server JAR Type",
|
||||
"type": "string",
|
||||
"examples": ["Paper"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"version": {
|
||||
"title": "Server JAR Version",
|
||||
"type": "string",
|
||||
"examples": ["1.18.2"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"mem_min": {
|
||||
"title": "Minimum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [1],
|
||||
"default": 1,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"mem_max": {
|
||||
"title": "Maximum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [2],
|
||||
"default": 2,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"server_properties_port": {
|
||||
"title": "Port",
|
||||
"type": "integer",
|
||||
"examples": [25565],
|
||||
"default": 25565,
|
||||
"minimum": 0,
|
||||
},
|
||||
"agree_to_eula": {
|
||||
"title": "Agree to the EULA",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
"import_server_create_data": {
|
||||
"title": "Import server data",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"existing_server_path",
|
||||
"jarfile",
|
||||
"mem_min",
|
||||
"mem_max",
|
||||
"server_properties_port",
|
||||
"agree_to_eula",
|
||||
],
|
||||
"properties": {
|
||||
"existing_server_path": {
|
||||
"title": "Server path",
|
||||
"description": "Absolute path to the old server",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"jarfile": {
|
||||
"title": "JAR file",
|
||||
"description": "The JAR file relative to the previous path",
|
||||
"type": "string",
|
||||
"examples": ["paper.jar", "jars/vanilla-1.12.jar"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"mem_min": {
|
||||
"title": "Minimum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [1],
|
||||
"default": 1,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"mem_max": {
|
||||
"title": "Maximum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [2],
|
||||
"default": 2,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"server_properties_port": {
|
||||
"title": "Port",
|
||||
"type": "integer",
|
||||
"examples": [25565],
|
||||
"default": 25565,
|
||||
"minimum": 0,
|
||||
},
|
||||
"agree_to_eula": {
|
||||
"title": "Agree to the EULA",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
"import_zip_create_data": {
|
||||
"title": "Import ZIP server data",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"zip_path",
|
||||
"zip_root",
|
||||
"jarfile",
|
||||
"mem_min",
|
||||
"mem_max",
|
||||
"server_properties_port",
|
||||
"agree_to_eula",
|
||||
],
|
||||
"properties": {
|
||||
"zip_path": {
|
||||
"title": "ZIP path",
|
||||
"description": "Absolute path to the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server.zip"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"zip_root": {
|
||||
"title": "Server root directory",
|
||||
"description": "The server root in the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/", "/paper-server/", "server-1"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"jarfile": {
|
||||
"title": "JAR file",
|
||||
"description": "The JAR relative to the configured root",
|
||||
"type": "string",
|
||||
"examples": ["paper.jar", "jars/vanilla-1.12.jar"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"mem_min": {
|
||||
"title": "Minimum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [1],
|
||||
"default": 1,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"mem_max": {
|
||||
"title": "Maximum JVM memory (in GiBs)",
|
||||
"type": "number",
|
||||
"examples": [2],
|
||||
"default": 2,
|
||||
"exclusiveMinimum": 0,
|
||||
},
|
||||
"server_properties_port": {
|
||||
"title": "Port",
|
||||
"type": "integer",
|
||||
"examples": [25565],
|
||||
"default": 25565,
|
||||
"minimum": 0,
|
||||
},
|
||||
"agree_to_eula": {
|
||||
"title": "Agree to the EULA",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$comment": "If..then section",
|
||||
"allOf": [
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "download_jar"}}
|
||||
},
|
||||
"then": {"required": ["download_jar_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "import_exec"}}
|
||||
},
|
||||
"then": {"required": ["import_server_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "import_zip"}}
|
||||
},
|
||||
"then": {"required": ["import_zip_create_data"]},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "Only one creation data",
|
||||
"oneOf": [
|
||||
{"required": ["download_jar_create_data"]},
|
||||
{"required": ["import_server_create_data"]},
|
||||
{"required": ["import_zip_create_data"]},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
"minecraft_bedrock_create_data": {
|
||||
"title": "Minecraft Bedrock creation data",
|
||||
"type": "object",
|
||||
"required": ["create_type"],
|
||||
"properties": {
|
||||
"create_type": {
|
||||
"title": "Creation type",
|
||||
"type": "string",
|
||||
"default": "import_server",
|
||||
"enum": ["import_server", "import_zip"],
|
||||
},
|
||||
"import_server_create_data": {
|
||||
"title": "Import server data",
|
||||
"type": "object",
|
||||
"required": ["existing_server_path", "command"],
|
||||
"properties": {
|
||||
"existing_server_path": {
|
||||
"title": "Server path",
|
||||
"description": "Absolute path to the old server",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"command": {
|
||||
"title": "Command",
|
||||
"type": "string",
|
||||
"default": "echo foo bar baz",
|
||||
"examples": ["LD_LIBRARY_PATH=. ./bedrock_server"],
|
||||
"minLength": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
"import_zip_create_data": {
|
||||
"title": "Import ZIP server data",
|
||||
"type": "object",
|
||||
"required": ["zip_path", "zip_root", "command"],
|
||||
"properties": {
|
||||
"zip_path": {
|
||||
"title": "ZIP path",
|
||||
"description": "Absolute path to the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server.zip"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"zip_root": {
|
||||
"title": "Server root directory",
|
||||
"description": "The server root in the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/", "/paper-server/", "server-1"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"command": {
|
||||
"title": "Command",
|
||||
"type": "string",
|
||||
"default": "echo foo bar baz",
|
||||
"examples": ["LD_LIBRARY_PATH=. ./bedrock_server"],
|
||||
"minLength": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$comment": "If..then section",
|
||||
"allOf": [
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "import_exec"}}
|
||||
},
|
||||
"then": {"required": ["import_server_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "import_zip"}}
|
||||
},
|
||||
"then": {"required": ["import_zip_create_data"]},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "Only one creation data",
|
||||
"oneOf": [
|
||||
{"required": ["import_server_create_data"]},
|
||||
{"required": ["import_zip_create_data"]},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
"custom_create_data": {
|
||||
"title": "Custom creation data",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"working_directory",
|
||||
"executable_update",
|
||||
"create_type",
|
||||
],
|
||||
"properties": {
|
||||
"working_directory": {
|
||||
"title": "Working directory",
|
||||
"description": '"" means the default',
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"examples": ["/mnt/mydrive/server-configs/", "./subdirectory", ""],
|
||||
},
|
||||
"executable_update": {
|
||||
"title": "Executable Updation",
|
||||
"description": "Also configurable later on and for other servers",
|
||||
"type": "object",
|
||||
"required": ["enabled", "file", "url"],
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"title": "Enabled",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
},
|
||||
"file": {
|
||||
"title": "Executable to update",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"examples": ["./paper.jar"],
|
||||
},
|
||||
"url": {
|
||||
"title": "URL to download the executable from",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
},
|
||||
},
|
||||
},
|
||||
"create_type": {
|
||||
"title": "Creation type",
|
||||
"type": "string",
|
||||
"default": "raw_exec",
|
||||
"enum": ["raw_exec", "import_server", "import_zip"],
|
||||
},
|
||||
"raw_exec_create_data": {
|
||||
"title": "Raw execution command create data",
|
||||
"type": "object",
|
||||
"required": ["command"],
|
||||
"properties": {
|
||||
"command": {
|
||||
"title": "Command",
|
||||
"type": "string",
|
||||
"default": "echo foo bar baz",
|
||||
"examples": ["caddy start"],
|
||||
"minLength": 1,
|
||||
}
|
||||
},
|
||||
},
|
||||
"import_server_create_data": {
|
||||
"title": "Import server data",
|
||||
"type": "object",
|
||||
"required": ["existing_server_path", "command"],
|
||||
"properties": {
|
||||
"existing_server_path": {
|
||||
"title": "Server path",
|
||||
"description": "Absolute path to the old server",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"command": {
|
||||
"title": "Command",
|
||||
"type": "string",
|
||||
"default": "echo foo bar baz",
|
||||
"examples": ["caddy start"],
|
||||
"minLength": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
"import_zip_create_data": {
|
||||
"title": "Import ZIP server data",
|
||||
"type": "object",
|
||||
"required": ["zip_path", "zip_root", "command"],
|
||||
"properties": {
|
||||
"zip_path": {
|
||||
"title": "ZIP path",
|
||||
"description": "Absolute path to the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/var/opt/server.zip"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"zip_root": {
|
||||
"title": "Server root directory",
|
||||
"description": "The server root in the ZIP archive",
|
||||
"type": "string",
|
||||
"examples": ["/", "/paper-server/", "server-1"],
|
||||
"minLength": 1,
|
||||
},
|
||||
"command": {
|
||||
"title": "Command",
|
||||
"type": "string",
|
||||
"default": "echo foo bar baz",
|
||||
"examples": ["caddy start"],
|
||||
"minLength": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$comment": "If..then section",
|
||||
"allOf": [
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "raw_exec"}}
|
||||
},
|
||||
"then": {"required": ["raw_exec_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {
|
||||
"create_type": {"const": "import_server"}
|
||||
}
|
||||
},
|
||||
"then": {"required": ["import_server_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "import_zip"}}
|
||||
},
|
||||
"then": {"required": ["import_zip_create_data"]},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "Only one creation data",
|
||||
"oneOf": [
|
||||
{"required": ["raw_exec_create_data"]},
|
||||
{"required": ["import_server_create_data"]},
|
||||
{"required": ["import_zip_create_data"]},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$comment": "If..then section",
|
||||
"allOf": [
|
||||
# start require creation data
|
||||
{
|
||||
"if": {"properties": {"create_type": {"const": "minecraft_java"}}},
|
||||
"then": {"required": ["minecraft_java_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {"create_type": {"const": "minecraft_bedrock"}}
|
||||
},
|
||||
"then": {"required": ["minecraft_bedrock_create_data"]},
|
||||
},
|
||||
{
|
||||
"if": {"properties": {"create_type": {"const": "custom"}}},
|
||||
"then": {"required": ["custom_create_data"]},
|
||||
},
|
||||
# end require creation data
|
||||
# start require monitoring data
|
||||
{
|
||||
"if": {
|
||||
"properties": {"monitoring_type": {"const": "minecraft_java"}}
|
||||
},
|
||||
"then": {"required": ["minecraft_java_monitoring_data"]},
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"properties": {
|
||||
"monitoring_type": {"const": "minecraft_bedrock"}
|
||||
}
|
||||
},
|
||||
"then": {"required": ["minecraft_bedrock_monitoring_data"]},
|
||||
},
|
||||
# end require monitoring data
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "Only one creation data",
|
||||
"oneOf": [
|
||||
{"required": ["minecraft_java_create_data"]},
|
||||
{"required": ["minecraft_bedrock_create_data"]},
|
||||
{"required": ["custom_create_data"]},
|
||||
],
|
||||
},
|
||||
{
|
||||
"title": "Only one monitoring data",
|
||||
"oneOf": [
|
||||
{"required": ["minecraft_java_monitoring_data"]},
|
||||
{"required": ["minecraft_bedrock_monitoring_data"]},
|
||||
{"properties": {"monitoring_type": {"const": "none"}}},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
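# A hedged illustration of how the schema above composes (field values are
# hypothetical, taken only from the schema's own "examples" entries): a Bedrock
# import-from-ZIP request must set create_type to "import_zip" and carry exactly
# one creation-data object, e.g.
#
#     "minecraft_bedrock_create_data": {
#         "create_type": "import_zip",
#         "import_zip_create_data": {
#             "zip_path": "/var/opt/server.zip",
#             "zip_root": "/",
#             "command": "LD_LIBRARY_PATH=. ./bedrock_server",
#         },
#     }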
|
||||
|
||||
|
||||
class ApiServersIndexHandler(BaseApiHandler):
|
||||
def get(self):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
# TODO: limit some columns for specific permissions
|
||||
|
||||
self.finish_json(200, {"status": "ok", "data": auth_data[0]})
|
||||
|
||||
def post(self):
|
||||
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
_superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if EnumPermissionsCrafty.SERVER_CREATION not in exec_user_crafty_permissions:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
try:
|
||||
data = orjson.loads(self.request.body)
|
||||
except orjson.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, new_server_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
new_server_id, new_server_uuid = self.controller.create_api_server(data)
|
||||
|
||||
# Increase the server creation counter
|
||||
self.controller.crafty_perms.add_server_creation(user["user_id"])
|
||||
|
||||
self.controller.servers.stats.record_stats()
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
(
|
||||
f"created server {data['name']}"
|
||||
f" (ID: {new_server_id})"
|
||||
f" (UUID: {new_server_uuid})"
|
||||
),
|
||||
server_id=new_server_id,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
201,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"new_server_id": str(new_server_id),
|
||||
"new_server_uuid": new_server_uuid,
|
||||
},
|
||||
},
|
||||
)
|
98
app/classes/web/routes/api/servers/server/action.py
Normal file
@ -0,0 +1,98 @@
|
||||
import logging
|
||||
import os
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.models.servers import Servers
|
||||
from app.classes.shared.file_helpers import FileHelpers
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiServersServerActionHandler(BaseApiHandler):
|
||||
def post(self, server_id: str, action: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if (
|
||||
EnumPermissionsServer.COMMANDS
|
||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
||||
auth_data[4]["user_id"], server_id
|
||||
)
|
||||
):
|
||||
# if the user doesn't have Commands permission, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if action == "clone_server":
|
||||
return self._clone_server(server_id, auth_data[4]["user_id"])
|
||||
|
||||
self.controller.management.send_command(
|
||||
auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok"},
|
||||
)
|
||||
|
||||
def _clone_server(self, server_id, user_id):
|
||||
def is_name_used(name):
|
||||
return Servers.select().where(Servers.server_name == name).exists()
|
||||
|
||||
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
||||
server_uuid = server_data.get("server_uuid")
|
||||
new_server_name = server_data.get("server_name") + " (Copy)"
|
||||
|
||||
name_counter = 1
|
||||
while is_name_used(new_server_name):
|
||||
name_counter += 1
|
||||
new_server_name = server_data.get("server_name") + f" (Copy {name_counter})"
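# For illustration (hypothetical names): if "Lobby (Copy)" and "Lobby (Copy 2)"
# already exist, the loop above settles on "Lobby (Copy 3)".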
|
||||
|
||||
new_server_uuid = Helpers.create_uuid()
|
||||
while os.path.exists(os.path.join(self.helper.servers_dir, new_server_uuid)):
|
||||
new_server_uuid = Helpers.create_uuid()
|
||||
new_server_path = os.path.join(self.helper.servers_dir, new_server_uuid)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user_id,
|
||||
f"is cloning server {server_id} named {server_data.get('server_name')}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
# copy the old server
|
||||
FileHelpers.copy_dir(server_data.get("path"), new_server_path)
|
||||
|
||||
# TODO get old server DB data to individual variables
|
||||
new_server_command = str(server_data.get("execution_command")).replace(
|
||||
server_uuid, new_server_uuid
|
||||
)
|
||||
new_server_log_file = str(
|
||||
self.helper.get_os_understandable_path(server_data.get("log_path"))
|
||||
).replace(server_uuid, new_server_uuid)
|
||||
|
||||
new_server_id = self.controller.servers.create_server(
|
||||
new_server_name,
|
||||
new_server_uuid,
|
||||
new_server_path,
|
||||
"",
|
||||
new_server_command,
|
||||
server_data.get("executable"),
|
||||
new_server_log_file,
|
||||
server_data.get("stop_command"),
|
||||
server_data.get("type"),
|
||||
server_data.get("server_port"),
|
||||
)
|
||||
|
||||
self.controller.servers.init_all_servers()
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": {"new_server_id": str(new_server_id)}},
|
||||
)
|
168
app/classes/web/routes/api/servers/server/index.py
Normal file
@ -0,0 +1,168 @@
|
||||
import logging
|
||||
import json
|
||||
from jsonschema import validate
|
||||
from jsonschema.exceptions import ValidationError
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# TODO: modify monitoring
|
||||
server_patch_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"server_name": {"type": "string", "minLength": 1},
|
||||
"path": {"type": "string", "minLength": 1},
|
||||
"backup_path": {"type": "string"},
|
||||
"executable": {"type": "string"},
|
||||
"log_path": {"type": "string", "minLength": 1},
|
||||
"execution_command": {"type": "string", "minLength": 1},
|
||||
"auto_start": {"type": "boolean"},
|
||||
"auto_start_delay": {"type": "integer"},
|
||||
"crash_detection": {"type": "boolean"},
|
||||
"stop_command": {"type": "string"},
|
||||
"executable_update_url": {"type": "string", "minLength": 1},
|
||||
"server_ip": {"type": "string", "minLength": 1},
|
||||
"server_port": {"type": "integer"},
|
||||
"logs_delete_after": {"type": "integer"},
|
||||
"type": {"type": "string", "minLength": 1},
|
||||
},
|
||||
"anyOf": [
|
||||
# Require at least one property
|
||||
{"required": [name]}
|
||||
for name in [
|
||||
"server_name",
|
||||
"path",
|
||||
"backup_path",
|
||||
"executable",
|
||||
"log_path",
|
||||
"execution_command",
|
||||
"auto_start",
|
||||
"auto_start_delay",
|
||||
"crash_detection",
|
||||
"stop_command",
|
||||
"executable_update_url",
|
||||
"server_ip",
|
||||
"server_port",
|
||||
"logs_delete_after",
|
||||
"type",
|
||||
]
|
||||
],
|
||||
"additionalProperties": False,
|
||||
}
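# A hedged illustration (the values are hypothetical): the "anyOf" block above only
# requires that at least one known property is present, so a partial update such as
#
#     validate({"server_name": "Lobby", "auto_start": True}, server_patch_schema)
#
# passes, while an empty body ({}) or a payload with an unknown key is rejected.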
|
||||
|
||||
|
||||
class ApiServersServerIndexHandler(BaseApiHandler):
|
||||
def get(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
server_obj = self.controller.servers.get_server_obj(server_id)
|
||||
server = model_to_dict(server_obj)
|
||||
|
||||
# TODO: limit some columns for specific permissions?
|
||||
|
||||
self.finish_json(200, {"status": "ok", "data": server})
|
||||
|
||||
def patch(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
try:
|
||||
data = json.loads(self.request.body)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, server_patch_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if (
|
||||
EnumPermissionsServer.CONFIG
|
||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
||||
auth_data[4]["user_id"], server_id
|
||||
)
|
||||
):
|
||||
# if the user doesn't have Config permission, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
server_obj = self.controller.servers.get_server_obj(server_id)
|
||||
for key in data:
|
||||
# If we don't validate the input there could be security issues
|
||||
setattr(server_obj, key, data[key])
|
||||
self.controller.servers.update_server(server_obj)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
auth_data[4]["user_id"],
|
||||
f"modified the server with ID {server_id}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
return self.finish_json(200, {"status": "ok"})
|
||||
|
||||
def delete(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
# DELETE /api/v2/servers/server?files=true
|
||||
remove_files = self.get_query_argument("files", None) == "true"
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if (
|
||||
EnumPermissionsServer.CONFIG
|
||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
||||
auth_data[4]["user_id"], server_id
|
||||
)
|
||||
):
|
||||
# if the user doesn't have Config permission, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
logger.info(
|
||||
(
|
||||
"Removing server and all associated files for server: "
|
||||
if remove_files
|
||||
else "Removing server from panel for server: "
|
||||
)
|
||||
+ self.controller.servers.get_server_friendly_name(server_id)
|
||||
)
|
||||
|
||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||
self.controller.remove_server(server_id, remove_files)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
auth_data[4]["user_id"],
|
||||
f"deleted the server {server_id}",
|
||||
server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok"},
|
||||
)
|
76
app/classes/web/routes/api/servers/server/logs.py
Normal file
@ -0,0 +1,76 @@
|
||||
import html
|
||||
import logging
|
||||
import re
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.shared.server import ServerOutBuf
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
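# For illustration (hypothetical console line): the pattern above removes ANSI
# control sequences, e.g.
#
#     ansi_escape.sub("", "\x1b[32mDone (3.2s)!\x1b[0m")  ->  "Done (3.2s)!"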
|
||||
|
||||
|
||||
class ApiServersServerLogsHandler(BaseApiHandler):
|
||||
def get(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
# GET /api/v2/servers/server/logs?file=true
|
||||
read_log_file = self.get_query_argument("file", None) == "true"
|
||||
# GET /api/v2/servers/server/logs?colors=true
|
||||
colored_output = self.get_query_argument("colors", None) == "true"
|
||||
# GET /api/v2/servers/server/logs?raw=true
|
||||
disable_ansi_strip = self.get_query_argument("raw", None) == "true"
|
||||
# GET /api/v2/servers/server/logs?html=true
|
||||
use_html = self.get_query_argument("html", None) == "true"
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if (
|
||||
EnumPermissionsServer.LOGS
|
||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
||||
auth_data[4]["user_id"], server_id
|
||||
)
|
||||
):
|
||||
# if the user doesn't have Logs permission, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
||||
|
||||
if read_log_file:
|
||||
log_lines = self.helper.get_setting("max_log_lines")
|
||||
raw_lines = self.helper.tail_file(
|
||||
self.helper.get_os_understandable_path(server_data["log_path"]),
|
||||
log_lines,
|
||||
)
|
||||
|
||||
# Remove newline characters from the end of the lines
|
||||
raw_lines = [line.rstrip("\r\n") for line in raw_lines]
|
||||
else:
|
||||
raw_lines = ServerOutBuf.lines.get(server_id, [])
|
||||
|
||||
lines = []
|
||||
|
||||
for line in raw_lines:
|
||||
try:
|
||||
if not disable_ansi_strip:
|
||||
line = ansi_escape.sub("", line)
|
||||
line = re.sub("[A-z]{2}\b\b", "", line)
|
||||
line = html.escape(line)
|
||||
|
||||
if colored_output:
|
||||
line = self.helper.log_colors(line)
|
||||
|
||||
lines.append(line)
|
||||
except Exception as e:
|
||||
logger.warning(f"Skipping Log Line due to error: {e}")
|
||||
|
||||
if use_html:
|
||||
for line in lines:
|
||||
self.write(f"{line}<br />")
|
||||
else:
|
||||
self.finish_json(200, {"status": "ok", "data": lines})
|
23
app/classes/web/routes/api/servers/server/public.py
Normal file
@ -0,0 +1,23 @@
|
||||
import logging
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiServersServerPublicHandler(BaseApiHandler):
|
||||
def get(self, server_id):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
server_obj = self.controller.servers.get_server_obj(server_id)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
key: getattr(server_obj, key)
|
||||
for key in ["server_id", "created", "server_name", "type"]
|
||||
},
|
||||
},
|
||||
)
|
28
app/classes/web/routes/api/servers/server/stats.py
Normal file
@ -0,0 +1,28 @@
|
||||
import logging
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
from app.classes.controllers.servers_controller import ServersController
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiServersServerStatsHandler(BaseApiHandler):
|
||||
def get(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
srv = ServersController().get_server_instance_by_id(server_id)
|
||||
latest = srv.stats_helper.get_latest_server_stats()
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": latest,
|
||||
},
|
||||
)
|
47
app/classes/web/routes/api/servers/server/stdin.py
Normal file
@ -0,0 +1,47 @@
|
||||
import logging
|
||||
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiServersServerStdinHandler(BaseApiHandler):
|
||||
def post(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if (
|
||||
EnumPermissionsServer.COMMANDS
|
||||
not in self.controller.server_perms.get_user_id_permissions_list(
|
||||
auth_data[4]["user_id"], server_id
|
||||
)
|
||||
):
|
||||
# if the user doesn't have Commands permission, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
svr = self.controller.get_server_obj_optional(server_id)
|
||||
if svr is None:
|
||||
# It's in auth_data[0] but not as a Server object
|
||||
logger.critical(
|
||||
"Something has gone VERY wrong! "
|
||||
"Crafty can't access the server object. "
|
||||
"Please report this to the devs"
|
||||
)
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if svr.send_command(self.request.body.decode("utf-8")):
|
||||
return self.finish_json(
|
||||
200,
|
||||
{"status": "ok"},
|
||||
)
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "error", "error": "SERVER_NOT_RUNNING"},
|
||||
)
|
31
app/classes/web/routes/api/servers/server/users.py
Normal file
@ -0,0 +1,31 @@
|
||||
import logging
|
||||
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiServersServerUsersHandler(BaseApiHandler):
|
||||
def get(self, server_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
|
||||
# if the user doesn't have access to the server, return an error
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if EnumPermissionsCrafty.USER_CONFIG not in auth_data[1]:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
if EnumPermissionsCrafty.ROLES_CONFIG not in auth_data[1]:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": list(self.controller.servers.get_authorized_users(server_id)),
|
||||
},
|
||||
)
|
175
app/classes/web/routes/api/users/index.py
Normal file
@ -0,0 +1,175 @@
|
||||
import logging
|
||||
import json
|
||||
from jsonschema import validate
|
||||
from jsonschema.exceptions import ValidationError
|
||||
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
|
||||
from app.classes.models.roles import Roles, HelperRoles
|
||||
from app.classes.models.users import PUBLIC_USER_ATTRS
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiUsersIndexHandler(BaseApiHandler):
|
||||
def get(self):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
_,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
# GET /api/v2/users?ids=true
|
||||
get_only_ids = self.get_query_argument("ids", None) == "true"
|
||||
|
||||
if EnumPermissionsCrafty.USER_CONFIG in exec_user_crafty_permissions:
|
||||
if get_only_ids:
|
||||
data = self.controller.users.get_all_user_ids()
|
||||
else:
|
||||
data = [
|
||||
{key: getattr(user_res, key) for key in PUBLIC_USER_ATTRS}
|
||||
for user_res in self.controller.users.get_all_users().execute()
|
||||
]
|
||||
else:
|
||||
if get_only_ids:
|
||||
data = [user["user_id"]]
|
||||
else:
|
||||
user_res = self.controller.users.get_user_by_id(user["user_id"])
|
||||
user_res["roles"] = list(
|
||||
map(HelperRoles.get_role, user_res.get("roles", set()))
|
||||
)
|
||||
data = [{key: user_res[key] for key in PUBLIC_USER_ATTRS}]
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
def post(self):
|
||||
new_user_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
**self.controller.users.user_jsonschema_props,
|
||||
},
|
||||
"required": ["username", "password"],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
|
||||
|
||||
try:
|
||||
data = json.loads(self.request.body)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, new_user_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
username = data["username"]
|
||||
password = data["password"]
|
||||
email = data.get("email", "default@example.com")
|
||||
enabled = data.get("enabled", True)
|
||||
lang = data.get("lang", self.helper.get_setting("language"))
|
||||
new_superuser = data.get("superuser", False)
|
||||
permissions = data.get("permissions", None)
|
||||
roles = data.get("roles", None)
|
||||
hints = data.get("hints", True)
|
||||
|
||||
if username.lower() in ["system", ""]:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_USERNAME"}
|
||||
)
|
||||
|
||||
if self.controller.users.get_id_by_name(username) is not None:
|
||||
return self.finish_json(400, {"status": "error", "error": "USER_EXISTS"})
|
||||
|
||||
if roles is None:
|
||||
roles = set()
|
||||
else:
|
||||
role_ids = [str(role_id) for role_id in Roles.select(Roles.role_id)]
|
||||
roles = {role for role in roles if str(role) in role_ids}
|
||||
|
||||
permissions_mask = "0" * len(EnumPermissionsCrafty.__members__.items())
|
||||
server_quantity = {
|
||||
perm.name: 0
|
||||
for perm in self.controller.crafty_perms.list_defined_crafty_permissions()
|
||||
}
|
||||
|
||||
if permissions is not None:
|
||||
server_quantity = {}
|
||||
permissions_mask = list(permissions_mask)
|
||||
for permission in permissions:
|
||||
server_quantity[permission["name"]] = permission["quantity"]
|
||||
permissions_mask[EnumPermissionsCrafty[permission["name"]].value] = (
|
||||
"1" if permission["enabled"] else "0"
|
||||
)
|
||||
permissions_mask = "".join(permissions_mask)
|
||||
|
||||
if new_superuser and not superuser:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_SUPERUSER_CREATE"}
|
||||
)
|
||||
|
||||
if len(roles) != 0 and not superuser:
|
||||
# HACK: This should check if the user has the roles or something
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
|
||||
)
|
||||
|
||||
# TODO: do this in the most efficient way
|
||||
user_id = self.controller.users.add_user(
|
||||
username,
|
||||
password,
|
||||
email,
|
||||
enabled,
|
||||
new_superuser,
|
||||
)
|
||||
self.controller.users.update_user(
|
||||
user_id,
|
||||
{"roles": roles, "lang": lang, "hints": hints},
|
||||
{
|
||||
"permissions_mask": permissions_mask,
|
||||
"server_quantity": server_quantity,
|
||||
},
|
||||
)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
f"added user {username} (UID:{user_id}) with roles {roles}",
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
201,
|
||||
{"status": "ok", "data": {"user_id": str(user_id)}},
|
||||
)
|
297
app/classes/web/routes/api/users/user/index.py
Normal file
@ -0,0 +1,297 @@
|
||||
import json
|
||||
import logging
|
||||
import typing as t
|
||||
|
||||
from jsonschema import ValidationError, validate
|
||||
from app.classes.controllers.users_controller import UsersController
|
||||
from app.classes.models.crafty_permissions import (
|
||||
EnumPermissionsCrafty,
|
||||
PermissionsCrafty,
|
||||
)
|
||||
from app.classes.models.roles import HelperRoles
|
||||
from app.classes.models.users import HelperUsers
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiUsersUserIndexHandler(BaseApiHandler):
|
||||
def get(self, user_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
_,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if user_id in ["@me", user["user_id"]]:
|
||||
user_id = user["user_id"]
|
||||
res_user = user
|
||||
elif EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "NOT_AUTHORIZED",
|
||||
},
|
||||
)
|
||||
else:
|
||||
# has User_Config permission and isn't viewing self
|
||||
res_user = self.controller.users.get_user_by_id(user_id)
|
||||
if not res_user:
|
||||
return self.finish_json(
|
||||
404,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "USER_NOT_FOUND",
|
||||
},
|
||||
)
|
||||
|
||||
# Remove password and valid_tokens_from from the response
|
||||
# as those should never be sent out to the client.
|
||||
res_user.pop("password", None)
|
||||
res_user.pop("valid_tokens_from", None)
|
||||
res_user["roles"] = list(
|
||||
map(HelperRoles.get_role, res_user.get("roles", set()))
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": res_user},
|
||||
)
|
||||
|
||||
def delete(self, user_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
_,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if (user_id in ["@me", user["user_id"]]) and self.helper.get_setting(
|
||||
"allow_self_delete", False
|
||||
):
|
||||
user_id = user["user_id"]
|
||||
self.controller.users.remove_user(user_id)
|
||||
elif EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "NOT_AUTHORIZED",
|
||||
},
|
||||
)
|
||||
else:
|
||||
# has User_Config permission
|
||||
self.controller.users.remove_user(user_id)
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
f"deleted the user {user_id}",
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok"},
|
||||
)
|
||||
|
||||
def patch(self, user_id: str):
|
||||
user_patch_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
**self.controller.users.user_jsonschema_props,
|
||||
},
|
||||
"anyOf": [
|
||||
# Require at least one property
|
||||
{"required": [name]}
|
||||
for name in [
|
||||
"username",
|
||||
"password",
|
||||
"email",
|
||||
"enabled",
|
||||
"lang",
|
||||
"superuser",
|
||||
"permissions",
|
||||
"roles",
|
||||
"hints",
|
||||
]
|
||||
],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
superuser,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
try:
|
||||
data = json.loads(self.request.body)
|
||||
except json.decoder.JSONDecodeError as e:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
|
||||
)
|
||||
|
||||
try:
|
||||
validate(data, user_patch_schema)
|
||||
except ValidationError as e:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "INVALID_JSON_SCHEMA",
|
||||
"error_data": str(e),
|
||||
},
|
||||
)
|
||||
|
||||
if user_id == "@me":
|
||||
user_id = user["user_id"]
|
||||
|
||||
if (
|
||||
EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
|
||||
and str(user["user_id"]) != str(user_id)
|
||||
):
|
||||
# If doesn't have perm can't edit other users
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "NOT_AUTHORIZED",
|
||||
},
|
||||
)
|
||||
|
||||
if "username" in data:
|
||||
if data["username"].lower() in ["system", ""]:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_USERNAME"}
|
||||
)
|
||||
if self.controller.users.get_id_by_name(data["username"]) is not None:
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "USER_EXISTS"}
|
||||
)
|
||||
|
||||
if "superuser" in data:
|
||||
if str(user["user_id"]) == str(user_id) and not superuser:
|
||||
# Checks if user is trying to change super user status
|
||||
# of self without superuser. We don't want that.
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_SUPERUSER_MODIFY"}
|
||||
)
|
||||
if not superuser:
|
||||
# The user is not superuser so they can't change the superuser status
|
||||
data.pop("superuser")
|
||||
|
||||
if "permissions" in data:
|
||||
if str(user["user_id"]) == str(user_id) and not superuser:
|
||||
# Checks if user is trying to change permissions
|
||||
# of self without superuser. We don't want that.
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_PERMISSIONS_MODIFY"}
|
||||
)
|
||||
if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
# Checks if user is trying to change permissions of someone
|
||||
# else without User Config permission. We don't want that.
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_PERMISSIONS_MODIFY"}
|
||||
)
|
||||
|
||||
if "roles" in data:
|
||||
if str(user["user_id"]) == str(user_id) and not superuser:
|
||||
# Checks if user is trying to change roles of
|
||||
# self without superuser. We don't want that.
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
|
||||
)
|
||||
if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
# Checks if user is trying to change roles of someone
|
||||
# else without User Config permission. We don't want that.
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
|
||||
)
|
||||
|
||||
if "password" in data and str(user["user_id"] == str(user_id)):
|
||||
# TODO: edit your own password
|
||||
return self.finish_json(
|
||||
400, {"status": "error", "error": "INVALID_PASSWORD_MODIFY"}
|
||||
)
|
||||
|
||||
user_obj = HelperUsers.get_user_model(user_id)
|
||||
|
||||
if "roles" in data:
|
||||
roles: t.Set[str] = set(data.pop("roles"))
|
||||
base_roles: t.Set[str] = set(user_obj.roles)
|
||||
added_roles = roles.difference(base_roles)
|
||||
removed_roles = base_roles.difference(roles)
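# For illustration (hypothetical role IDs): roles == {"1", "3"} and
# base_roles == {"1", "2"} give added_roles == {"3"} and removed_roles == {"2"}.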
|
||||
logger.debug(
|
||||
f"updating user {user_id}'s roles: "
|
||||
f"+role:{added_roles} -role:{removed_roles}"
|
||||
)
|
||||
|
||||
for role_id in added_roles:
|
||||
HelperUsers.get_or_create(user_id, role_id)
|
||||
|
||||
if len(removed_roles) != 0:
|
||||
self.controller.users.users_helper.delete_user_roles(
|
||||
user_id, removed_roles
|
||||
)
|
||||
|
||||
if "permissions" in data:
|
||||
permissions: t.List[UsersController.ApiPermissionDict] = data.pop(
|
||||
"permissions"
|
||||
)
|
||||
permissions_mask = "0" * len(EnumPermissionsCrafty)
|
||||
limit_server_creation = 0
|
||||
limit_user_creation = 0
|
||||
limit_role_creation = 0
|
||||
|
||||
for permission in permissions:
|
||||
self.controller.crafty_perms.set_permission(
|
||||
permissions_mask,
|
||||
EnumPermissionsCrafty.__members__[permission["name"]],
|
||||
"1" if permission["enabled"] else "0",
|
||||
)
|
||||
|
||||
PermissionsCrafty.add_or_update_user(
|
||||
user_id,
|
||||
permissions_mask,
|
||||
limit_server_creation,
|
||||
limit_user_creation,
|
||||
limit_role_creation,
|
||||
)
|
||||
|
||||
# TODO: make this more efficient
|
||||
if len(data) != 0:
|
||||
for key in data:
|
||||
# If we don't validate the input there could be security issues
|
||||
value = data[key]
|
||||
if key == "password":
|
||||
value = self.helper.encode_pass(value)
|
||||
setattr(user_obj, key, value)
|
||||
user_obj.save()
|
||||
|
||||
self.controller.management.add_to_audit_log(
|
||||
user["user_id"],
|
||||
(
|
||||
f"edited user {user_obj.username} (UID: {user_id})"
|
||||
f"with roles {user_obj.roles}"
|
||||
),
|
||||
server_id=0,
|
||||
source_ip=self.get_remote_ip(),
|
||||
)
|
||||
|
||||
return self.finish_json(200, {"status": "ok"})
|
73
app/classes/web/routes/api/users/user/permissions.py
Normal file
@ -0,0 +1,73 @@
|
||||
import logging
|
||||
import typing as t
|
||||
|
||||
from app.classes.models.crafty_permissions import (
|
||||
EnumPermissionsCrafty,
|
||||
PermissionsCrafty,
|
||||
)
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
SERVER_CREATION: t.Final[str] = EnumPermissionsCrafty.SERVER_CREATION.name
|
||||
USER_CONFIG: t.Final[str] = EnumPermissionsCrafty.USER_CONFIG.name
|
||||
ROLES_CONFIG: t.Final[str] = EnumPermissionsCrafty.ROLES_CONFIG.name
|
||||
|
||||
|
||||
class ApiUsersUserPermissionsHandler(BaseApiHandler):
|
||||
def get(self, user_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
exec_user_crafty_permissions,
|
||||
_,
|
||||
_,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if user_id in ["@me", user["user_id"]]:
|
||||
user_id = user["user_id"]
|
||||
res_data = PermissionsCrafty.get_user_crafty(user_id)
|
||||
elif EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
|
||||
return self.finish_json(
|
||||
400,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "NOT_AUTHORIZED",
|
||||
},
|
||||
)
|
||||
else:
|
||||
# has User_Config permission and isn't viewing self
|
||||
res_data = PermissionsCrafty.get_user_crafty_optional(user_id)
|
||||
if res_data is None:
|
||||
return self.finish_json(
|
||||
404,
|
||||
{
|
||||
"status": "error",
|
||||
"error": "USER_NOT_FOUND",
|
||||
},
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{
|
||||
"status": "ok",
|
||||
"data": {
|
||||
"permissions": res_data.permissions,
|
||||
"counters": {
|
||||
SERVER_CREATION: res_data.created_server,
|
||||
USER_CONFIG: res_data.created_user,
|
||||
ROLES_CONFIG: res_data.created_role,
|
||||
},
|
||||
"limits": {
|
||||
SERVER_CREATION: res_data.limit_server_creation,
|
||||
USER_CONFIG: res_data.limit_user_creation,
|
||||
ROLES_CONFIG: res_data.limit_role_creation,
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
49
app/classes/web/routes/api/users/user/pfp.py
Normal file
@ -0,0 +1,49 @@
|
||||
import logging
|
||||
import libgravatar
|
||||
import requests
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiUsersUserPfpHandler(BaseApiHandler):
|
||||
def get(self, user_id):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
|
||||
if user_id == "@me":
|
||||
user = auth_data[4]
|
||||
else:
|
||||
user = self.controller.users.get_user_by_id(user_id)
|
||||
|
||||
logger.debug(
|
||||
f'User {auth_data[4]["user_id"]} is fetching the pfp for user {user_id}'
|
||||
)
|
||||
|
||||
# http://en.gravatar.com/site/implement/images/#rating
|
||||
if self.helper.get_setting("allow_nsfw_profile_pictures"):
|
||||
rating = "x"
|
||||
else:
|
||||
rating = "g"
|
||||
|
||||
# Get the Gravatar hash for profile pictures
|
||||
if user["email"] != "default@example.com" or "":
|
||||
gravatar = libgravatar.Gravatar(libgravatar.sanitize_email(user["email"]))
|
||||
url = gravatar.get_image(
|
||||
size=80,
|
||||
default="404",
|
||||
force_default=False,
|
||||
rating=rating,
|
||||
filetype_extension=False,
|
||||
use_ssl=True,
|
||||
)
|
||||
try:
|
||||
requests.head(url).raise_for_status()
|
||||
except requests.HTTPError as e:
|
||||
logger.debug("Gravatar profile picture not found", exc_info=e)
|
||||
else:
|
||||
self.finish_json(200, {"status": "ok", "data": url})
|
||||
return
|
||||
|
||||
self.finish_json(200, {"status": "ok", "data": None})
|
37
app/classes/web/routes/api/users/user/public.py
Normal file
@ -0,0 +1,37 @@
|
||||
import logging
|
||||
from app.classes.models.roles import HelperRoles
|
||||
from app.classes.models.users import PUBLIC_USER_ATTRS
|
||||
from app.classes.web.base_api_handler import BaseApiHandler
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApiUsersUserPublicHandler(BaseApiHandler):
|
||||
def get(self, user_id: str):
|
||||
auth_data = self.authenticate_user()
|
||||
if not auth_data:
|
||||
return
|
||||
(
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
_,
|
||||
user,
|
||||
) = auth_data
|
||||
|
||||
if user_id == "@me":
|
||||
user_id = user["user_id"]
|
||||
public_user = user
|
||||
else:
|
||||
public_user = self.controller.users.get_user_by_id(user_id)
|
||||
|
||||
public_user = {key: public_user.get(key) for key in PUBLIC_USER_ATTRS}
|
||||
|
||||
public_user["roles"] = list(
|
||||
map(HelperRoles.get_role, public_user.get("roles", set()))
|
||||
)
|
||||
|
||||
self.finish_json(
|
||||
200,
|
||||
{"status": "ok", "data": public_user},
|
||||
)
|
@ -1,117 +1,162 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
import bleach
|
||||
import libgravatar
|
||||
import requests
|
||||
|
||||
from app.classes.minecraft.serverjars import server_jar_obj
|
||||
from app.classes.models.crafty_permissions import Enum_Permissions_Crafty
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.file_helpers import file_helper
|
||||
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.file_helpers import FileHelpers
|
||||
from app.classes.shared.main_models import DatabaseShortcuts
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import tornado.web
|
||||
import tornado.escape
|
||||
import bleach
|
||||
import libgravatar
|
||||
import requests
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ServerHandler(BaseHandler):
|
||||
|
||||
class ServerHandler(BaseHandler):
|
||||
@tornado.web.authenticated
|
||||
def get(self, page):
|
||||
# pylint: disable=unused-variable
|
||||
api_key, token_data, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
(
|
||||
api_key,
|
||||
_token_data,
|
||||
exec_user,
|
||||
) = self.current_user
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
exec_user_role = set()
|
||||
if superuser:
|
||||
defined_servers = self.controller.list_defined_servers()
|
||||
exec_user_role.add("Super User")
|
||||
exec_user_crafty_permissions = self.controller.crafty_perms.list_defined_crafty_permissions()
|
||||
defined_servers = self.controller.servers.list_defined_servers()
|
||||
exec_user_role = {"Super User"}
|
||||
exec_user_crafty_permissions = (
|
||||
self.controller.crafty_perms.list_defined_crafty_permissions()
|
||||
)
|
||||
list_roles = []
|
||||
for role in self.controller.roles.get_all_roles():
|
||||
list_roles.append(self.controller.roles.get_role(role.role_id))
|
||||
else:
|
||||
exec_user_crafty_permissions = self.controller.crafty_perms.get_crafty_permissions_list(exec_user["user_id"])
|
||||
defined_servers = self.controller.servers.get_authorized_servers(exec_user["user_id"])
|
||||
exec_user_crafty_permissions = (
|
||||
self.controller.crafty_perms.get_crafty_permissions_list(
|
||||
exec_user["user_id"]
|
||||
)
|
||||
)
|
||||
defined_servers = self.controller.servers.get_authorized_servers(
|
||||
exec_user["user_id"]
|
||||
)
|
||||
list_roles = []
|
||||
for r in exec_user['roles']:
|
||||
exec_user_role = set()
|
||||
for r in exec_user["roles"]:
|
||||
role = self.controller.roles.get_role(r)
|
||||
exec_user_role.add(role['role_name'])
|
||||
list_roles.append(self.controller.roles.get_role(role['role_id']))
|
||||
exec_user_role.add(role["role_name"])
|
||||
list_roles.append(self.controller.roles.get_role(role["role_id"]))
|
||||
|
||||
page_servers = []
|
||||
for server in defined_servers:
|
||||
if server not in page_servers:
|
||||
page_servers.append(
|
||||
DatabaseShortcuts.get_data_obj(server.server_object)
|
||||
)
|
||||
defined_servers = page_servers
|
||||
|
||||
template = "public/404.html"
|
||||
|
||||
page_data = {
|
||||
'version_data': helper.get_version_string(),
|
||||
'user_data': exec_user,
|
||||
'user_role' : exec_user_role,
|
||||
'roles' : list_roles,
|
||||
'user_crafty_permissions' : exec_user_crafty_permissions,
|
||||
'crafty_permissions': {
|
||||
'Server_Creation': Enum_Permissions_Crafty.Server_Creation,
|
||||
'User_Config': Enum_Permissions_Crafty.User_Config,
|
||||
'Roles_Config': Enum_Permissions_Crafty.Roles_Config,
|
||||
"version_data": self.helper.get_version_string(),
|
||||
"user_data": exec_user,
|
||||
"user_role": exec_user_role,
|
||||
"roles": list_roles,
|
||||
"user_crafty_permissions": exec_user_crafty_permissions,
|
||||
"crafty_permissions": {
|
||||
"Server_Creation": EnumPermissionsCrafty.SERVER_CREATION,
|
||||
"User_Config": EnumPermissionsCrafty.USER_CONFIG,
|
||||
"Roles_Config": EnumPermissionsCrafty.ROLES_CONFIG,
|
||||
},
|
||||
'server_stats': {
|
||||
'total': len(self.controller.list_defined_servers()),
|
||||
'running': len(self.controller.list_running_servers()),
|
||||
'stopped': (len(self.controller.list_defined_servers()) - len(self.controller.list_running_servers()))
|
||||
"server_stats": {
|
||||
"total": len(self.controller.servers.list_defined_servers()),
|
||||
"running": len(self.controller.servers.list_running_servers()),
|
||||
"stopped": (
|
||||
len(self.controller.servers.list_defined_servers())
|
||||
- len(self.controller.servers.list_running_servers())
|
||||
),
|
||||
},
|
||||
'hosts_data': self.controller.management.get_latest_hosts_stats(),
|
||||
'menu_servers': defined_servers,
|
||||
'show_contribute': helper.get_setting("show_contribute_link", True),
|
||||
'lang': self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
|
||||
'lang_page': helper.getLangPage(self.controller.users.get_user_lang_by_id(exec_user["user_id"])),
|
||||
'api_key': {
|
||||
'name': api_key.name,
|
||||
'created': api_key.created,
|
||||
'server_permissions': api_key.server_permissions,
|
||||
'crafty_permissions': api_key.crafty_permissions,
|
||||
'superuser': api_key.superuser
|
||||
} if api_key is not None else None,
|
||||
'superuser': superuser
|
||||
"hosts_data": self.controller.management.get_latest_hosts_stats(),
|
||||
"menu_servers": defined_servers,
|
||||
"show_contribute": self.helper.get_setting("show_contribute_link", True),
|
||||
"lang": self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
|
||||
"lang_page": Helpers.get_lang_page(
|
||||
self.controller.users.get_user_lang_by_id(exec_user["user_id"])
|
||||
),
|
||||
"api_key": {
|
||||
"name": api_key.name,
|
||||
"created": api_key.created,
|
||||
"server_permissions": api_key.server_permissions,
|
||||
"crafty_permissions": api_key.crafty_permissions,
|
||||
"superuser": api_key.superuser,
|
||||
}
|
||||
if api_key is not None
|
||||
else None,
|
||||
"superuser": superuser,
|
||||
}
|
||||
|
||||
if helper.get_setting("allow_nsfw_profile_pictures"):
|
||||
if self.helper.get_setting("allow_nsfw_profile_pictures"):
|
||||
rating = "x"
|
||||
else:
|
||||
rating = "g"
|
||||
|
||||
|
||||
if exec_user['email'] != 'default@example.com' or "":
|
||||
g = libgravatar.Gravatar(libgravatar.sanitize_email(exec_user['email']))
|
||||
url = g.get_image(size=80, default="404", force_default=False, rating=rating, filetype_extension=False, use_ssl=True) # + "?d=404"
|
||||
if requests.head(url).status_code != 404:
|
||||
profile_url = url
|
||||
else:
|
||||
if exec_user["email"] != "default@example.com" or "":
|
||||
gravatar = libgravatar.Gravatar(
|
||||
libgravatar.sanitize_email(exec_user["email"])
|
||||
)
|
||||
url = gravatar.get_image(
|
||||
size=80,
|
||||
default="404",
|
||||
force_default=False,
|
||||
rating=rating,
|
||||
filetype_extension=False,
|
||||
use_ssl=True,
|
||||
) # + "?d=404"
|
||||
try:
|
||||
if requests.head(url).status_code != 404:
|
||||
profile_url = url
|
||||
else:
|
||||
profile_url = "/static/assets/images/faces-clipart/pic-3.png"
|
||||
except:
|
||||
profile_url = "/static/assets/images/faces-clipart/pic-3.png"
|
||||
else:
|
||||
profile_url = "/static/assets/images/faces-clipart/pic-3.png"
|
||||
|
||||
page_data['user_image'] = profile_url
|
||||
page_data["user_image"] = profile_url
|
||||
if superuser:
|
||||
page_data['roles'] = list_roles
|
||||
page_data["roles"] = list_roles
|
||||
|
||||
if page == "step1":
|
||||
if not superuser and not self.controller.crafty_perms.can_create_server(exec_user["user_id"]):
|
||||
self.redirect("/panel/error?error=Unauthorized access: not a server creator or server limit reached")
|
||||
if not superuser and not self.controller.crafty_perms.can_create_server(
|
||||
exec_user["user_id"]
|
||||
):
|
||||
self.redirect(
|
||||
"/panel/error?error=Unauthorized access: "
|
||||
"not a server creator or server limit reached"
|
||||
)
|
||||
return
|
||||
|
||||
page_data['server_types'] = server_jar_obj.get_serverjar_data()
|
||||
page_data['js_server_types'] = json.dumps(server_jar_obj.get_serverjar_data())
|
||||
page_data["online"] = Helpers.check_internet()
|
||||
page_data["server_types"] = self.controller.server_jars.get_serverjar_data()
|
||||
page_data["js_server_types"] = json.dumps(
|
||||
self.controller.server_jars.get_serverjar_data()
|
||||
)
|
||||
template = "server/wizard.html"
|
||||
|
||||
if page == "bedrock_step1":
|
||||
if not superuser and not self.controller.crafty_perms.can_create_server(exec_user["user_id"]):
|
||||
self.redirect("/panel/error?error=Unauthorized access: not a server creator or server limit reached")
|
||||
if not superuser and not self.controller.crafty_perms.can_create_server(
|
||||
exec_user["user_id"]
|
||||
):
|
||||
self.redirect(
|
||||
"/panel/error?error=Unauthorized access: "
|
||||
"not a server creator or server limit reached"
|
||||
)
|
||||
return
|
||||
|
||||
template = "server/bedrock_wizard.html"
|
||||
@ -124,19 +169,20 @@ class ServerHandler(BaseHandler):
|
||||
|
||||
@tornado.web.authenticated
|
||||
def post(self, page):
|
||||
# pylint: disable=unused-variable
|
||||
api_key, token_data, exec_user = self.current_user
|
||||
superuser = exec_user['superuser']
|
||||
api_key, _token_data, exec_user = self.current_user
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
|
||||
template = "public/404.html"
|
||||
page_data = {
|
||||
'version_data': "version_data_here", # TODO
|
||||
'user_data': exec_user,
|
||||
'show_contribute': helper.get_setting("show_contribute_link", True),
|
||||
'lang': self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
|
||||
'lang_page': helper.getLangPage(self.controller.users.get_user_lang_by_id(exec_user["user_id"]))
|
||||
"version_data": "version_data_here", # TODO
|
||||
"user_data": exec_user,
|
||||
"show_contribute": self.helper.get_setting("show_contribute_link", True),
|
||||
"lang": self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
|
||||
"lang_page": Helpers.get_lang_page(
|
||||
self.controller.users.get_user_lang_by_id(exec_user["user_id"])
|
||||
),
|
||||
}
|
||||
|
||||
if page == "command":
|
||||
@@ -145,53 +191,87 @@ class ServerHandler(BaseHandler):
|
||||
|
||||
if server_id is not None:
|
||||
if command == "clone_server":
|
||||
|
||||
def is_name_used(name):
|
||||
for server in self.controller.servers.get_all_defined_servers():
|
||||
if server['server_name'] == name:
|
||||
if server["server_name"] == name:
|
||||
return True
|
||||
return
|
||||
|
||||
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
||||
server_uuid = server_data.get('server_uuid')
|
||||
new_server_name = server_data.get('server_name') + " (Copy)"
|
||||
server_data = self.controller.servers.get_server_data_by_id(
|
||||
server_id
|
||||
)
|
||||
server_uuid = server_data.get("server_uuid")
|
||||
new_server_name = server_data.get("server_name") + " (Copy)"
|
||||
|
||||
name_counter = 1
|
||||
while is_name_used(new_server_name):
|
||||
name_counter += 1
|
||||
new_server_name = server_data.get('server_name') + f" (Copy {name_counter})"
|
||||
new_server_name = (
|
||||
server_data.get("server_name") + f" (Copy {name_counter})"
|
||||
)
|
||||
|
||||
new_server_uuid = helper.create_uuid()
|
||||
while os.path.exists(os.path.join(helper.servers_dir, new_server_uuid)):
|
||||
new_server_uuid = helper.create_uuid()
|
||||
new_server_path = os.path.join(helper.servers_dir, new_server_uuid)
|
||||
new_server_uuid = Helpers.create_uuid()
|
||||
while os.path.exists(
|
||||
os.path.join(self.helper.servers_dir, new_server_uuid)
|
||||
):
|
||||
new_server_uuid = Helpers.create_uuid()
|
||||
new_server_path = os.path.join(
|
||||
self.helper.servers_dir, new_server_uuid
|
||||
)
|
||||
|
||||
# copy the old server
|
||||
file_helper.copy_dir(server_data.get('path'), new_server_path)
|
||||
FileHelpers.copy_dir(server_data.get("path"), new_server_path)
|
||||
|
||||
# TODO get old server DB data to individual variables
|
||||
stop_command = server_data.get('stop_command')
|
||||
new_server_command = str(server_data.get('execution_command')).replace(server_uuid, new_server_uuid)
|
||||
new_executable = server_data.get('executable')
|
||||
new_server_log_file = str(helper.get_os_understandable_path(server_data.get('log_path'))).replace(server_uuid, new_server_uuid)
|
||||
server_port = server_data.get('server_port')
|
||||
server_type = server_data.get('type')
|
||||
stop_command = server_data.get("stop_command")
|
||||
new_server_command = str(
|
||||
server_data.get("execution_command")
|
||||
).replace(server_uuid, new_server_uuid)
|
||||
new_executable = server_data.get("executable")
|
||||
new_server_log_file = str(
|
||||
Helpers.get_os_understandable_path(server_data.get("log_path"))
|
||||
).replace(server_uuid, new_server_uuid)
|
||||
backup_path = os.path.join(self.helper.backup_path, new_server_uuid)
|
||||
server_port = server_data.get("server_port")
|
||||
server_type = server_data.get("type")
|
||||
|
||||
self.controller.servers.create_server(new_server_name,
|
||||
new_server_uuid,
|
||||
new_server_path,
|
||||
"",
|
||||
new_server_command,
|
||||
new_executable,
|
||||
new_server_log_file,
|
||||
stop_command,
|
||||
server_type,
|
||||
server_port)
|
||||
new_server_id = self.controller.servers.create_server(
|
||||
new_server_name,
|
||||
new_server_uuid,
|
||||
new_server_path,
|
||||
backup_path,
|
||||
new_server_command,
|
||||
new_executable,
|
||||
new_server_log_file,
|
||||
stop_command,
|
||||
server_type,
|
||||
server_port,
|
||||
)
|
||||
if not exec_user["superuser"]:
|
||||
new_server_uuid = self.controller.servers.get_server_data_by_id(
|
||||
new_server_id
|
||||
).get("server_uuid")
|
||||
role_id = self.controller.roles.add_role(
|
||||
f"Creator of Server with uuid={new_server_uuid}"
|
||||
)
|
||||
self.controller.server_perms.add_role_server(
|
||||
new_server_id, role_id, "11111111"
|
||||
)
|
||||
self.controller.users.add_role_to_user(
|
||||
exec_user["user_id"], role_id
|
||||
)
|
||||
self.controller.crafty_perms.add_server_creation(
|
||||
exec_user["user_id"]
|
||||
)
|
||||
|
||||
self.controller.init_all_servers()
|
||||
self.controller.servers.init_all_servers()
|
||||
|
||||
return
|
||||
|
||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), command)
|
||||
self.controller.management.send_command(
|
||||
exec_user["user_id"], server_id, self.get_remote_ip(), command
|
||||
)
|
||||
|
||||
if page == "step1":
|
||||
|
||||
@@ -199,84 +279,123 @@ class ServerHandler(BaseHandler):
|
||||
user_roles = self.controller.roles.get_all_roles()
|
||||
else:
|
||||
user_roles = self.controller.roles.get_all_roles()
|
||||
server = bleach.clean(self.get_argument('server', ''))
|
||||
server_name = bleach.clean(self.get_argument('server_name', ''))
|
||||
min_mem = bleach.clean(self.get_argument('min_memory', ''))
|
||||
max_mem = bleach.clean(self.get_argument('max_memory', ''))
|
||||
port = bleach.clean(self.get_argument('port', ''))
|
||||
import_type = bleach.clean(self.get_argument('create_type', ''))
|
||||
import_server_path = bleach.clean(self.get_argument('server_path', ''))
|
||||
import_server_jar = bleach.clean(self.get_argument('server_jar', ''))
|
||||
server = bleach.clean(self.get_argument("server", ""))
|
||||
server_name = bleach.clean(self.get_argument("server_name", ""))
|
||||
min_mem = bleach.clean(self.get_argument("min_memory", ""))
|
||||
max_mem = bleach.clean(self.get_argument("max_memory", ""))
|
||||
port = bleach.clean(self.get_argument("port", ""))
|
||||
import_type = bleach.clean(self.get_argument("create_type", ""))
|
||||
import_server_path = bleach.clean(self.get_argument("server_path", ""))
|
||||
import_server_jar = bleach.clean(self.get_argument("server_jar", ""))
|
||||
server_parts = server.split("|")
|
||||
captured_roles = []
|
||||
for role in user_roles:
|
||||
if bleach.clean(self.get_argument(str(role), '')) == "on":
|
||||
if bleach.clean(self.get_argument(str(role), "")) == "on":
|
||||
captured_roles.append(role)
|
||||
|
||||
if not server_name:
|
||||
self.redirect("/panel/error?error=Server name cannot be empty!")
|
||||
return
|
||||
|
||||
if import_type == 'import_jar':
|
||||
good_path = self.controller.verify_jar_server(import_server_path, import_server_jar)
|
||||
if import_type == "import_jar":
|
||||
good_path = self.controller.verify_jar_server(
|
||||
import_server_path, import_server_jar
|
||||
)
|
||||
|
||||
if not good_path:
|
||||
self.redirect("/panel/error?error=Server path or Server Jar not found!")
|
||||
self.redirect(
|
||||
"/panel/error?error=Server path or Server Jar not found!"
|
||||
)
|
||||
return
|
||||
|
||||
new_server_id = self.controller.import_jar_server(server_name, import_server_path,import_server_jar, min_mem, max_mem, port)
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"imported a jar server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
elif import_type == 'import_zip':
|
||||
new_server_id = self.controller.import_jar_server(
|
||||
server_name,
|
||||
import_server_path,
|
||||
import_server_jar,
|
||||
min_mem,
|
||||
max_mem,
|
||||
port,
|
||||
)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f'imported a jar server named "{server_name}"',
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
elif import_type == "import_zip":
|
||||
# here import_server_path means the zip path
|
||||
zip_path = bleach.clean(self.get_argument('root_path'))
|
||||
good_path = helper.check_path_exists(zip_path)
|
||||
zip_path = bleach.clean(self.get_argument("root_path"))
|
||||
good_path = Helpers.check_path_exists(zip_path)
|
||||
if not good_path:
|
||||
self.redirect("/panel/error?error=Temp path not found!")
|
||||
return
|
||||
|
||||
new_server_id = self.controller.import_zip_server(server_name, zip_path, import_server_jar, min_mem, max_mem, port)
|
||||
new_server_id = self.controller.import_zip_server(
|
||||
server_name, zip_path, import_server_jar, min_mem, max_mem, port
|
||||
)
|
||||
if new_server_id == "false":
|
||||
self.redirect("/panel/error?error=Zip file not accessible! You can fix this permissions issue with" +
|
||||
f"sudo chown -R crafty:crafty {import_server_path} And sudo chmod 2775 -R {import_server_path}")
|
||||
self.redirect(
|
||||
f"/panel/error?error=Zip file not accessible! "
|
||||
f"You can fix this permissions issue with "
|
||||
f"sudo chown -R crafty:crafty {import_server_path} "
|
||||
f"And sudo chmod 2775 -R {import_server_path}"
|
||||
)
|
||||
return
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"imported a zip server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
#deletes temp dir
|
||||
file_helper.del_dirs(zip_path)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f'imported a zip server named "{server_name}"',
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
# deletes temp dir
|
||||
FileHelpers.del_dirs(zip_path)
|
||||
else:
|
||||
if len(server_parts) != 2:
|
||||
self.redirect("/panel/error?error=Invalid server data")
|
||||
return
|
||||
server_type, server_version = server_parts
|
||||
# TODO: add server type check here and call the correct server add functions if not a jar
|
||||
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
||||
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"created a {server_version} {str(server_type).capitalize()} server named \"{server_name}\"",
|
||||
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
# TODO: add server type check here and call the correct server
|
||||
# add functions if not a jar
|
||||
new_server_id = self.controller.create_jar_server(
|
||||
server_type, server_version, server_name, min_mem, max_mem, port
|
||||
)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f"created a {server_version} {str(server_type).capitalize()}"
|
||||
f' server named "{server_name}"',
|
||||
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
# These lines create a new Role for the Server with full permissions and add the user to it if he's not a superuser
|
||||
# These lines create a new Role for the Server with full permissions
|
||||
# and add the user to it if he's not a superuser
|
||||
if len(captured_roles) == 0:
|
||||
if not superuser:
|
||||
new_server_uuid = self.controller.servers.get_server_data_by_id(new_server_id).get("server_uuid")
|
||||
role_id = self.controller.roles.add_role(f"Creator of Server with uuid={new_server_uuid}")
|
||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
||||
self.controller.users.add_role_to_user(exec_user["user_id"], role_id)
|
||||
self.controller.crafty_perms.add_server_creation(exec_user["user_id"])
|
||||
new_server_uuid = self.controller.servers.get_server_data_by_id(
|
||||
new_server_id
|
||||
).get("server_uuid")
|
||||
role_id = self.controller.roles.add_role(
|
||||
f"Creator of Server with uuid={new_server_uuid}"
|
||||
)
|
||||
self.controller.server_perms.add_role_server(
|
||||
new_server_id, role_id, "11111111"
|
||||
)
|
||||
self.controller.users.add_role_to_user(
|
||||
exec_user["user_id"], role_id
|
||||
)
|
||||
self.controller.crafty_perms.add_server_creation(
|
||||
exec_user["user_id"]
|
||||
)
|
||||
|
||||
else:
|
||||
for role in captured_roles:
|
||||
role_id = role
|
||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
||||
self.controller.server_perms.add_role_server(
|
||||
new_server_id, role_id, "11111111"
|
||||
)
|
||||
|
||||
self.controller.stats.record_stats()
|
||||
self.controller.servers.stats.record_stats()
|
||||
self.redirect("/panel/dashboard")
|
||||
|
||||
if page == "bedrock_step1":
|
||||
@@ -284,82 +403,116 @@ class ServerHandler(BaseHandler):
|
||||
user_roles = self.controller.roles.get_all_roles()
|
||||
else:
|
||||
user_roles = self.controller.roles.get_all_roles()
|
||||
server = bleach.clean(self.get_argument('server', ''))
|
||||
server_name = bleach.clean(self.get_argument('server_name', ''))
|
||||
port = bleach.clean(self.get_argument('port', ''))
|
||||
import_type = bleach.clean(self.get_argument('create_type', ''))
|
||||
import_server_path = bleach.clean(self.get_argument('server_path', ''))
|
||||
import_server_exe = bleach.clean(self.get_argument('server_jar', ''))
|
||||
server = bleach.clean(self.get_argument("server", ""))
|
||||
server_name = bleach.clean(self.get_argument("server_name", ""))
|
||||
port = bleach.clean(self.get_argument("port", ""))
|
||||
import_type = bleach.clean(self.get_argument("create_type", ""))
|
||||
import_server_path = bleach.clean(self.get_argument("server_path", ""))
|
||||
import_server_exe = bleach.clean(self.get_argument("server_jar", ""))
|
||||
server_parts = server.split("|")
|
||||
captured_roles = []
|
||||
for role in user_roles:
|
||||
if bleach.clean(self.get_argument(str(role), '')) == "on":
|
||||
if bleach.clean(self.get_argument(str(role), "")) == "on":
|
||||
captured_roles.append(role)
|
||||
|
||||
if not server_name:
|
||||
self.redirect("/panel/error?error=Server name cannot be empty!")
|
||||
return
|
||||
|
||||
if import_type == 'import_jar':
|
||||
good_path = self.controller.verify_jar_server(import_server_path, import_server_exe)
|
||||
if import_type == "import_jar":
|
||||
good_path = self.controller.verify_jar_server(
|
||||
import_server_path, import_server_exe
|
||||
)
|
||||
|
||||
if not good_path:
|
||||
self.redirect("/panel/error?error=Server path or Server Jar not found!")
|
||||
self.redirect(
|
||||
"/panel/error?error=Server path or Server Jar not found!"
|
||||
)
|
||||
return
|
||||
|
||||
new_server_id = self.controller.import_bedrock_server(server_name, import_server_path,import_server_exe, port)
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"imported a jar server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
elif import_type == 'import_zip':
|
||||
new_server_id = self.controller.import_bedrock_server(
|
||||
server_name, import_server_path, import_server_exe, port
|
||||
)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f'imported a jar server named "{server_name}"',
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
elif import_type == "import_zip":
|
||||
# here import_server_path means the zip path
|
||||
zip_path = bleach.clean(self.get_argument('root_path'))
|
||||
good_path = helper.check_path_exists(zip_path)
|
||||
zip_path = bleach.clean(self.get_argument("root_path"))
|
||||
good_path = Helpers.check_path_exists(zip_path)
|
||||
if not good_path:
|
||||
self.redirect("/panel/error?error=Temp path not found!")
|
||||
return
|
||||
|
||||
new_server_id = self.controller.import_bedrock_zip_server(server_name, zip_path, import_server_exe, port)
|
||||
new_server_id = self.controller.import_bedrock_zip_server(
|
||||
server_name, zip_path, import_server_exe, port
|
||||
)
|
||||
if new_server_id == "false":
|
||||
self.redirect("/panel/error?error=Zip file not accessible! You can fix this permissions issue with" +
|
||||
f"sudo chown -R crafty:crafty {import_server_path} And sudo chmod 2775 -R {import_server_path}")
|
||||
self.redirect(
|
||||
f"/panel/error?error=Zip file not accessible! "
|
||||
f"You can fix this permissions issue with"
|
||||
f"sudo chown -R crafty:crafty {import_server_path} "
|
||||
f"And sudo chmod 2775 -R {import_server_path}"
|
||||
)
|
||||
return
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"imported a zip server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
#deletes temp dir
|
||||
file_helper.del_dirs(zip_path)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f'imported a zip server named "{server_name}"',
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
# deletes temp dir
|
||||
FileHelpers.del_dirs(zip_path)
|
||||
else:
|
||||
if len(server_parts) != 2:
|
||||
self.redirect("/panel/error?error=Invalid server data")
|
||||
return
|
||||
server_type, server_version = server_parts
|
||||
# TODO: add server type check here and call the correct server add functions if not a jar
|
||||
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
||||
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)
|
||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||
f"created a {server_version} {str(server_type).capitalize()} server named \"{server_name}\"",
|
||||
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||
new_server_id,
|
||||
self.get_remote_ip())
|
||||
# TODO: add server type check here and call the correct server
|
||||
# add functions if not a jar
|
||||
new_server_id = self.controller.create_jar_server(
|
||||
server_type, server_version, server_name, min_mem, max_mem, port
|
||||
)
|
||||
self.controller.management.add_to_audit_log(
|
||||
exec_user["user_id"],
|
||||
f"created a {server_version} {str(server_type).capitalize()} "
|
||||
f'server named "{server_name}"',
|
||||
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||
new_server_id,
|
||||
self.get_remote_ip(),
|
||||
)
|
||||
|
||||
# These lines create a new Role for the Server with full permissions and add the user to it if he's not a superuser
|
||||
# These lines create a new Role for the Server with full permissions
|
||||
# and add the user to it if he's not a superuser
|
||||
if len(captured_roles) == 0:
|
||||
if not superuser:
|
||||
new_server_uuid = self.controller.servers.get_server_data_by_id(new_server_id).get("server_uuid")
|
||||
role_id = self.controller.roles.add_role(f"Creator of Server with uuid={new_server_uuid}")
|
||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
||||
self.controller.users.add_role_to_user(exec_user["user_id"], role_id)
|
||||
self.controller.crafty_perms.add_server_creation(exec_user["user_id"])
|
||||
new_server_uuid = self.controller.servers.get_server_data_by_id(
|
||||
new_server_id
|
||||
).get("server_uuid")
|
||||
role_id = self.controller.roles.add_role(
|
||||
f"Creator of Server with uuid={new_server_uuid}"
|
||||
)
|
||||
self.controller.server_perms.add_role_server(
|
||||
new_server_id, role_id, "11111111"
|
||||
)
|
||||
self.controller.users.add_role_to_user(
|
||||
exec_user["user_id"], role_id
|
||||
)
|
||||
self.controller.crafty_perms.add_server_creation(
|
||||
exec_user["user_id"]
|
||||
)
|
||||
|
||||
else:
|
||||
for role in captured_roles:
|
||||
role_id = role
|
||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
||||
self.controller.server_perms.add_role_server(
|
||||
new_server_id, role_id, "11111111"
|
||||
)
|
||||
|
||||
self.controller.stats.record_stats()
|
||||
self.controller.servers.stats.record_stats()
|
||||
self.redirect("/panel/dashboard")
|
||||
|
||||
try:
|
||||
@@ -369,4 +522,4 @@ class ServerHandler(BaseHandler):
translate=self.translator.translate,
)
except RuntimeError:
self.redirect('/panel/dashboard')
self.redirect("/panel/dashboard")
@@ -1,17 +1,24 @@
from typing import ( Optional )
from typing import Optional

try:
import tornado.web

except ModuleNotFoundError as e:
from app.classes.shared.helpers import helper

helper.auto_installer_fix(e)


class CustomStaticHandler(tornado.web.StaticFileHandler):
def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
try:
return super().validate_absolute_path(root, absolute_path)
except tornado.web.HTTPError as error:
if 'HTTP 404: Not Found' in str(error):
if "HTTP 404: Not Found" in str(error):
self.set_status(404)
self.finish({'error':'NOT_FOUND', 'info':'The requested resource was not found on the server'})
self.finish(
{
"error": "NOT_FOUND",
"info": "The requested resource was not found on the server",
}
)
@@ -1,46 +1,53 @@
import logging

from app.classes.shared.helpers import helper
from app.classes.web.base_handler import BaseHandler

logger = logging.getLogger(__name__)


class StatusHandler(BaseHandler):
def get(self):
page_data = {}
page_data['lang'] = helper.get_setting('language')
page_data['lang_page'] = helper.getLangPage(helper.get_setting('language'))
page_data['servers'] = self.controller.servers.get_all_servers_stats()
page_data["lang"] = self.helper.get_setting("language")
page_data["lang_page"] = self.helper.get_lang_page(
self.helper.get_setting("language")
)
page_data["servers"] = self.controller.servers.get_all_servers_stats()
running = 0
for srv in page_data['servers']:
if srv['stats']['running']:
for srv in page_data["servers"]:
if srv["stats"]["running"]:
running += 1
server_data = srv.get('server_data', False)
server_id = server_data.get('server_id', False)
srv['raw_ping_result'] = self.controller.servers.get_server_stats_by_id(server_id)
if 'icon' not in srv['raw_ping_result']:
srv['raw_ping_result']['icon'] = False
server_data = srv.get("server_data", False)
server_id = server_data.get("server_id", False)
srv["raw_ping_result"] = self.controller.servers.get_server_stats_by_id(
server_id
)
if "icon" not in srv["raw_ping_result"]:
srv["raw_ping_result"]["icon"] = False

page_data['running'] = running
page_data["running"] = running

template = 'public/status.html'
template = "public/status.html"

self.render(
template,
data=page_data,
translate=self.translator.translate,
)
template,
data=page_data,
translate=self.translator.translate,
)

def post(self):
page_data = {}
page_data['servers'] = self.controller.servers.get_all_servers_stats()
for srv in page_data['servers']:
server_data = srv.get('server_data', False)
server_id = server_data.get('server_id', False)
srv['raw_ping_result'] = self.controller.servers.get_server_stats_by_id(server_id)
template = 'public/status.html'
page_data["servers"] = self.controller.servers.get_all_servers_stats()
for srv in page_data["servers"]:
server_data = srv.get("server_data", False)
server_id = server_data.get("server_id", False)
srv["raw_ping_result"] = self.controller.servers.get_server_stats_by_id(
server_id
)
template = "public/status.html"

self.render(
template,
data=page_data,
translate=self.translator.translate,
)
template,
data=page_data,
translate=self.translator.translate,
)
@@ -3,43 +3,55 @@ import sys
|
||||
import json
|
||||
import asyncio
|
||||
import logging
|
||||
import tornado.web
|
||||
import tornado.ioloop
|
||||
import tornado.log
|
||||
import tornado.template
|
||||
import tornado.escape
|
||||
import tornado.locale
|
||||
import tornado.httpserver
|
||||
|
||||
from app.classes.shared.translation import translation
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.main_controller import Controller
|
||||
from app.classes.web.file_handler import FileHandler
|
||||
from app.classes.web.public_handler import PublicHandler
|
||||
from app.classes.web.panel_handler import PanelHandler
|
||||
from app.classes.web.default_handler import DefaultHandler
|
||||
from app.classes.web.routes.api.api_handlers import api_handlers
|
||||
from app.classes.web.server_handler import ServerHandler
|
||||
from app.classes.web.ajax_handler import AjaxHandler
|
||||
from app.classes.web.api_handler import ServersStats, NodeStats
|
||||
from app.classes.web.api_handler import (
|
||||
ServersStats,
|
||||
NodeStats,
|
||||
ServerBackup,
|
||||
StartServer,
|
||||
StopServer,
|
||||
RestartServer,
|
||||
CreateUser,
|
||||
DeleteUser,
|
||||
ListServers,
|
||||
SendCommand,
|
||||
)
|
||||
from app.classes.web.websocket_handler import SocketHandler
|
||||
from app.classes.web.static_handler import CustomStaticHandler
|
||||
from app.classes.web.upload_handler import UploadHandler
|
||||
from app.classes.web.http_handler import HTTPHandler, HTTPHandlerPage
|
||||
from app.classes.web.status_handler import StatusHandler
|
||||
|
||||
try:
|
||||
import tornado.web
|
||||
import tornado.ioloop
|
||||
import tornado.log
|
||||
import tornado.template
|
||||
import tornado.escape
|
||||
import tornado.locale
|
||||
import tornado.httpserver
|
||||
|
||||
except ModuleNotFoundError as e:
|
||||
helper.auto_installer_fix(e)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Webserver:
|
||||
|
||||
def __init__(self, controller, tasks_manager):
|
||||
class Webserver:
|
||||
controller: Controller
|
||||
helper: Helpers
|
||||
|
||||
def __init__(self, helper, controller, tasks_manager):
|
||||
self.ioloop = None
|
||||
self.HTTP_Server = None
|
||||
self.HTTPS_Server = None
|
||||
self.http_server = None
|
||||
self.https_server = None
|
||||
self.helper = helper
|
||||
self.controller = controller
|
||||
self.tasks_manager = tasks_manager
|
||||
self._asyncio_patch()
|
||||
@@ -48,12 +60,12 @@ class Webserver:
|
||||
def log_function(handler):
|
||||
|
||||
info = {
|
||||
'Status_Code': handler.get_status(),
|
||||
'Method': handler.request.method,
|
||||
'URL': handler.request.uri,
|
||||
'Remote_IP': handler.request.remote_ip,
|
||||
"Status_Code": handler.get_status(),
|
||||
"Method": handler.request.method,
|
||||
"URL": handler.request.uri,
|
||||
"Remote_IP": handler.request.remote_ip,
|
||||
# pylint: disable=consider-using-f-string
|
||||
'Elapsed_Time': '%.2fms' % (handler.request.request_time() * 1000)
|
||||
"Elapsed_Time": "%.2fms" % (handler.request.request_time() * 1000),
|
||||
}
|
||||
|
||||
tornado.log.access_log.info(json.dumps(info, indent=4))
|
||||
@@ -61,39 +73,47 @@ class Webserver:
|
||||
@staticmethod
|
||||
def _asyncio_patch():
|
||||
"""
|
||||
As of Python 3.8 (on Windows), the asyncio default event handler has changed to "proactor",
|
||||
As of Python 3.8 (on Windows),
|
||||
the asyncio default event handler has changed to "proactor",
|
||||
where tornado expects the "selector" handler.
|
||||
|
||||
This function checks if the platform is windows and changes the event handler to suit.
|
||||
This function checks if the platform is windows and
|
||||
changes the event handler to suit.
|
||||
|
||||
(Taken from https://github.com/mkdocs/mkdocs/commit/cf2b136d4257787c0de51eba2d9e30ded5245b31)
|
||||
(Taken from
|
||||
https://github.com/mkdocs/mkdocs/commit/cf2b136d4257787c0de51eba2d9e30ded5245b31)
|
||||
"""
|
||||
logger.debug("Checking if asyncio patch is required")
|
||||
if sys.platform.startswith("win") and sys.version_info >= (3, 8):
|
||||
# pylint: disable=reimported,import-outside-toplevel,redefined-outer-name
|
||||
import asyncio
|
||||
|
||||
try:
|
||||
from asyncio import WindowsSelectorEventLoopPolicy
|
||||
except ImportError:
|
||||
logger.debug("asyncio patch isn't required") # Can't assign a policy which doesn't exist.
|
||||
logger.debug(
|
||||
"asyncio patch isn't required"
|
||||
) # Can't assign a policy which doesn't exist.
|
||||
else:
|
||||
if not isinstance(asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
|
||||
if not isinstance(
|
||||
asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy
|
||||
):
|
||||
asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
|
||||
logger.debug("Applied asyncio patch")
|
||||
|
||||
def run_tornado(self):
|
||||
|
||||
# let's verify we have an SSL cert
|
||||
helper.create_self_signed_cert()
|
||||
self.helper.create_self_signed_cert()
|
||||
|
||||
http_port = helper.get_setting('http_port')
|
||||
https_port = helper.get_setting('https_port')
|
||||
http_port = self.helper.get_setting("http_port")
|
||||
https_port = self.helper.get_setting("https_port")
|
||||
|
||||
debug_errors = helper.get_setting('show_errors')
|
||||
cookie_secret = helper.get_setting('cookie_secret')
|
||||
debug_errors = self.helper.get_setting("show_errors")
|
||||
cookie_secret = self.helper.get_setting("cookie_secret")
|
||||
|
||||
if cookie_secret is False:
|
||||
cookie_secret = helper.random_string_generator(32)
|
||||
cookie_secret = self.helper.random_string_generator(32)
|
||||
|
||||
if not http_port:
|
||||
http_port = 8000
|
||||
@@ -102,38 +122,58 @@ class Webserver:
|
||||
https_port = 8443
|
||||
|
||||
cert_objects = {
|
||||
'certfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.cert.pem'),
|
||||
'keyfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.key.pem'),
|
||||
"certfile": os.path.join(
|
||||
self.helper.config_dir, "web", "certs", "commander.cert.pem"
|
||||
),
|
||||
"keyfile": os.path.join(
|
||||
self.helper.config_dir, "web", "certs", "commander.key.pem"
|
||||
),
|
||||
}
|
||||
|
||||
logger.info(f"Starting Web Server on ports http:{http_port} https:{https_port}")
|
||||
|
||||
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||
|
||||
tornado.template.Loader('.')
|
||||
tornado.template.Loader(".")
|
||||
|
||||
# TODO: Remove because we don't and won't use
|
||||
tornado.locale.set_default_locale('en_EN')
|
||||
tornado.locale.set_default_locale("en_EN")
|
||||
|
||||
handler_args = {"controller": self.controller, "tasks_manager": self.tasks_manager, "translator": translation}
|
||||
handler_args = {
|
||||
"helper": self.helper,
|
||||
"controller": self.controller,
|
||||
"tasks_manager": self.tasks_manager,
|
||||
"translator": self.helper.translation,
|
||||
}
|
||||
handlers = [
|
||||
(r'/', DefaultHandler, handler_args),
|
||||
(r'/public/(.*)', PublicHandler, handler_args),
|
||||
(r'/panel/(.*)', PanelHandler, handler_args),
|
||||
(r'/server/(.*)', ServerHandler, handler_args),
|
||||
(r'/ajax/(.*)', AjaxHandler, handler_args),
|
||||
(r'/files/(.*)', FileHandler, handler_args),
|
||||
(r'/api/stats/servers', ServersStats, handler_args),
|
||||
(r'/api/stats/node', NodeStats, handler_args),
|
||||
(r'/ws', SocketHandler, handler_args),
|
||||
(r'/upload', UploadHandler, handler_args),
|
||||
(r'/status', StatusHandler, handler_args)
|
||||
]
|
||||
(r"/", DefaultHandler, handler_args),
|
||||
(r"/public/(.*)", PublicHandler, handler_args),
|
||||
(r"/panel/(.*)", PanelHandler, handler_args),
|
||||
(r"/server/(.*)", ServerHandler, handler_args),
|
||||
(r"/ajax/(.*)", AjaxHandler, handler_args),
|
||||
(r"/files/(.*)", FileHandler, handler_args),
|
||||
(r"/ws", SocketHandler, handler_args),
|
||||
(r"/upload", UploadHandler, handler_args),
|
||||
(r"/status", StatusHandler, handler_args),
|
||||
# API Routes V1
|
||||
(r"/api/v1/stats/servers", ServersStats, handler_args),
|
||||
(r"/api/v1/stats/node", NodeStats, handler_args),
|
||||
(r"/api/v1/server/send_command", SendCommand, handler_args),
|
||||
(r"/api/v1/server/backup", ServerBackup, handler_args),
|
||||
(r"/api/v1/server/start", StartServer, handler_args),
|
||||
(r"/api/v1/server/stop", StopServer, handler_args),
|
||||
(r"/api/v1/server/restart", RestartServer, handler_args),
|
||||
(r"/api/v1/list_servers", ListServers, handler_args),
|
||||
(r"/api/v1/users/create_user", CreateUser, handler_args),
|
||||
(r"/api/v1/users/delete_user", DeleteUser, handler_args),
|
||||
# API Routes V2
|
||||
*api_handlers(handler_args),
|
||||
]
|
||||
|
||||
app = tornado.web.Application(
|
||||
handlers,
|
||||
template_path=os.path.join(helper.webroot, 'templates'),
|
||||
static_path=os.path.join(helper.webroot, 'static'),
|
||||
template_path=os.path.join(self.helper.webroot, "templates"),
|
||||
static_path=os.path.join(self.helper.webroot, "static"),
|
||||
debug=debug_errors,
|
||||
cookie_secret=cookie_secret,
|
||||
xsrf_cookies=True,
|
||||
@@ -144,48 +184,56 @@ class Webserver:
|
||||
static_handler_class=CustomStaticHandler,
|
||||
serve_traceback=debug_errors,
|
||||
)
|
||||
HTTPhanders = [(r'/', HTTPHandler, handler_args),
|
||||
(r'/public/(.*)', HTTPHandlerPage, handler_args),
|
||||
(r'/panel/(.*)', HTTPHandlerPage, handler_args),
|
||||
(r'/server/(.*)', HTTPHandlerPage, handler_args),
|
||||
(r'/ajax/(.*)', HTTPHandlerPage, handler_args),
|
||||
(r'/api/stats/servers', HTTPHandlerPage, handler_args),
|
||||
(r'/api/stats/node', HTTPHandlerPage, handler_args),
|
||||
(r'/ws', HTTPHandlerPage, handler_args),
|
||||
(r'/upload', HTTPHandlerPage, handler_args)]
|
||||
HTTPapp = tornado.web.Application(
|
||||
HTTPhanders,
|
||||
template_path=os.path.join(helper.webroot, 'templates'),
|
||||
static_path=os.path.join(helper.webroot, 'static'),
|
||||
http_handers = [
|
||||
(r"/", HTTPHandler, handler_args),
|
||||
(r"/public/(.*)", HTTPHandlerPage, handler_args),
|
||||
(r"/panel/(.*)", HTTPHandlerPage, handler_args),
|
||||
(r"/server/(.*)", HTTPHandlerPage, handler_args),
|
||||
(r"/ajax/(.*)", HTTPHandlerPage, handler_args),
|
||||
(r"/api/stats/servers", HTTPHandlerPage, handler_args),
|
||||
(r"/api/stats/node", HTTPHandlerPage, handler_args),
|
||||
(r"/ws", HTTPHandlerPage, handler_args),
|
||||
(r"/upload", HTTPHandlerPage, handler_args),
|
||||
]
|
||||
http_app = tornado.web.Application(
|
||||
http_handers,
|
||||
template_path=os.path.join(self.helper.webroot, "templates"),
|
||||
static_path=os.path.join(self.helper.webroot, "static"),
|
||||
debug=debug_errors,
|
||||
cookie_secret=cookie_secret,
|
||||
xsrf_cookies=True,
|
||||
autoreload=False,
|
||||
log_function=self.log_function,
|
||||
default_handler_class = HTTPHandler,
|
||||
default_handler_class=HTTPHandler,
|
||||
login_url="/login",
|
||||
serve_traceback=debug_errors,
|
||||
)
|
||||
|
||||
self.HTTP_Server = tornado.httpserver.HTTPServer(HTTPapp)
|
||||
self.HTTP_Server.listen(http_port)
|
||||
self.http_server = tornado.httpserver.HTTPServer(http_app)
|
||||
self.http_server.listen(http_port)
|
||||
|
||||
self.HTTPS_Server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
|
||||
self.HTTPS_Server.listen(https_port)
|
||||
self.https_server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
|
||||
self.https_server.listen(https_port)
|
||||
|
||||
logger.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
|
||||
console.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
|
||||
logger.info(
|
||||
f"https://{Helpers.get_local_ip()}:{https_port} "
|
||||
f"is up and ready for connections."
|
||||
)
|
||||
Console.info(
|
||||
f"https://{Helpers.get_local_ip()}:{https_port} "
|
||||
f"is up and ready for connections."
|
||||
)
|
||||
|
||||
console.info("Server Init Complete: Listening For Connections:")
|
||||
Console.info("Server Init Complete: Listening For Connections!")
|
||||
|
||||
self.ioloop = tornado.ioloop.IOLoop.current()
|
||||
self.ioloop.start()
|
||||
|
||||
def stop_web_server(self):
|
||||
logger.info("Shutting Down Web Server")
|
||||
console.info("Shutting Down Web Server")
|
||||
Console.info("Shutting Down Web Server")
|
||||
self.ioloop.stop()
|
||||
self.HTTP_Server.stop()
|
||||
self.HTTPS_Server.stop()
|
||||
self.http_server.stop()
|
||||
self.https_server.stop()
|
||||
logger.info("Web Server Stopped")
|
||||
console.info("Web Server Stopped")
|
||||
Console.info("Web Server Stopped")
|
||||
|
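The routing tables above hand the same `handler_args` dict to every route; a minimal sketch (names assumed, not part of the merge) of how Tornado delivers that dict to a handler's `initialize()`:

# Sketch only: illustrates how the handler_args built in run_tornado()
# reach a handler. Tornado passes the third element of each route tuple
# as keyword arguments to RequestHandler.initialize().
import tornado.web


class ExampleHandler(tornado.web.RequestHandler):
    def initialize(self, helper=None, controller=None, tasks_manager=None, translator=None):
        # keyword names match the keys of the handler_args dict
        self.helper = helper
        self.controller = controller
        self.tasks_manager = tasks_manager
        self.translator = translator

    def get(self):
        self.write({"status": "ok"})


# handler_args = {"helper": ..., "controller": ..., "tasks_manager": ..., "translator": ...}
# app = tornado.web.Application([(r"/example", ExampleHandler, handler_args)])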
@@ -1,79 +1,133 @@
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import tornado.web
|
||||
import tornado.options
|
||||
import tornado.httpserver
|
||||
|
||||
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||
from app.classes.shared.helpers import helper
|
||||
from app.classes.shared.console import console
|
||||
from app.classes.models.server_permissions import EnumPermissionsServer
|
||||
from app.classes.shared.console import Console
|
||||
from app.classes.shared.helpers import Helpers
|
||||
from app.classes.shared.main_controller import Controller
|
||||
from app.classes.web.websocket_helper import websocket_helper
|
||||
from app.classes.web.base_handler import BaseHandler
|
||||
|
||||
try:
|
||||
import tornado.web
|
||||
import tornado.options
|
||||
import tornado.httpserver
|
||||
|
||||
except ModuleNotFoundError as ex:
|
||||
helper.auto_installer_fix(ex)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Class & Function Defination
|
||||
MAX_STREAMED_SIZE = 1024 * 1024 * 1024
|
||||
|
||||
@tornado.web.stream_request_body
|
||||
class UploadHandler(BaseHandler):
|
||||
|
||||
# noinspection PyAttributeOutsideInit
|
||||
def initialize(self, controller: Controller=None, tasks_manager=None, translator=None):
|
||||
def initialize(
|
||||
self,
|
||||
helper: Helpers = None,
|
||||
controller: Controller = None,
|
||||
tasks_manager=None,
|
||||
translator=None,
|
||||
):
|
||||
self.helper = helper
|
||||
self.controller = controller
|
||||
self.tasks_manager = tasks_manager
|
||||
self.translator = translator
|
||||
|
||||
def prepare(self):
|
||||
self.do_upload = True
|
||||
# pylint: disable=unused-variable
|
||||
api_key, token_data, exec_user = self.current_user
|
||||
server_id = self.get_argument('server_id', None)
|
||||
superuser = exec_user['superuser']
|
||||
# Class & Function Defination
|
||||
api_key, _token_data, exec_user = self.current_user
|
||||
server_id = self.get_argument("server_id", None)
|
||||
superuser = exec_user["superuser"]
|
||||
if api_key is not None:
|
||||
superuser = superuser and api_key.superuser
|
||||
user_id = exec_user['user_id']
|
||||
user_id = exec_user["user_id"]
|
||||
stream_size_value = self.helper.get_setting("stream_size_GB")
|
||||
|
||||
max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
|
||||
|
||||
self.content_len = int(self.request.headers.get("Content-Length"))
|
||||
if self.content_len > max_streamed_size:
|
||||
logger.error(
|
||||
f"User with ID {user_id} attempted to upload a file that"
|
||||
f" exceeded the max body size."
|
||||
)
|
||||
self.helper.websocket_helper.broadcast_user(
|
||||
user_id,
|
||||
"send_start_error",
|
||||
{
|
||||
"error": self.helper.translation.translate(
|
||||
"error",
|
||||
"fileTooLarge",
|
||||
self.controller.users.get_user_lang_by_id(user_id),
|
||||
),
|
||||
},
|
||||
)
|
||||
return
|
||||
self.do_upload = True
|
||||
|
||||
if superuser:
|
||||
exec_user_server_permissions = self.controller.server_perms.list_defined_permissions()
|
||||
exec_user_server_permissions = (
|
||||
self.controller.server_perms.list_defined_permissions()
|
||||
)
|
||||
elif api_key is not None:
|
||||
exec_user_server_permissions = self.controller.server_perms.get_api_key_permissions_list(api_key, server_id)
|
||||
exec_user_server_permissions = (
|
||||
self.controller.server_perms.get_api_key_permissions_list(
|
||||
api_key, server_id
|
||||
)
|
||||
)
|
||||
else:
|
||||
exec_user_server_permissions = self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id)
|
||||
exec_user_server_permissions = (
|
||||
self.controller.server_perms.get_user_id_permissions_list(
|
||||
exec_user["user_id"], server_id
|
||||
)
|
||||
)
|
||||
|
||||
server_id = self.request.headers.get('X-ServerId', None)
|
||||
server_id = self.request.headers.get("X-ServerId", None)
|
||||
|
||||
if user_id is None:
|
||||
logger.warning('User ID not found in upload handler call')
|
||||
console.warning('User ID not found in upload handler call')
|
||||
logger.warning("User ID not found in upload handler call")
|
||||
Console.warning("User ID not found in upload handler call")
|
||||
self.do_upload = False
|
||||
|
||||
if server_id is None:
|
||||
logger.warning('Server ID not found in upload handler call')
|
||||
console.warning('Server ID not found in upload handler call')
|
||||
logger.warning("Server ID not found in upload handler call")
|
||||
Console.warning("Server ID not found in upload handler call")
|
||||
self.do_upload = False
|
||||
|
||||
if Enum_Permissions_Server.Files not in exec_user_server_permissions:
|
||||
logger.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
||||
console.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
||||
if EnumPermissionsServer.FILES not in exec_user_server_permissions:
|
||||
logger.warning(
|
||||
f"User {user_id} tried to upload a file to "
|
||||
f"{server_id} without permissions!"
|
||||
)
|
||||
Console.warning(
|
||||
f"User {user_id} tried to upload a file to "
|
||||
f"{server_id} without permissions!"
|
||||
)
|
||||
self.do_upload = False
|
||||
|
||||
path = self.request.headers.get('X-Path', None)
|
||||
filename = self.request.headers.get('X-FileName', None)
|
||||
path = self.request.headers.get("X-Path", None)
|
||||
filename = self.request.headers.get("X-FileName", None)
|
||||
full_path = os.path.join(path, filename)
|
||||
|
||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path):
|
||||
print(user_id, server_id, helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path)
|
||||
logger.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!')
|
||||
console.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!')
|
||||
if not Helpers.in_path(
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
full_path,
|
||||
):
|
||||
print(
|
||||
user_id,
|
||||
server_id,
|
||||
Helpers.get_os_understandable_path(
|
||||
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||
),
|
||||
full_path,
|
||||
)
|
||||
logger.warning(
|
||||
f"User {user_id} tried to upload a file to {server_id} "
|
||||
f"but the path is not inside of the server!"
|
||||
)
|
||||
Console.warning(
|
||||
f"User {user_id} tried to upload a file to {server_id} "
|
||||
f"but the path is not inside of the server!"
|
||||
)
|
||||
self.do_upload = False
|
||||
|
||||
if self.do_upload:
|
||||
@@ -83,23 +137,23 @@ class UploadHandler(BaseHandler):
|
||||
logger.error(f"Upload failed with error: {e}")
|
||||
self.do_upload = False
|
||||
# If max_body_size is not set, you cannot upload files > 100MB
|
||||
self.request.connection.set_max_body_size(MAX_STREAMED_SIZE)
|
||||
self.request.connection.set_max_body_size(max_streamed_size)
|
||||
|
||||
def post(self):
|
||||
logger.info("Upload completed")
|
||||
files_left = int(self.request.headers.get('X-Files-Left', None))
|
||||
files_left = int(self.request.headers.get("X-Files-Left", None))
|
||||
|
||||
if self.do_upload:
|
||||
time.sleep(5)
|
||||
if files_left == 0:
|
||||
websocket_helper.broadcast('close_upload_box', 'success')
|
||||
self.finish('success') # Nope, I'm sending "success"
|
||||
self.helper.websocket_helper.broadcast("close_upload_box", "success")
|
||||
self.finish("success") # Nope, I'm sending "success"
|
||||
self.f.close()
|
||||
else:
|
||||
time.sleep(5)
|
||||
if files_left == 0:
|
||||
websocket_helper.broadcast('close_upload_box', 'error')
|
||||
self.finish('error')
|
||||
self.helper.websocket_helper.broadcast("close_upload_box", "error")
|
||||
self.finish("error")
|
||||
|
||||
def data_received(self, chunk):
|
||||
if self.do_upload:
|
||||
|
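The upload handler above streams the request body and raises Tornado's default body cap from the new `stream_size_GB` setting; a minimal sketch of the same pattern (setting value assumed, not part of the merge):

# Sketch only: streamed upload in the same style as UploadHandler,
# lifting the body-size cap the way prepare() does with stream_size_GB.
import tornado.web


@tornado.web.stream_request_body
class SketchUploadHandler(tornado.web.RequestHandler):
    def prepare(self):
        stream_size_gb = 1  # stands in for helper.get_setting("stream_size_GB")
        max_streamed_size = 1024 * 1024 * 1024 * stream_size_gb
        # without this call Tornado rejects bodies over its default limit
        self.request.connection.set_max_body_size(max_streamed_size)
        self.received = 0

    def data_received(self, chunk):
        # chunks arrive incrementally instead of being buffered whole in memory
        self.received += len(chunk)

    def post(self):
        self.finish("success")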
@@ -2,19 +2,13 @@ import json
import logging
import asyncio
from urllib.parse import parse_qsl
import tornado.websocket

from app.classes.shared.authentication import authentication
from app.classes.shared.helpers import helper
from app.classes.web.websocket_helper import websocket_helper

try:
import tornado.websocket

except ModuleNotFoundError as e:
helper.auto_installer_fix(e)
from app.classes.shared.helpers import Helpers

logger = logging.getLogger(__name__)


class SocketHandler(tornado.websocket.WebSocketHandler):
page = None
page_query_params = None
@@ -23,63 +17,81 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
translator = None
io_loop = None

def initialize(self, controller=None, tasks_manager=None, translator=None):
def initialize(
self, helper=None, controller=None, tasks_manager=None, translator=None
):
self.helper = helper
self.controller = controller
self.tasks_manager = tasks_manager
self.translator = translator
self.io_loop = tornado.ioloop.IOLoop.current()

def get_remote_ip(self):
remote_ip = self.request.headers.get("X-Real-IP") or \
self.request.headers.get("X-Forwarded-For") or \
self.request.remote_ip
remote_ip = (
self.request.headers.get("X-Real-IP")
or self.request.headers.get("X-Forwarded-For")
or self.request.remote_ip
)
return remote_ip

def get_user_id(self):
_, _, user = authentication.check(self.get_cookie('token'))
return user['user_id']
_, _, user = self.controller.authentication.check(self.get_cookie("token"))
return user["user_id"]

def check_auth(self):
return authentication.check_bool(self.get_cookie('token'))
return self.controller.authentication.check_bool(self.get_cookie("token"))

# pylint: disable=arguments-differ
def open(self):
logger.debug('Checking WebSocket authentication')
logger.debug("Checking WebSocket authentication")
if self.check_auth():
self.handle()
else:
websocket_helper.send_message(self, 'notification', 'Not authenticated for WebSocket connection')
self.helper.websocket_helper.send_message(
self, "notification", "Not authenticated for WebSocket connection"
)
self.close()
self.controller.management.add_to_audit_log_raw('unknown',
0, 0,
'Someone tried to connect via WebSocket without proper authentication',
self.get_remote_ip())
websocket_helper.broadcast('notification', 'Someone tried to connect via WebSocket without proper authentication')
logger.warning('Someone tried to connect via WebSocket without proper authentication')
self.controller.management.add_to_audit_log_raw(
"unknown",
0,
0,
"Someone tried to connect via WebSocket without proper authentication",
self.get_remote_ip(),
)
self.helper.websocket_helper.broadcast(
"notification",
"Someone tried to connect via WebSocket without proper authentication",
)
logger.warning(
"Someone tried to connect via WebSocket without proper authentication"
)

def handle(self):
self.page = self.get_query_argument('page')
self.page_query_params = dict(parse_qsl(helper.remove_prefix(
self.get_query_argument('page_query_params'),
'?'
)))
websocket_helper.add_client(self)
logger.debug('Opened WebSocket connection')
self.page = self.get_query_argument("page")
self.page_query_params = dict(
parse_qsl(
Helpers.remove_prefix(self.get_query_argument("page_query_params"), "?")
)
)
self.helper.websocket_helper.add_client(self)
logger.debug("Opened WebSocket connection")

# pylint: disable=arguments-renamed
@staticmethod
def on_message(raw_message):

logger.debug(f'Got message from WebSocket connection {raw_message}')
logger.debug(f"Got message from WebSocket connection {raw_message}")
message = json.loads(raw_message)
logger.debug(f"Event Type: {message['event']}, Data: {message['data']}")

def on_close(self):
websocket_helper.remove_client(self)
logger.debug('Closed WebSocket connection')
self.helper.websocket_helper.remove_client(self)
logger.debug("Closed WebSocket connection")

async def write_message_int(self, message):
self.write_message(message)

def write_message_helper(self, message):
asyncio.run_coroutine_threadsafe(self.write_message_int(message), self.io_loop.asyncio_loop)
asyncio.run_coroutine_threadsafe(
self.write_message_int(message), self.io_loop.asyncio_loop
)
@@ -1,12 +1,14 @@
import json
import logging

from app.classes.shared.console import console
from app.classes.shared.console import Console

logger = logging.getLogger(__name__)


class WebSocketHelper:
def __init__(self):
def __init__(self, helper):
self.helper = helper
self.clients = set()

def add_client(self, client):
@@ -15,19 +17,26 @@ class WebSocketHelper:
def remove_client(self, client):
self.clients.remove(client)

# pylint: disable=no-self-use
def send_message(self, client, event_type: str, data):
def send_message(
self, client, event_type: str, data
): # pylint: disable=no-self-use
if client.check_auth():
message = str(json.dumps({'event': event_type, 'data': data}))
message = str(json.dumps({"event": event_type, "data": data}))
client.write_message_helper(message)

def broadcast(self, event_type: str, data):
logger.debug(f"Sending to {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
logger.debug(
f"Sending to {len(self.clients)} clients: "
f"{json.dumps({'event': event_type, 'data': data})}"
)
for client in self.clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
logger.exception(f'Error caught while sending WebSocket message to {client.get_remote_ip()} {e}')
logger.exception(
f"Error caught while sending WebSocket message to "
f"{client.get_remote_ip()} {e}"
)

def broadcast_page(self, page: str, event_type: str, data):
def filter_fn(client):
@@ -51,7 +60,9 @@ class WebSocketHelper:

self.broadcast_with_fn(filter_fn, event_type, data)

def broadcast_user_page_params(self, page: str, params: dict, user_id: str, event_type: str, data):
def broadcast_user_page_params(
self, page: str, params: dict, user_id: str, event_type: str, data
):
def filter_fn(client):
if client.get_user_id() != user_id:
return False
@@ -77,18 +88,22 @@ class WebSocketHelper:

def broadcast_with_fn(self, filter_fn, event_type: str, data):
clients = list(filter(filter_fn, self.clients))
logger.debug(f"Sending to {len(clients)} out of {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
logger.debug(
f"Sending to {len(clients)} out of {len(self.clients)} "
f"clients: {json.dumps({'event': event_type, 'data': data})}"
)

for client in clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
logger.exception(f'Error catched while sending WebSocket message to {client.get_remote_ip()} {e}')
logger.exception(
f"Error catched while sending WebSocket message to "
f"{client.get_remote_ip()} {e}"
)

def disconnect_all(self):
console.info('Disconnecting WebSocket clients')
Console.info("Disconnecting WebSocket clients")
for client in self.clients:
client.close()
console.info('Disconnected WebSocket clients')

websocket_helper = WebSocketHelper()
Console.info("Disconnected WebSocket clients")
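With the module-level `websocket_helper = WebSocketHelper()` singleton removed above, the instance now travels on the shared helper object; a minimal sketch of the new wiring (how it is attached is an assumption, only the `helper.websocket_helper` attribute is implied by the handlers in this merge):

# Sketch only: WebSocketHelper is constructed with the shared Helpers
# instance and attached to it, instead of living as a module-level singleton.
from app.classes.shared.helpers import Helpers
from app.classes.web.websocket_helper import WebSocketHelper

helper = Helpers()  # assumes Helpers() needs no arguments here
helper.websocket_helper = WebSocketHelper(helper)

# handlers then reach it as self.helper.websocket_helper, e.g.:
helper.websocket_helper.broadcast("notification", {"message": "server started"})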
@@ -1,5 +1,4 @@
{
"https": true,
"http_port": 8000,
"https_port": 8443,
"language": "en_EN",
@@ -14,7 +13,15 @@
"virtual_terminal_lines": 70,
"max_log_lines": 700,
"max_audit_entries": 300,
"disabled_language_files": ["lol_EN.json", ""],
"keywords": ["help", "chunk"],
"allow_nsfw_profile_pictures": false
"disabled_language_files": [
"lol_EN.json",
""
],
"stream_size_GB": 1,
"keywords": [
"help",
"chunk"
],
"allow_nsfw_profile_pictures": false,
"enable_user_self_delete": false
}
@@ -1,6 +1,6 @@
{
"major": 4,
"minor": 0,
"sub": 0,
"meta": "alpha.3.5"
}
"sub": 0,
"meta": "beta"
}
app/frontend/static/assets/css/jquery-ui.css (vendored, new file, 178 lines)
@@ -0,0 +1,178 @@
/*! jQuery UI - v1.13.1 - 2022-03-14
|
||||
* http://jqueryui.com
|
||||
* Includes: draggable.css, core.css, resizable.css, selectable.css, sortable.css
|
||||
* Copyright jQuery Foundation and other contributors; Licensed MIT */
|
||||
|
||||
.ui-draggable-handle {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
/* Layout helpers
|
||||
----------------------------------*/
|
||||
.ui-helper-hidden {
|
||||
display: none;
|
||||
}
|
||||
.ui-helper-hidden-accessible {
|
||||
border: 0;
|
||||
clip: rect(0 0 0 0);
|
||||
height: 1px;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
padding: 0;
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
}
|
||||
.ui-helper-reset {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
outline: 0;
|
||||
line-height: 1.3;
|
||||
text-decoration: none;
|
||||
font-size: 100%;
|
||||
list-style: none;
|
||||
}
|
||||
.ui-helper-clearfix:before,
|
||||
.ui-helper-clearfix:after {
|
||||
content: "";
|
||||
display: table;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.ui-helper-clearfix:after {
|
||||
clear: both;
|
||||
}
|
||||
.ui-helper-zfix {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
top: 0;
|
||||
left: 0;
|
||||
position: absolute;
|
||||
opacity: 0;
|
||||
-ms-filter: "alpha(opacity=0)"; /* support: IE8 */
|
||||
}
|
||||
|
||||
.ui-front {
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
|
||||
/* Interaction Cues
|
||||
----------------------------------*/
|
||||
.ui-state-disabled {
|
||||
cursor: default !important;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
|
||||
/* Icons
|
||||
----------------------------------*/
|
||||
.ui-icon {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
margin-top: -.25em;
|
||||
position: relative;
|
||||
text-indent: -99999px;
|
||||
overflow: hidden;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
|
||||
.ui-widget-icon-block {
|
||||
left: 50%;
|
||||
margin-left: -8px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
/* Misc visuals
|
||||
----------------------------------*/
|
||||
|
||||
/* Overlays */
|
||||
.ui-widget-overlay {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable {
|
||||
position: relative;
|
||||
}
|
||||
.ui-resizable-handle {
|
||||
position: absolute;
|
||||
font-size: 0.1px;
|
||||
display: block;
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
.ui-resizable-disabled .ui-resizable-handle,
|
||||
.ui-resizable-autohide .ui-resizable-handle {
|
||||
display: none;
|
||||
}
|
||||
.ui-resizable-n {
|
||||
cursor: n-resize;
|
||||
height: 7px;
|
||||
width: 100%;
|
||||
top: -5px;
|
||||
left: 0;
|
||||
}
|
||||
.ui-resizable-s {
|
||||
cursor: s-resize;
|
||||
height: 7px;
|
||||
width: 100%;
|
||||
bottom: -5px;
|
||||
left: 0;
|
||||
}
|
||||
.ui-resizable-e {
|
||||
cursor: e-resize;
|
||||
width: 7px;
|
||||
right: -5px;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable-w {
|
||||
cursor: w-resize;
|
||||
width: 7px;
|
||||
left: -5px;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable-se {
|
||||
cursor: se-resize;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
right: 1px;
|
||||
bottom: 1px;
|
||||
}
|
||||
.ui-resizable-sw {
|
||||
cursor: sw-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
left: -5px;
|
||||
bottom: -5px;
|
||||
}
|
||||
.ui-resizable-nw {
|
||||
cursor: nw-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
left: -5px;
|
||||
top: -5px;
|
||||
}
|
||||
.ui-resizable-ne {
|
||||
cursor: ne-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
right: -5px;
|
||||
top: -5px;
|
||||
}
|
||||
.ui-selectable {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
.ui-selectable-helper {
|
||||
position: absolute;
|
||||
z-index: 100;
|
||||
border: 1px dotted black;
|
||||
}
|
||||
.ui-sortable-handle {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
173 app/frontend/static/assets/css/jquery-ui.structure.css vendored Normal file
@ -0,0 +1,173 @@
|
||||
.ui-draggable-handle {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
/* Layout helpers
|
||||
----------------------------------*/
|
||||
.ui-helper-hidden {
|
||||
display: none;
|
||||
}
|
||||
.ui-helper-hidden-accessible {
|
||||
border: 0;
|
||||
clip: rect(0 0 0 0);
|
||||
height: 1px;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
padding: 0;
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
}
|
||||
.ui-helper-reset {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: 0;
|
||||
outline: 0;
|
||||
line-height: 1.3;
|
||||
text-decoration: none;
|
||||
font-size: 100%;
|
||||
list-style: none;
|
||||
}
|
||||
.ui-helper-clearfix:before,
|
||||
.ui-helper-clearfix:after {
|
||||
content: "";
|
||||
display: table;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.ui-helper-clearfix:after {
|
||||
clear: both;
|
||||
}
|
||||
.ui-helper-zfix {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
top: 0;
|
||||
left: 0;
|
||||
position: absolute;
|
||||
opacity: 0;
|
||||
-ms-filter: "alpha(opacity=0)"; /* support: IE8 */
|
||||
}
|
||||
|
||||
.ui-front {
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
|
||||
/* Interaction Cues
|
||||
----------------------------------*/
|
||||
.ui-state-disabled {
|
||||
cursor: default !important;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
|
||||
/* Icons
|
||||
----------------------------------*/
|
||||
.ui-icon {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
margin-top: -.25em;
|
||||
position: relative;
|
||||
text-indent: -99999px;
|
||||
overflow: hidden;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
|
||||
.ui-widget-icon-block {
|
||||
left: 50%;
|
||||
margin-left: -8px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
/* Misc visuals
|
||||
----------------------------------*/
|
||||
|
||||
/* Overlays */
|
||||
.ui-widget-overlay {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable {
|
||||
position: relative;
|
||||
}
|
||||
.ui-resizable-handle {
|
||||
position: absolute;
|
||||
font-size: 0.1px;
|
||||
display: block;
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
.ui-resizable-disabled .ui-resizable-handle,
|
||||
.ui-resizable-autohide .ui-resizable-handle {
|
||||
display: none;
|
||||
}
|
||||
.ui-resizable-n {
|
||||
cursor: n-resize;
|
||||
height: 7px;
|
||||
width: 100%;
|
||||
top: -5px;
|
||||
left: 0;
|
||||
}
|
||||
.ui-resizable-s {
|
||||
cursor: s-resize;
|
||||
height: 7px;
|
||||
width: 100%;
|
||||
bottom: -5px;
|
||||
left: 0;
|
||||
}
|
||||
.ui-resizable-e {
|
||||
cursor: e-resize;
|
||||
width: 7px;
|
||||
right: -5px;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable-w {
|
||||
cursor: w-resize;
|
||||
width: 7px;
|
||||
left: -5px;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
}
|
||||
.ui-resizable-se {
|
||||
cursor: se-resize;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
right: 1px;
|
||||
bottom: 1px;
|
||||
}
|
||||
.ui-resizable-sw {
|
||||
cursor: sw-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
left: -5px;
|
||||
bottom: -5px;
|
||||
}
|
||||
.ui-resizable-nw {
|
||||
cursor: nw-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
left: -5px;
|
||||
top: -5px;
|
||||
}
|
||||
.ui-resizable-ne {
|
||||
cursor: ne-resize;
|
||||
width: 9px;
|
||||
height: 9px;
|
||||
right: -5px;
|
||||
top: -5px;
|
||||
}
|
||||
.ui-selectable {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
||||
.ui-selectable-helper {
|
||||
position: absolute;
|
||||
z-index: 100;
|
||||
border: 1px dotted black;
|
||||
}
|
||||
.ui-sortable-handle {
|
||||
-ms-touch-action: none;
|
||||
touch-action: none;
|
||||
}
|
@ -1,167 +0,0 @@
|
||||
(function($) {
|
||||
'use strict';
|
||||
$(function() {
|
||||
if ($('#dashoard-line-chart').length) {
|
||||
var lineChartCanvas = $("#dashoard-line-chart").get(0).getContext("2d");
|
||||
var data = {
|
||||
labels: ["2013", "2014", "2014", "2015", "2016", "2017", "2018"],
|
||||
datasets: [{
|
||||
label: 'Profit',
|
||||
data: [1, 3, 7, 1, 3, 5, 3, 2, 6, 8, 8, 8, 5],
|
||||
borderColor: [
|
||||
'rgba(231, 72, 126, 1)'
|
||||
],
|
||||
borderWidth: 2,
|
||||
fill: false
|
||||
}]
|
||||
};
|
||||
var options = {
|
||||
scales: {
|
||||
yAxes: [{
|
||||
display: false
|
||||
}],
|
||||
xAxes: [{
|
||||
display: false
|
||||
}]
|
||||
},
|
||||
legend: {
|
||||
display: false
|
||||
},
|
||||
elements: {
|
||||
point: {
|
||||
radius: 0
|
||||
}
|
||||
},
|
||||
stepsize: 100
|
||||
};
|
||||
var lineChart = new Chart(lineChartCanvas, {
|
||||
type: 'line',
|
||||
data: data,
|
||||
options: options
|
||||
});
|
||||
}
|
||||
if ($("#dashboard-bar-chart").length) {
|
||||
var CurrentChartCanvas = $("#dashboard-bar-chart").get(0).getContext("2d");
|
||||
var CurrentChart = new Chart(CurrentChartCanvas, {
|
||||
type: 'bar',
|
||||
data: {
|
||||
labels: ["1", "3", "6", "9", "12", "15", "18", "21", "24", "27"],
|
||||
datasets: [{
|
||||
label: 'SNOOZED',
|
||||
data: [330, 380, 230, 400, 309, 430, 340, 310, 280, 300],
|
||||
backgroundColor: '#8862e0'
|
||||
},
|
||||
{
|
||||
label: 'COMPLETED',
|
||||
data: [375, 440, 284, 450, 386, 480, 400, 365, 365, 435],
|
||||
backgroundColor: '#49bbce'
|
||||
},
|
||||
{
|
||||
label: 'OVERDUE',
|
||||
data: [425, 480, 324, 490, 426, 520, 440, 405, 425, 475],
|
||||
backgroundColor: '#e7487e'
|
||||
}
|
||||
]
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: true,
|
||||
layout: {
|
||||
padding: {
|
||||
left: 0,
|
||||
right: 0,
|
||||
top: 20,
|
||||
bottom: 0
|
||||
}
|
||||
},
|
||||
scales: {
|
||||
yAxes: [{
|
||||
display: false,
|
||||
gridLines: {
|
||||
display: false
|
||||
}
|
||||
}],
|
||||
xAxes: [{
|
||||
stacked: true,
|
||||
ticks: {
|
||||
beginAtZero: true,
|
||||
fontColor: "#bdbcbe"
|
||||
},
|
||||
gridLines: {
|
||||
color: "rgba(0, 0, 0, 0)",
|
||||
display: false
|
||||
},
|
||||
barPercentage: 0.2
|
||||
}]
|
||||
},
|
||||
legend: {
|
||||
display: false
|
||||
},
|
||||
elements: {
|
||||
point: {
|
||||
radius: 0
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
if ($("#dashboardTrendingProgress").length) {
|
||||
var bar = new ProgressBar.Circle(dashboardTrendingProgress, {
|
||||
strokeWidth: 15,
|
||||
trailWidth: 15,
|
||||
easing: 'easeInOut',
|
||||
duration: 1400,
|
||||
from: {
|
||||
color: '#f2f2f4',
|
||||
width: 10
|
||||
},
|
||||
to: {
|
||||
color: '#00df59',
|
||||
width: 15
|
||||
},
|
||||
step: function(state, circle) {
|
||||
circle.path.setAttribute('stroke', state.color);
|
||||
circle.path.setAttribute('stroke-width', state.width);
|
||||
}
|
||||
});
|
||||
bar.animate(.84);
|
||||
}
|
||||
if ($("#dashboardTrendingBars").length) {
|
||||
var CurrentChartCanvas = $("#dashboardTrendingBars").get(0).getContext("2d");
|
||||
var CurrentChart = new Chart(CurrentChartCanvas, {
|
||||
type: 'bar',
|
||||
data: {
|
||||
labels: ["1", "3", "6", "9", "12", "15"],
|
||||
datasets: [{
|
||||
data: [100, 130, 180, 170, 130, 170],
|
||||
backgroundColor: '#8862e0'
|
||||
}]
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: true,
|
||||
layout: {
|
||||
padding: {
|
||||
left: 0,
|
||||
right: 0,
|
||||
top: 0,
|
||||
bottom: 0
|
||||
}
|
||||
},
|
||||
scales: {
|
||||
yAxes: [{
|
||||
display: false
|
||||
}],
|
||||
xAxes: [{
|
||||
display: false,
|
||||
barPercentage: 0.8
|
||||
}]
|
||||
},
|
||||
legend: {
|
||||
display: false
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
})(jQuery);
|
File diff suppressed because it is too large
9447 app/frontend/static/assets/js/jquery-ui.js vendored Normal file
File diff suppressed because it is too large
@ -1,238 +1,232 @@
|
||||
var ChartColor = ["#5D62B4", "#54C3BE", "#EF726F", "#F9C446", "rgb(93.0, 98.0, 180.0)", "#21B7EC", "#04BCCC"];
|
||||
var primaryColor = getComputedStyle(document.body).getPropertyValue('--primary');
|
||||
var secondaryColor = getComputedStyle(document.body).getPropertyValue('--secondary');
|
||||
var successColor = getComputedStyle(document.body).getPropertyValue('--success');
|
||||
var warningColor = getComputedStyle(document.body).getPropertyValue('--warning');
|
||||
var dangerColor = getComputedStyle(document.body).getPropertyValue('--danger');
|
||||
var infoColor = getComputedStyle(document.body).getPropertyValue('--info');
|
||||
var darkColor = getComputedStyle(document.body).getPropertyValue('--dark');
|
||||
var lightColor = getComputedStyle(document.body).getPropertyValue('--light');
|
||||
if ($('body').hasClass("dark-theme")) {
|
||||
var chartFontcolor = '#b9c0d3';
|
||||
var chartGridLineColor = '#383e5d';
|
||||
|
||||
var ChartColor = [
|
||||
"#5D62B4",
|
||||
"#54C3BE",
|
||||
"#EF726F",
|
||||
"#F9C446",
|
||||
"rgb(93.0, 98.0, 180.0)",
|
||||
"#21B7EC",
|
||||
"#04BCCC",
|
||||
];
|
||||
var primaryColor = getComputedStyle(document.body).getPropertyValue(
|
||||
"--primary"
|
||||
);
|
||||
var secondaryColor = getComputedStyle(document.body).getPropertyValue(
|
||||
"--secondary"
|
||||
);
|
||||
var successColor = getComputedStyle(document.body).getPropertyValue(
|
||||
"--success"
|
||||
);
|
||||
var warningColor = getComputedStyle(document.body).getPropertyValue(
|
||||
"--warning"
|
||||
);
|
||||
var dangerColor = getComputedStyle(document.body).getPropertyValue("--danger");
|
||||
var infoColor = getComputedStyle(document.body).getPropertyValue("--info");
|
||||
var darkColor = getComputedStyle(document.body).getPropertyValue("--dark");
|
||||
var lightColor = getComputedStyle(document.body).getPropertyValue("--light");
|
||||
if ($("body").hasClass("dark-theme")) {
|
||||
var chartFontcolor = "#b9c0d3";
|
||||
var chartGridLineColor = "#383e5d";
|
||||
} else {
|
||||
var chartFontcolor = '#6c757d';
|
||||
var chartGridLineColor = 'rgba(0,0,0,0.08)';
|
||||
var chartFontcolor = "#6c757d";
|
||||
var chartGridLineColor = "rgba(0,0,0,0.08)";
|
||||
}
|
||||
if ($('canvas').length) {
|
||||
Chart.defaults.global.tooltips.enabled = false;
|
||||
Chart.defaults.global.defaultFontColor = '#354d66';
|
||||
Chart.defaults.global.defaultFontFamily = '"Poppins", sans-serif';
|
||||
Chart.defaults.global.tooltips.custom = function (tooltipModel) {
|
||||
// Tooltip Element
|
||||
var tooltipEl = document.getElementById('chartjs-tooltip');
|
||||
if ($("canvas").length) {
|
||||
Chart.defaults.global.tooltips.enabled = false;
|
||||
Chart.defaults.global.defaultFontColor = "#354d66";
|
||||
Chart.defaults.global.defaultFontFamily = '"Poppins", sans-serif';
|
||||
Chart.defaults.global.tooltips.custom = function (tooltipModel) {
|
||||
// Tooltip Element
|
||||
var tooltipEl = document.getElementById("chartjs-tooltip");
|
||||
|
||||
// Create element on first render
|
||||
if (!tooltipEl) {
|
||||
tooltipEl = document.createElement('div');
|
||||
tooltipEl.id = 'chartjs-tooltip';
|
||||
tooltipEl.innerHTML = "<table></table>";
|
||||
document.body.appendChild(tooltipEl);
|
||||
}
|
||||
// Create element on first render
|
||||
if (!tooltipEl) {
|
||||
tooltipEl = document.createElement("div");
|
||||
tooltipEl.id = "chartjs-tooltip";
|
||||
tooltipEl.innerHTML = "<table></table>";
|
||||
document.body.appendChild(tooltipEl);
|
||||
}
|
||||
|
||||
// Hide if no tooltip
|
||||
if (tooltipModel.opacity === 0) {
|
||||
tooltipEl.style.opacity = 0;
|
||||
return;
|
||||
}
|
||||
// Hide if no tooltip
|
||||
if (tooltipModel.opacity === 0) {
|
||||
tooltipEl.style.opacity = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Set caret Position
|
||||
tooltipEl.classList.remove('above', 'below', 'no-transform');
|
||||
if (tooltipModel.yAlign) {
|
||||
tooltipEl.classList.add(tooltipModel.yAlign);
|
||||
} else {
|
||||
tooltipEl.classList.add('no-transform');
|
||||
}
|
||||
// Set caret Position
|
||||
tooltipEl.classList.remove("above", "below", "no-transform");
|
||||
if (tooltipModel.yAlign) {
|
||||
tooltipEl.classList.add(tooltipModel.yAlign);
|
||||
} else {
|
||||
tooltipEl.classList.add("no-transform");
|
||||
}
|
||||
|
||||
function getBody(bodyItem) {
|
||||
return bodyItem.lines;
|
||||
}
|
||||
function getBody(bodyItem) {
|
||||
return bodyItem.lines;
|
||||
}
|
||||
|
||||
// Set Text
|
||||
if (tooltipModel.body) {
|
||||
var titleLines = tooltipModel.title || [];
|
||||
var bodyLines = tooltipModel.body.map(getBody);
|
||||
// Set Text
|
||||
if (tooltipModel.body) {
|
||||
var titleLines = tooltipModel.title || [];
|
||||
var bodyLines = tooltipModel.body.map(getBody);
|
||||
|
||||
var innerHtml = '<thead>';
|
||||
var innerHtml = "<thead>";
|
||||
|
||||
titleLines.forEach(function (title) {
|
||||
innerHtml += '<tr><th>' + title + '</th></tr>';
|
||||
});
|
||||
innerHtml += '</thead><tbody>';
|
||||
titleLines.forEach(function (title) {
|
||||
innerHtml += "<tr><th>" + title + "</th></tr>";
|
||||
});
|
||||
innerHtml += "</thead><tbody>";
|
||||
|
||||
bodyLines.forEach(function (body, i) {
|
||||
var colors = tooltipModel.labelColors[i];
|
||||
var style = 'background:' + colors.borderColor;
|
||||
style += '; border-color:' + colors.borderColor;
|
||||
style += '; border-width: 2px';
|
||||
var span = '<span style="' + style + '"></span>';
|
||||
innerHtml += '<tr><td>' + span + body + '</td></tr>';
|
||||
});
|
||||
innerHtml += '</tbody>';
|
||||
bodyLines.forEach(function (body, i) {
|
||||
var colors = tooltipModel.labelColors[i];
|
||||
var style = "background:" + colors.borderColor;
|
||||
style += "; border-color:" + colors.borderColor;
|
||||
style += "; border-width: 2px";
|
||||
var span = '<span style="' + style + '"></span>';
|
||||
innerHtml += "<tr><td>" + span + body + "</td></tr>";
|
||||
});
|
||||
innerHtml += "</tbody>";
|
||||
|
||||
var tableRoot = tooltipEl.querySelector('table');
|
||||
tableRoot.innerHTML = innerHtml;
|
||||
}
|
||||
var tableRoot = tooltipEl.querySelector("table");
|
||||
tableRoot.innerHTML = innerHtml;
|
||||
}
|
||||
|
||||
// `this` will be the overall tooltip
|
||||
var position = this._chart.canvas.getBoundingClientRect();
|
||||
// `this` will be the overall tooltip
|
||||
var position = this._chart.canvas.getBoundingClientRect();
|
||||
|
||||
// Display, position, and set styles for font
|
||||
tooltipEl.style.opacity = 1;
|
||||
tooltipEl.style.position = 'absolute';
|
||||
tooltipEl.style.left = position.left + window.pageXOffset + tooltipModel.caretX + 'px';
|
||||
tooltipEl.style.top = position.top + window.pageYOffset + tooltipModel.caretY + 'px';
|
||||
tooltipEl.style.fontFamily = tooltipModel._bodyFontFamily;
|
||||
tooltipEl.style.fontSize = tooltipModel.bodyFontSize + 'px';
|
||||
tooltipEl.style.fontStyle = tooltipModel._bodyFontStyle;
|
||||
tooltipEl.style.padding = tooltipModel.yPadding + 'px ' + tooltipModel.xPadding + 'px';
|
||||
tooltipEl.style.pointerEvents = 'none';
|
||||
}
|
||||
Chart.defaults.global.legend.labels.fontStyle = "italic";
|
||||
Chart.defaults.global.tooltips.intersect = false;
|
||||
// Display, position, and set styles for font
|
||||
tooltipEl.style.opacity = 1;
|
||||
tooltipEl.style.position = "absolute";
|
||||
tooltipEl.style.left =
|
||||
position.left + window.pageXOffset + tooltipModel.caretX + "px";
|
||||
tooltipEl.style.top =
|
||||
position.top + window.pageYOffset + tooltipModel.caretY + "px";
|
||||
tooltipEl.style.fontFamily = tooltipModel._bodyFontFamily;
|
||||
tooltipEl.style.fontSize = tooltipModel.bodyFontSize + "px";
|
||||
tooltipEl.style.fontStyle = tooltipModel._bodyFontStyle;
|
||||
tooltipEl.style.padding =
|
||||
tooltipModel.yPadding + "px " + tooltipModel.xPadding + "px";
|
||||
tooltipEl.style.pointerEvents = "none";
|
||||
};
|
||||
Chart.defaults.global.legend.labels.fontStyle = "italic";
|
||||
Chart.defaults.global.tooltips.intersect = false;
|
||||
}
|
||||
|
||||
(function ($) {
|
||||
'use strict';
|
||||
$(function () {
|
||||
var body = $('body');
|
||||
var contentWrapper = $('.content-wrapper');
|
||||
var scroller = $('.container-scroller');
|
||||
var footer = $('.footer');
|
||||
var sidebar = $('#sidebar');
|
||||
"use strict";
|
||||
$(function () {
|
||||
var body = $("body");
|
||||
var contentWrapper = $(".content-wrapper");
|
||||
var scroller = $(".container-scroller");
|
||||
var footer = $(".footer");
|
||||
var sidebar = $("#sidebar");
|
||||
|
||||
//Add active class to nav-link based on url dynamically
|
||||
//Active class can be hard coded directly in html file also as required
|
||||
if (!$('#sidebar').hasClass("dynamic-active-class-disabled")) {
|
||||
var current = location.pathname.split("/").slice(-1)[0].replace(/^\/|\/$/g, '');
|
||||
$('#sidebar >.nav > li:not(.not-navigation-link) a').each(function () {
|
||||
var $this = $(this);
|
||||
if (current === "") {
|
||||
//for root url
|
||||
if ($this.attr('href').indexOf("index.html") !== -1) {
|
||||
$(this).parents('.nav-item').last().addClass('active');
|
||||
if ($(this).parents('.sub-menu').length) {
|
||||
$(this).addClass('active');
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//for other url
|
||||
if ($this.attr('href').indexOf(current) !== -1) {
|
||||
$(this).parents('.nav-item').last().addClass('active');
|
||||
if ($(this).parents('.sub-menu').length) {
|
||||
$(this).addClass('active');
|
||||
}
|
||||
if (current !== "index.html") {
|
||||
$(this).parents('.nav-item').last().find(".nav-link").attr("aria-expanded", "true");
|
||||
if ($(this).parents('.sub-menu').length) {
|
||||
$(this).closest('.collapse').addClass('show');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
//Add active class to nav-link based on url dynamically
|
||||
//Active class can be hard coded directly in html file also as required
|
||||
if (!sidebar.hasClass("dynamic-active-class-disabled")) {
|
||||
var current = location.pathname
|
||||
.split("/")
|
||||
.slice(-1)[0]
|
||||
.replace(/^\/|\/$/g, "");
|
||||
$("#sidebar >.nav > li:not(.not-navigation-link) a").each(
|
||||
function () {
|
||||
var $this = $(this);
|
||||
if (current === "") {
|
||||
//for root url
|
||||
if ($this.attr("href").indexOf("index.html") !== -1) {
|
||||
$(this)
|
||||
.parents(".nav-item")
|
||||
.last()
|
||||
.addClass("active");
|
||||
if ($(this).parents(".sub-menu").length) {
|
||||
$(this).addClass("active");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//for other url
|
||||
if ($this.attr("href").indexOf(current) !== -1) {
|
||||
$(this)
|
||||
.parents(".nav-item")
|
||||
.last()
|
||||
.addClass("active");
|
||||
if ($(this).parents(".sub-menu").length) {
|
||||
$(this).addClass("active");
|
||||
}
|
||||
if (current !== "index.html") {
|
||||
$(this)
|
||||
.parents(".nav-item")
|
||||
.last()
|
||||
.find(".nav-link")
|
||||
.attr("aria-expanded", "true");
|
||||
if ($(this).parents(".sub-menu").length) {
|
||||
$(this)
|
||||
.closest(".collapse")
|
||||
.addClass("show");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// Themeswitch function
|
||||
function themeSwitch(url) {
|
||||
var currentURL = window.location.href;
|
||||
var res = currentURL.split("/");
|
||||
var abs_url = currentURL.replace(/demo_.\d*/, url);
|
||||
window.location.href = abs_url;
|
||||
}
|
||||
$("#theme-light-switch").on("click", function (e) {
|
||||
e.preventDefault();
|
||||
themeSwitch('demo_1');
|
||||
});
|
||||
$("#theme-dark-switch").on("click", function (e) {
|
||||
e.preventDefault();
|
||||
themeSwitch('demo_3');
|
||||
});
|
||||
//Close other submenu in sidebar on opening any
|
||||
$("#sidebar > .nav > .nav-item > a[data-toggle='collapse']").on(
|
||||
"click",
|
||||
function () {
|
||||
$("#sidebar > .nav > .nav-item")
|
||||
.find(".collapse.show")
|
||||
.collapse("hide");
|
||||
}
|
||||
);
|
||||
|
||||
$('[data-toggle="minimize"]').on("click", function () {
|
||||
if (
|
||||
body.hasClass("sidebar-toggle-display") ||
|
||||
body.hasClass("sidebar-absolute")
|
||||
) {
|
||||
body.toggleClass("sidebar-hidden");
|
||||
} else {
|
||||
body.toggleClass("sidebar-icon-only");
|
||||
const vw = Math.max(
|
||||
document.documentElement.clientWidth || 0,
|
||||
window.innerWidth || 0
|
||||
);
|
||||
if (vw >= 1200) {
|
||||
localStorage.setItem(
|
||||
"crafty-sidebar-expanded",
|
||||
!body.hasClass("sidebar-icon-only")
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
$(".email-wrapper .mail-list-container .mail-list").on("click", function () {
|
||||
$(".email-wrapper .mail-list-container").addClass("d-none");
|
||||
$(".email-wrapper .mail-view").addClass("d-block");
|
||||
});
|
||||
$(".email-wrapper .mail-back-button").on("click", function () {
|
||||
$(".email-wrapper .mail-list-container").removeClass("d-none");
|
||||
$(".email-wrapper .mail-view").removeClass("d-block");
|
||||
});
|
||||
$(".aside-toggler").on("click", function () {
|
||||
$(".mail-sidebar,.chat-list-wrapper").toggleClass("menu-open");
|
||||
});
|
||||
$("#color-setting").on("click", function () {
|
||||
$("#color-settings").addClass("open");
|
||||
});
|
||||
$("#layout-toggler").on("click", function () {
|
||||
$("#theme-settings").addClass("open");
|
||||
});
|
||||
$("#chat-toggler").on("click", function () {
|
||||
$("#right-sidebar").addClass("open");
|
||||
});
|
||||
//checkbox and radios
|
||||
$(".form-check label,.form-radio label").append(
|
||||
'<i class="input-helper"></i>'
|
||||
);
|
||||
});
|
||||
|
||||
//Close other submenu in sidebar on opening any
|
||||
$("#sidebar > .nav > .nav-item > a[data-toggle='collapse']").on("click", function () {
|
||||
$("#sidebar > .nav > .nav-item").find('.collapse.show').collapse('hide');
|
||||
});
|
||||
$('[data-toggle="tooltip"]').tooltip();
|
||||
|
||||
|
||||
//Change sidebar and content-wrapper height
|
||||
applyStyles();
|
||||
|
||||
function applyStyles() {
|
||||
//Applying perfect scrollbar
|
||||
if (!body.hasClass("rtl")) {
|
||||
if ($('.settings-panel .tab-content .tab-pane.scroll-wrapper').length) {
|
||||
const settingsPanelScroll = new PerfectScrollbar('.settings-panel .tab-content .tab-pane.scroll-wrapper');
|
||||
}
|
||||
if ($('.chats').length) {
|
||||
const chatsScroll = new PerfectScrollbar('.chats');
|
||||
}
|
||||
if ($('.scroll-container').length) {
|
||||
const ScrollContainer = new PerfectScrollbar('.scroll-container');
|
||||
}
|
||||
if (body.hasClass("sidebar-fixed")) {
|
||||
var fixedSidebarScroll = new PerfectScrollbar('#sidebar .nav');
|
||||
}
|
||||
if ($('.ps-enabled').length) {
|
||||
const psEnabled = new PerfectScrollbar('.ps-enabled');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$('[data-toggle="minimize"]').on("click", function () {
|
||||
if ((body.hasClass('sidebar-toggle-display')) || (body.hasClass('sidebar-absolute'))) {
|
||||
body.toggleClass('sidebar-hidden');
|
||||
} else {
|
||||
body.toggleClass('sidebar-icon-only');
|
||||
const vw = Math.max(document.documentElement.clientWidth || 0, window.innerWidth || 0);
|
||||
if (vw >= 1200) {
|
||||
localStorage.setItem('crafty-sidebar-expanded', !body.hasClass('sidebar-icon-only'));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
//checkbox and radios
|
||||
$(".form-check label,.form-radio label").append('<i class="input-helper"></i>');
|
||||
});
|
||||
|
||||
$('[data-toggle="tooltip"]').tooltip();
|
||||
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").not(".brand-logo").attr('toggle-status', 'closed');
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").on('click', function () {
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").removeClass("active");
|
||||
$(this).addClass("active");
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").find(".submenu").removeClass("open");
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").not(this).attr('toggle-status', 'closed');
|
||||
var toggleStatus = $(this).attr('toggle-status');
|
||||
if (toggleStatus == 'closed') {
|
||||
$(this).find(".submenu").addClass("open");
|
||||
$(this).attr('toggle-status', 'open');
|
||||
} else {
|
||||
$(this).find(".submenu").removeClass("open");
|
||||
$(this).not(".brand-logo").attr('toggle-status', 'closed');
|
||||
}
|
||||
});
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item")
|
||||
.not(".brand-logo")
|
||||
.attr("toggle-status", "closed");
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").on("click", function () {
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item").removeClass("active");
|
||||
$(this).addClass("active");
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item")
|
||||
.find(".submenu")
|
||||
.removeClass("open");
|
||||
$(".sidebar .sidebar-inner > .nav > .nav-item")
|
||||
.not(this)
|
||||
.attr("toggle-status", "closed");
|
||||
var toggleStatus = $(this).attr("toggle-status");
|
||||
if (toggleStatus == "closed") {
|
||||
$(this).find(".submenu").addClass("open");
|
||||
$(this).attr("toggle-status", "open");
|
||||
} else {
|
||||
$(this).find(".submenu").removeClass("open");
|
||||
$(this).not(".brand-logo").attr("toggle-status", "closed");
|
||||
}
|
||||
});
|
||||
})(jQuery);
|
||||
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,388 +0,0 @@
|
||||
/*
|
||||
* The MIT License
|
||||
* Copyright (c) 2012 Matias Meno <m@tias.me>
|
||||
*/
|
||||
@-webkit-keyframes passing-through {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30%, 70% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); }
|
||||
100% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(-40px);
|
||||
-moz-transform: translateY(-40px);
|
||||
-ms-transform: translateY(-40px);
|
||||
-o-transform: translateY(-40px);
|
||||
transform: translateY(-40px); } }
|
||||
@-moz-keyframes passing-through {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30%, 70% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); }
|
||||
100% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(-40px);
|
||||
-moz-transform: translateY(-40px);
|
||||
-ms-transform: translateY(-40px);
|
||||
-o-transform: translateY(-40px);
|
||||
transform: translateY(-40px); } }
|
||||
@keyframes passing-through {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30%, 70% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); }
|
||||
100% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(-40px);
|
||||
-moz-transform: translateY(-40px);
|
||||
-ms-transform: translateY(-40px);
|
||||
-o-transform: translateY(-40px);
|
||||
transform: translateY(-40px); } }
|
||||
@-webkit-keyframes slide-in {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); } }
|
||||
@-moz-keyframes slide-in {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); } }
|
||||
@keyframes slide-in {
|
||||
0% {
|
||||
opacity: 0;
|
||||
-webkit-transform: translateY(40px);
|
||||
-moz-transform: translateY(40px);
|
||||
-ms-transform: translateY(40px);
|
||||
-o-transform: translateY(40px);
|
||||
transform: translateY(40px); }
|
||||
30% {
|
||||
opacity: 1;
|
||||
-webkit-transform: translateY(0px);
|
||||
-moz-transform: translateY(0px);
|
||||
-ms-transform: translateY(0px);
|
||||
-o-transform: translateY(0px);
|
||||
transform: translateY(0px); } }
|
||||
@-webkit-keyframes pulse {
|
||||
0% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); }
|
||||
10% {
|
||||
-webkit-transform: scale(1.1);
|
||||
-moz-transform: scale(1.1);
|
||||
-ms-transform: scale(1.1);
|
||||
-o-transform: scale(1.1);
|
||||
transform: scale(1.1); }
|
||||
20% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); } }
|
||||
@-moz-keyframes pulse {
|
||||
0% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); }
|
||||
10% {
|
||||
-webkit-transform: scale(1.1);
|
||||
-moz-transform: scale(1.1);
|
||||
-ms-transform: scale(1.1);
|
||||
-o-transform: scale(1.1);
|
||||
transform: scale(1.1); }
|
||||
20% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); } }
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); }
|
||||
10% {
|
||||
-webkit-transform: scale(1.1);
|
||||
-moz-transform: scale(1.1);
|
||||
-ms-transform: scale(1.1);
|
||||
-o-transform: scale(1.1);
|
||||
transform: scale(1.1); }
|
||||
20% {
|
||||
-webkit-transform: scale(1);
|
||||
-moz-transform: scale(1);
|
||||
-ms-transform: scale(1);
|
||||
-o-transform: scale(1);
|
||||
transform: scale(1); } }
|
||||
.dropzone, .dropzone * {
|
||||
box-sizing: border-box; }
|
||||
|
||||
.dropzone {
|
||||
min-height: 150px;
|
||||
border: 2px solid rgba(0, 0, 0, 0.3);
|
||||
background: white;
|
||||
padding: 20px 20px; }
|
||||
.dropzone.dz-clickable {
|
||||
cursor: pointer; }
|
||||
.dropzone.dz-clickable * {
|
||||
cursor: default; }
|
||||
.dropzone.dz-clickable .dz-message, .dropzone.dz-clickable .dz-message * {
|
||||
cursor: pointer; }
|
||||
.dropzone.dz-started .dz-message {
|
||||
display: none; }
|
||||
.dropzone.dz-drag-hover {
|
||||
border-style: solid; }
|
||||
.dropzone.dz-drag-hover .dz-message {
|
||||
opacity: 0.5; }
|
||||
.dropzone .dz-message {
|
||||
text-align: center;
|
||||
margin: 2em 0; }
|
||||
.dropzone .dz-preview {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
margin: 16px;
|
||||
min-height: 100px; }
|
||||
.dropzone .dz-preview:hover {
|
||||
z-index: 1000; }
|
||||
.dropzone .dz-preview:hover .dz-details {
|
||||
opacity: 1; }
|
||||
.dropzone .dz-preview.dz-file-preview .dz-image {
|
||||
border-radius: 20px;
|
||||
background: #999;
|
||||
background: linear-gradient(to bottom, #eee, #ddd); }
|
||||
.dropzone .dz-preview.dz-file-preview .dz-details {
|
||||
opacity: 1; }
|
||||
.dropzone .dz-preview.dz-image-preview {
|
||||
background: white; }
|
||||
.dropzone .dz-preview.dz-image-preview .dz-details {
|
||||
-webkit-transition: opacity 0.2s linear;
|
||||
-moz-transition: opacity 0.2s linear;
|
||||
-ms-transition: opacity 0.2s linear;
|
||||
-o-transition: opacity 0.2s linear;
|
||||
transition: opacity 0.2s linear; }
|
||||
.dropzone .dz-preview .dz-remove {
|
||||
font-size: 14px;
|
||||
text-align: center;
|
||||
display: block;
|
||||
cursor: pointer;
|
||||
border: none; }
|
||||
.dropzone .dz-preview .dz-remove:hover {
|
||||
text-decoration: underline; }
|
||||
.dropzone .dz-preview:hover .dz-details {
|
||||
opacity: 1; }
|
||||
.dropzone .dz-preview .dz-details {
|
||||
z-index: 20;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
opacity: 0;
|
||||
font-size: 13px;
|
||||
min-width: 100%;
|
||||
max-width: 100%;
|
||||
padding: 2em 1em;
|
||||
text-align: center;
|
||||
color: rgba(0, 0, 0, 0.9);
|
||||
line-height: 150%; }
|
||||
.dropzone .dz-preview .dz-details .dz-size {
|
||||
margin-bottom: 1em;
|
||||
font-size: 16px; }
|
||||
.dropzone .dz-preview .dz-details .dz-filename {
|
||||
white-space: nowrap; }
|
||||
.dropzone .dz-preview .dz-details .dz-filename:hover span {
|
||||
border: 1px solid rgba(200, 200, 200, 0.8);
|
||||
background-color: rgba(255, 255, 255, 0.8); }
|
||||
.dropzone .dz-preview .dz-details .dz-filename:not(:hover) {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis; }
|
||||
.dropzone .dz-preview .dz-details .dz-filename:not(:hover) span {
|
||||
border: 1px solid transparent; }
|
||||
.dropzone .dz-preview .dz-details .dz-filename span, .dropzone .dz-preview .dz-details .dz-size span {
|
||||
background-color: rgba(255, 255, 255, 0.4);
|
||||
padding: 0 0.4em;
|
||||
border-radius: 3px; }
|
||||
.dropzone .dz-preview:hover .dz-image img {
|
||||
-webkit-transform: scale(1.05, 1.05);
|
||||
-moz-transform: scale(1.05, 1.05);
|
||||
-ms-transform: scale(1.05, 1.05);
|
||||
-o-transform: scale(1.05, 1.05);
|
||||
transform: scale(1.05, 1.05);
|
||||
-webkit-filter: blur(8px);
|
||||
filter: blur(8px); }
|
||||
.dropzone .dz-preview .dz-image {
|
||||
border-radius: 20px;
|
||||
overflow: hidden;
|
||||
width: 120px;
|
||||
height: 120px;
|
||||
position: relative;
|
||||
display: block;
|
||||
z-index: 10; }
|
||||
.dropzone .dz-preview .dz-image img {
|
||||
display: block; }
|
||||
.dropzone .dz-preview.dz-success .dz-success-mark {
|
||||
-webkit-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-moz-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-ms-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-o-animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
animation: passing-through 3s cubic-bezier(0.77, 0, 0.175, 1); }
|
||||
.dropzone .dz-preview.dz-error .dz-error-mark {
|
||||
opacity: 1;
|
||||
-webkit-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-moz-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-ms-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
-o-animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1);
|
||||
animation: slide-in 3s cubic-bezier(0.77, 0, 0.175, 1); }
|
||||
.dropzone .dz-preview .dz-success-mark, .dropzone .dz-preview .dz-error-mark {
|
||||
pointer-events: none;
|
||||
opacity: 0;
|
||||
z-index: 500;
|
||||
position: absolute;
|
||||
display: block;
|
||||
top: 50%;
|
||||
left: 50%;
|
||||
margin-left: -27px;
|
||||
margin-top: -27px; }
|
||||
.dropzone .dz-preview .dz-success-mark svg, .dropzone .dz-preview .dz-error-mark svg {
|
||||
display: block;
|
||||
width: 54px;
|
||||
height: 54px; }
|
||||
.dropzone .dz-preview.dz-processing .dz-progress {
|
||||
opacity: 1;
|
||||
-webkit-transition: all 0.2s linear;
|
||||
-moz-transition: all 0.2s linear;
|
||||
-ms-transition: all 0.2s linear;
|
||||
-o-transition: all 0.2s linear;
|
||||
transition: all 0.2s linear; }
|
||||
.dropzone .dz-preview.dz-complete .dz-progress {
|
||||
opacity: 0;
|
||||
-webkit-transition: opacity 0.4s ease-in;
|
||||
-moz-transition: opacity 0.4s ease-in;
|
||||
-ms-transition: opacity 0.4s ease-in;
|
||||
-o-transition: opacity 0.4s ease-in;
|
||||
transition: opacity 0.4s ease-in; }
|
||||
.dropzone .dz-preview:not(.dz-processing) .dz-progress {
|
||||
-webkit-animation: pulse 6s ease infinite;
|
||||
-moz-animation: pulse 6s ease infinite;
|
||||
-ms-animation: pulse 6s ease infinite;
|
||||
-o-animation: pulse 6s ease infinite;
|
||||
animation: pulse 6s ease infinite; }
|
||||
.dropzone .dz-preview .dz-progress {
|
||||
opacity: 1;
|
||||
z-index: 1000;
|
||||
pointer-events: none;
|
||||
position: absolute;
|
||||
height: 16px;
|
||||
left: 50%;
|
||||
top: 50%;
|
||||
margin-top: -8px;
|
||||
width: 80px;
|
||||
margin-left: -40px;
|
||||
background: rgba(255, 255, 255, 0.9);
|
||||
-webkit-transform: scale(1);
|
||||
border-radius: 8px;
|
||||
overflow: hidden; }
|
||||
.dropzone .dz-preview .dz-progress .dz-upload {
|
||||
background: #333;
|
||||
background: linear-gradient(to bottom, #666, #444);
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
width: 0;
|
||||
-webkit-transition: width 300ms ease-in-out;
|
||||
-moz-transition: width 300ms ease-in-out;
|
||||
-ms-transition: width 300ms ease-in-out;
|
||||
-o-transition: width 300ms ease-in-out;
|
||||
transition: width 300ms ease-in-out; }
|
||||
.dropzone .dz-preview.dz-error .dz-error-message {
|
||||
display: block; }
|
||||
.dropzone .dz-preview.dz-error:hover .dz-error-message {
|
||||
opacity: 1;
|
||||
pointer-events: auto; }
|
||||
.dropzone .dz-preview .dz-error-message {
|
||||
pointer-events: none;
|
||||
z-index: 1000;
|
||||
position: absolute;
|
||||
display: block;
|
||||
display: none;
|
||||
opacity: 0;
|
||||
-webkit-transition: opacity 0.3s ease;
|
||||
-moz-transition: opacity 0.3s ease;
|
||||
-ms-transition: opacity 0.3s ease;
|
||||
-o-transition: opacity 0.3s ease;
|
||||
transition: opacity 0.3s ease;
|
||||
border-radius: 8px;
|
||||
font-size: 13px;
|
||||
top: 130px;
|
||||
left: -10px;
|
||||
width: 140px;
|
||||
background: #be2626;
|
||||
background: linear-gradient(to bottom, #be2626, #a92222);
|
||||
padding: 0.5em 1.2em;
|
||||
color: white; }
|
||||
.dropzone .dz-preview .dz-error-message:after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: -6px;
|
||||
left: 64px;
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 6px solid transparent;
|
||||
border-right: 6px solid transparent;
|
||||
border-bottom: 6px solid #be2626; }
|
3530 app/frontend/static/assets/vendors/dropzone/dropzone.js vendored
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@ -1,135 +0,0 @@
|
||||
svg {
|
||||
touch-action: none;
|
||||
}
|
||||
|
||||
.jvectormap-container {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
touch-action: none;
|
||||
}
|
||||
|
||||
.jvectormap-tip {
|
||||
position: absolute;
|
||||
display: none;
|
||||
border: solid 1px #CDCDCD;
|
||||
border-radius: 3px;
|
||||
background: #292929;
|
||||
color: white;
|
||||
font-family: sans-serif, Verdana;
|
||||
font-size: smaller;
|
||||
padding: 3px;
|
||||
}
|
||||
|
||||
.jvectormap-zoomin, .jvectormap-zoomout, .jvectormap-goback {
|
||||
position: absolute;
|
||||
left: 10px;
|
||||
border-radius: 3px;
|
||||
background: #292929;
|
||||
padding: 3px;
|
||||
color: white;
|
||||
cursor: pointer;
|
||||
line-height: 10px;
|
||||
text-align: center;
|
||||
box-sizing: content-box;
|
||||
}
|
||||
|
||||
.jvectormap-zoomin, .jvectormap-zoomout {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
|
||||
.jvectormap-zoomin {
|
||||
top: 10px;
|
||||
}
|
||||
|
||||
.jvectormap-zoomout {
|
||||
top: 30px;
|
||||
}
|
||||
|
||||
.jvectormap-goback {
|
||||
bottom: 10px;
|
||||
z-index: 1000;
|
||||
padding: 6px;
|
||||
}
|
||||
|
||||
.jvectormap-spinner {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: center no-repeat url(data:image/gif;base64,R0lGODlhIAAgAPMAAP///wAAAMbGxoSEhLa2tpqamjY2NlZWVtjY2OTk5Ly8vB4eHgQEBAAAAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh/hpDcmVhdGVkIHdpdGggYWpheGxvYWQuaW5mbwAh+QQJCgAAACwAAAAAIAAgAAAE5xDISWlhperN52JLhSSdRgwVo1ICQZRUsiwHpTJT4iowNS8vyW2icCF6k8HMMBkCEDskxTBDAZwuAkkqIfxIQyhBQBFvAQSDITM5VDW6XNE4KagNh6Bgwe60smQUB3d4Rz1ZBApnFASDd0hihh12BkE9kjAJVlycXIg7CQIFA6SlnJ87paqbSKiKoqusnbMdmDC2tXQlkUhziYtyWTxIfy6BE8WJt5YJvpJivxNaGmLHT0VnOgSYf0dZXS7APdpB309RnHOG5gDqXGLDaC457D1zZ/V/nmOM82XiHRLYKhKP1oZmADdEAAAh+QQJCgAAACwAAAAAIAAgAAAE6hDISWlZpOrNp1lGNRSdRpDUolIGw5RUYhhHukqFu8DsrEyqnWThGvAmhVlteBvojpTDDBUEIFwMFBRAmBkSgOrBFZogCASwBDEY/CZSg7GSE0gSCjQBMVG023xWBhklAnoEdhQEfyNqMIcKjhRsjEdnezB+A4k8gTwJhFuiW4dokXiloUepBAp5qaKpp6+Ho7aWW54wl7obvEe0kRuoplCGepwSx2jJvqHEmGt6whJpGpfJCHmOoNHKaHx61WiSR92E4lbFoq+B6QDtuetcaBPnW6+O7wDHpIiK9SaVK5GgV543tzjgGcghAgAh+QQJCgAAACwAAAAAIAAgAAAE7hDISSkxpOrN5zFHNWRdhSiVoVLHspRUMoyUakyEe8PTPCATW9A14E0UvuAKMNAZKYUZCiBMuBakSQKG8G2FzUWox2AUtAQFcBKlVQoLgQReZhQlCIJesQXI5B0CBnUMOxMCenoCfTCEWBsJColTMANldx15BGs8B5wlCZ9Po6OJkwmRpnqkqnuSrayqfKmqpLajoiW5HJq7FL1Gr2mMMcKUMIiJgIemy7xZtJsTmsM4xHiKv5KMCXqfyUCJEonXPN2rAOIAmsfB3uPoAK++G+w48edZPK+M6hLJpQg484enXIdQFSS1u6UhksENEQAAIfkECQoAAAAsAAAAACAAIAAABOcQyEmpGKLqzWcZRVUQnZYg1aBSh2GUVEIQ2aQOE+G+cD4ntpWkZQj1JIiZIogDFFyHI0UxQwFugMSOFIPJftfVAEoZLBbcLEFhlQiqGp1Vd140AUklUN3eCA51C1EWMzMCezCBBmkxVIVHBWd3HHl9JQOIJSdSnJ0TDKChCwUJjoWMPaGqDKannasMo6WnM562R5YluZRwur0wpgqZE7NKUm+FNRPIhjBJxKZteWuIBMN4zRMIVIhffcgojwCF117i4nlLnY5ztRLsnOk+aV+oJY7V7m76PdkS4trKcdg0Zc0tTcKkRAAAIfkECQoAAAAsAAAAACAAIAAABO4QyEkpKqjqzScpRaVkXZWQEximw1BSCUEIlDohrft6cpKCk5xid5MNJTaAIkekKGQkWyKHkvhKsR7ARmitkAYDYRIbUQRQjWBwJRzChi9CRlBcY1UN4g0/VNB0AlcvcAYHRyZPdEQFYV8ccwR5HWxEJ02YmRMLnJ1xCYp0Y5idpQuhopmmC2KgojKasUQDk5BNAwwMOh2RtRq5uQuPZKGIJQIGwAwGf6I0JXMpC8C7kXWDBINFMxS4DKMAWVWAGYsAdNqW5uaRxkSKJOZKaU3tPOBZ4DuK2LATgJhkPJMgTwKCdFjyPHEnKxFCDhEAACH5BAkKAAAALAAAAAAgACAAAATzEMhJaVKp6s2nIkolIJ2WkBShpkVRWqqQrhLSEu9MZJKK9y1ZrqYK9WiClmvoUaF8gIQSNeF1Er4MNFn4SRSDARWroAIETg1iVwuHjYB1kYc1mwruwXKC9gmsJXliGxc+XiUCby9ydh1sOSdMkpMTBpaXBzsfhoc5l58Gm5yToAaZhaOUqjkDgCWNHAULCwOLaTmzswadEqggQwgHuQsHIoZCHQMMQgQGubVEcxOPFAcMDAYUA85eWARmfSRQCdcMe0zeP1AAygwLlJtPNAAL19DARdPzBOWSm1brJBi45soRAWQAAkrQIykShQ9wVhHCwCQCACH5BAkKAAAALAAAAAAgACAAAATrEMhJaVKp6s2nIkqFZF2VIBWhUsJaTokqUCoBq+E71SRQeyqUToLA7VxF0JDyIQh/MVVPMt1ECZlfcjZJ9mIKoaTl1MRIl5o4CUKXOwmyrCInCKqcWtvadL2SYhyASyNDJ0uIiRMDjI0Fd30/iI2UA5GSS5UDj2l6NoqgOgN4gksEBgYFf0FDqKgHnyZ9OX8HrgYHdHpcHQULXAS2qKpENRg7eAMLC7kTBaixUYFkKAzWAAnLC7FLVxLWDBLKCwaKTULgEwbLA4hJtOkSBNqITT3xEgfLpBtzE/jiuL04RGEBgwWhShRgQExHBAAh+QQJCgAAACwAAAAAIAAgAAAE7xDISWlSqerNpyJKhWRdlSAVoVLCWk6JKlAqAavhO9UkUHsqlE6CwO1cRdCQ8iEIfzFVTzLdRAmZX3I2SfZiCqGk5dTESJeaOAlClzsJsqwiJwiqnFrb2nS9kmIcgEsjQydLiIlHehhpejaIjzh9eomSjZR+ipslWIRLAgMDOR2DOqKogTB9pCUJBagDBXR6XB0EBkIIsaRsGGMMAxoDBgYHTKJiUYEGDAzHC9EACcUGkIgFzgwZ0QsSBcXHiQvOwgDdEwfFs0sDzt4S6BK4xYjkDOzn0unFeBzOBijIm1Dgmg5YFQwsCMjp1oJ8LyIAACH5BAkKAAAALAAAAAAgACAAAATwEMhJaVKp6s2nIkqFZF2VIBWhUsJaTokqUCoBq+E71SRQeyqUToLA7VxF0JDyIQh/MVVPMt1ECZlfcjZJ9mIKoaTl1MRIl5o4CUKXOwmyrCInCKqcWtvadL2SYhyASyNDJ0uIiUd6GGl6NoiPOH16iZKNlH6KmyWFOggHhEEvAwwMA0N9GBsEC6amhnVcEwavDAazGwIDaH1ipaYLBUTCGgQDA8NdHz0FpqgTBwsLqAbWAAnIA4FWKdMLGdYGEgraigbT0OITBcg5QwPT4xLrROZL6AuQAPUS7bxLpoWidY0JtxLHKhwwMJBTHgPKdEQAACH5BAkKAAAALAAAAAAgACAAAATrEMhJaVKp6s2nIkqFZF2VIBWhUsJaTokqUCoBq+E71SRQeyqUToLA7VxF0JDyIQh/MVVPMt1ECZlfcjZJ9mIKoaTl1MRIl5o4CUKXOwmyrCInCKqcWtvadL2SYhyASyNDJ0uIiUd6GAULDJCRiXo1CpGXDJOUjY+Yip9DhToJA4RBLwMLCwVDfRgbBAaqqoZ1XBMHswsHtxtFaH1iqaoGNgAIxRpbFAgfPQSqpbgGBqUD
1wBXeCYp1AYZ19JJOYgH1KwA4UBvQwXUBxPqVD9L3sbp2BNk2xvvFPJd+MFCN6HAAIKgNggY0KtEBAAh+QQJCgAAACwAAAAAIAAgAAAE6BDISWlSqerNpyJKhWRdlSAVoVLCWk6JKlAqAavhO9UkUHsqlE6CwO1cRdCQ8iEIfzFVTzLdRAmZX3I2SfYIDMaAFdTESJeaEDAIMxYFqrOUaNW4E4ObYcCXaiBVEgULe0NJaxxtYksjh2NLkZISgDgJhHthkpU4mW6blRiYmZOlh4JWkDqILwUGBnE6TYEbCgevr0N1gH4At7gHiRpFaLNrrq8HNgAJA70AWxQIH1+vsYMDAzZQPC9VCNkDWUhGkuE5PxJNwiUK4UfLzOlD4WvzAHaoG9nxPi5d+jYUqfAhhykOFwJWiAAAIfkECQoAAAAsAAAAACAAIAAABPAQyElpUqnqzaciSoVkXVUMFaFSwlpOCcMYlErAavhOMnNLNo8KsZsMZItJEIDIFSkLGQoQTNhIsFehRww2CQLKF0tYGKYSg+ygsZIuNqJksKgbfgIGepNo2cIUB3V1B3IvNiBYNQaDSTtfhhx0CwVPI0UJe0+bm4g5VgcGoqOcnjmjqDSdnhgEoamcsZuXO1aWQy8KAwOAuTYYGwi7w5h+Kr0SJ8MFihpNbx+4Erq7BYBuzsdiH1jCAzoSfl0rVirNbRXlBBlLX+BP0XJLAPGzTkAuAOqb0WT5AH7OcdCm5B8TgRwSRKIHQtaLCwg1RAAAOwAAAAAAAAAAAA==);
|
||||
}
|
||||
|
||||
.jvectormap-legend-title {
|
||||
font-weight: bold;
|
||||
font-size: 14px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt {
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h {
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-v {
|
||||
top: 0;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
.jvectormap-legend {
|
||||
background: black;
|
||||
color: white;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h .jvectormap-legend {
|
||||
float: left;
|
||||
margin: 0 10px 10px 0;
|
||||
padding: 3px 3px 1px 3px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h .jvectormap-legend .jvectormap-legend-tick {
|
||||
float: left;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-v .jvectormap-legend {
|
||||
margin: 10px 10px 0 0;
|
||||
padding: 3px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h .jvectormap-legend-tick {
|
||||
width: 40px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h .jvectormap-legend-tick-sample {
|
||||
height: 15px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-v .jvectormap-legend-tick-sample {
|
||||
height: 20px;
|
||||
width: 20px;
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.jvectormap-legend-tick-text {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-h .jvectormap-legend-tick-text {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.jvectormap-legend-cnt-v .jvectormap-legend-tick-text {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
line-height: 20px;
|
||||
padding-left: 3px;
|
||||
}
|
File diff suppressed because one or more lines are too long
@ -1,2 +0,0 @@
.morris-hover{position:absolute;z-index:1000}.morris-hover.morris-default-style{border-radius:10px;padding:6px;color:#666;background:rgba(255,255,255,0.8);border:solid 2px rgba(230,230,230,0.8);font-family:sans-serif;font-size:12px;text-align:center}.morris-hover.morris-default-style .morris-hover-row-label{font-weight:bold;margin:0.25em 0}
.morris-hover.morris-default-style .morris-hover-point{white-space:nowrap;margin:0.1em 0}
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff