mirror of
https://gitlab.com/crafty-controller/crafty-4.git
synced 2024-08-30 18:23:09 +00:00
Merge branch 'devops/black' into 'dev'
🏴Black Formatting See merge request crafty-controller/crafty-commander!223
This commit is contained in:
commit
ff1d94505b
@ -2,15 +2,25 @@
|
|||||||
|
|
||||||
root = true
|
root = true
|
||||||
|
|
||||||
[*.{js,py,html}]
|
[*]
|
||||||
charset = utf-8
|
charset = utf-8
|
||||||
|
end_of_line = lf
|
||||||
|
trim_trailing_whitespace = true
|
||||||
insert_final_newline = true
|
insert_final_newline = true
|
||||||
# end_of_line = lf
|
|
||||||
|
|
||||||
[*.py]
|
[*.py]
|
||||||
indent_style = space
|
profile = black
|
||||||
indent_size = 4
|
# > Handled by Black
|
||||||
|
# indent_style = space
|
||||||
|
# indent_size = 4
|
||||||
|
|
||||||
[*.{js,html}]
|
[*.md]
|
||||||
|
trim_trailing_whitespace = false
|
||||||
|
|
||||||
|
[*.html]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.js]
|
||||||
|
indent_style = tab
|
||||||
|
indent_size = 4
|
||||||
|
@ -1,5 +1,10 @@
|
|||||||
|
# Crafty Controller 4.0 - Lint & Build Pipes
|
||||||
|
# [Maintainer: Zedifus(https://gitlab.com/Zedifus)]
|
||||||
|
###################################################
|
||||||
|
# yamllint disable rule:line-length
|
||||||
|
---
|
||||||
stages:
|
stages:
|
||||||
- test
|
- lint
|
||||||
- prod-deployment
|
- prod-deployment
|
||||||
- dev-deployment
|
- dev-deployment
|
||||||
|
|
||||||
@ -7,8 +12,46 @@ variables:
|
|||||||
DOCKER_HOST: tcp://docker:2376
|
DOCKER_HOST: tcp://docker:2376
|
||||||
DOCKER_TLS_CERTDIR: "/certs"
|
DOCKER_TLS_CERTDIR: "/certs"
|
||||||
|
|
||||||
|
yamllint:
|
||||||
|
stage: lint
|
||||||
|
image: registry.gitlab.com/pipeline-components/yamllint:latest
|
||||||
|
tags:
|
||||||
|
- 'docker'
|
||||||
|
rules:
|
||||||
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||||
|
- if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
|
||||||
|
when: never
|
||||||
|
script:
|
||||||
|
- yamllint .
|
||||||
|
|
||||||
|
jsonlint:
|
||||||
|
stage: lint
|
||||||
|
image: registry.gitlab.com/pipeline-components/jsonlint:latest
|
||||||
|
tags:
|
||||||
|
- 'docker'
|
||||||
|
rules:
|
||||||
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||||
|
- if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
|
||||||
|
when: never
|
||||||
|
script:
|
||||||
|
- |
|
||||||
|
find . -not -path './.git/*' -name '*.json' -type f -print0 |
|
||||||
|
parallel --will-cite -k -0 -n1 jsonlint -q
|
||||||
|
|
||||||
|
black:
|
||||||
|
stage: lint
|
||||||
|
image: registry.gitlab.com/pipeline-components/black:latest
|
||||||
|
tags:
|
||||||
|
- 'docker'
|
||||||
|
rules:
|
||||||
|
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||||
|
- if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
|
||||||
|
when: never
|
||||||
|
script:
|
||||||
|
- black --check --verbose -- .
|
||||||
|
|
||||||
pylint:
|
pylint:
|
||||||
stage: test
|
stage: lint
|
||||||
image: python:3.7-slim
|
image: python:3.7-slim
|
||||||
tags:
|
tags:
|
||||||
- 'docker'
|
- 'docker'
|
||||||
@ -152,6 +195,8 @@ win-dev-build:
|
|||||||
--collect-all pytz
|
--collect-all pytz
|
||||||
--collect-all six
|
--collect-all six
|
||||||
|
|
||||||
|
# Download latest:
|
||||||
|
# | https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/dev/download?job=win-dev-build
|
||||||
artifacts:
|
artifacts:
|
||||||
name: "crafty-${CI_RUNNER_TAGS}-${CI_COMMIT_BRANCH}_${CI_COMMIT_SHORT_SHA}"
|
name: "crafty-${CI_RUNNER_TAGS}-${CI_COMMIT_BRANCH}_${CI_COMMIT_SHORT_SHA}"
|
||||||
paths:
|
paths:
|
||||||
@ -159,8 +204,7 @@ win-dev-build:
|
|||||||
- .\crafty_commander.exe
|
- .\crafty_commander.exe
|
||||||
exclude:
|
exclude:
|
||||||
- app\classes\**\*
|
- app\classes\**\*
|
||||||
# Download latest:
|
|
||||||
# | https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/dev/download?job=win-dev-build
|
|
||||||
|
|
||||||
win-prod-build:
|
win-prod-build:
|
||||||
stage: prod-deployment
|
stage: prod-deployment
|
||||||
@ -193,6 +237,8 @@ win-prod-build:
|
|||||||
--collect-all pytz
|
--collect-all pytz
|
||||||
--collect-all six
|
--collect-all six
|
||||||
|
|
||||||
|
# Download latest:
|
||||||
|
# | https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/master/download?job=win-prod-build
|
||||||
artifacts:
|
artifacts:
|
||||||
name: "crafty-${CI_RUNNER_TAGS}-${CI_COMMIT_BRANCH}_${CI_COMMIT_SHORT_SHA}"
|
name: "crafty-${CI_RUNNER_TAGS}-${CI_COMMIT_BRANCH}_${CI_COMMIT_SHORT_SHA}"
|
||||||
paths:
|
paths:
|
||||||
@ -200,5 +246,3 @@ win-prod-build:
|
|||||||
- .\crafty_commander.exe
|
- .\crafty_commander.exe
|
||||||
exclude:
|
exclude:
|
||||||
- app\classes\**\*
|
- app\classes\**\*
|
||||||
# Download latest:
|
|
||||||
# | https://gitlab.com/crafty-controller/crafty-commander/-/jobs/artifacts/master/download?job=win-prod-build
|
|
||||||
|
@ -78,7 +78,9 @@ confidence=
|
|||||||
# --enable=similarities". If you want to run only the classes checker, but have
|
# --enable=similarities". If you want to run only the classes checker, but have
|
||||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||||
# --disable=W".
|
# --disable=W".
|
||||||
disable=abstract-method,
|
disable=C0330,
|
||||||
|
C0326,
|
||||||
|
abstract-method,
|
||||||
attribute-defined-outside-init,
|
attribute-defined-outside-init,
|
||||||
bad-inline-option,
|
bad-inline-option,
|
||||||
bare-except,
|
bare-except,
|
||||||
@ -306,7 +308,7 @@ indent-after-paren=4
|
|||||||
indent-string=' '
|
indent-string=' '
|
||||||
|
|
||||||
# Maximum number of characters on a single line.
|
# Maximum number of characters on a single line.
|
||||||
max-line-length=150
|
max-line-length=88
|
||||||
|
|
||||||
# Maximum number of lines in a module.
|
# Maximum number of lines in a module.
|
||||||
max-module-lines=2000
|
max-module-lines=2000
|
||||||
|
@ -1,3 +1,11 @@
|
|||||||
|
[![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
|
||||||
|
|
||||||
|
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
|
||||||
|
[![Supported Python Versions](https://shields.io/badge/python-3.7%20%7C%203.8%20%7C%203.9-blue)](https://gitlab.com/crafty-controller/crafty-commander)
|
||||||
|
[![Version(temp-hardcoded)](https://img.shields.io/badge/release-v4.0.0--alpha3.5-orange)](https://gitlab.com/crafty-controller/crafty-commander)
|
||||||
|
[![Code Quality(temp-hardcoded)](https://img.shields.io/badge/code%20quality-10-brightgreen)](https://gitlab.com/crafty-controller/crafty-commander)
|
||||||
|
[![Build Status](https://gitlab.com/crafty-controller/crafty-commander/badges/master/pipeline.svg)](https://gitlab.com/crafty-controller/crafty-commander/-/commits/master)
|
||||||
|
|
||||||
# Crafty Controller 4.0.0-alpha.3.5
|
# Crafty Controller 4.0.0-alpha.3.5
|
||||||
> Python based Control Panel for your Minecraft Server
|
> Python based Control Panel for your Minecraft Server
|
||||||
|
|
||||||
|
@ -1,12 +1,15 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
|
from app.classes.models.crafty_permissions import (
|
||||||
|
crafty_permissions,
|
||||||
|
Enum_Permissions_Crafty,
|
||||||
|
)
|
||||||
from app.classes.models.users import ApiKeys
|
from app.classes.models.users import ApiKeys
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Crafty_Perms_Controller:
|
|
||||||
|
|
||||||
|
class Crafty_Perms_Controller:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def list_defined_crafty_permissions():
|
def list_defined_crafty_permissions():
|
||||||
permissions_list = crafty_permissions.get_permissions_list()
|
permissions_list = crafty_permissions.get_permissions_list()
|
||||||
@ -18,24 +21,34 @@ class Crafty_Perms_Controller:
|
|||||||
return permissions_mask
|
return permissions_mask
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_permission(permission_mask, permission_tested: Enum_Permissions_Crafty, value):
|
def set_permission(
|
||||||
return crafty_permissions.set_permission(permission_mask, permission_tested, value)
|
permission_mask, permission_tested: Enum_Permissions_Crafty, value
|
||||||
|
):
|
||||||
|
return crafty_permissions.set_permission(
|
||||||
|
permission_mask, permission_tested, value
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def can_create_server(user_id):
|
def can_create_server(user_id):
|
||||||
return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Server_Creation)
|
return crafty_permissions.can_add_in_crafty(
|
||||||
|
user_id, Enum_Permissions_Crafty.Server_Creation
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def can_add_user(): # Add back argument 'user_id' when you work on this
|
def can_add_user(): # Add back argument 'user_id' when you work on this
|
||||||
#TODO: Complete if we need a User Addition limit
|
|
||||||
#return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.User_Config)
|
|
||||||
return True
|
return True
|
||||||
|
# TODO: Complete if we need a User Addition limit
|
||||||
|
# return crafty_permissions.can_add_in_crafty(
|
||||||
|
# user_id, Enum_Permissions_Crafty.User_Config
|
||||||
|
# )
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def can_add_role(): # Add back argument 'user_id' when you work on this
|
def can_add_role(): # Add back argument 'user_id' when you work on this
|
||||||
#TODO: Complete if we need a Role Addition limit
|
|
||||||
#return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Roles_Config)
|
|
||||||
return True
|
return True
|
||||||
|
# TODO: Complete if we need a Role Addition limit
|
||||||
|
# return crafty_permissions.can_add_in_crafty(
|
||||||
|
# user_id, Enum_Permissions_Crafty.Roles_Config
|
||||||
|
# )
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def list_all_crafty_permissions_quantity_limits():
|
def list_all_crafty_permissions_quantity_limits():
|
||||||
|
@ -5,18 +5,19 @@ from app.classes.models.servers import servers_helper
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Management_Controller:
|
class Management_Controller:
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Host_Stats Methods
|
# Host_Stats Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_latest_hosts_stats():
|
def get_latest_hosts_stats():
|
||||||
return management_helper.get_latest_hosts_stats()
|
return management_helper.get_latest_hosts_stats()
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Commands Methods
|
# Commands Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_unactioned_commands():
|
def get_unactioned_commands():
|
||||||
return management_helper.get_unactioned_commands()
|
return management_helper.get_unactioned_commands()
|
||||||
@ -26,33 +27,51 @@ class Management_Controller:
|
|||||||
server_name = servers_helper.get_server_friendly_name(server_id)
|
server_name = servers_helper.get_server_friendly_name(server_id)
|
||||||
|
|
||||||
# Example: Admin issued command start_server for server Survival
|
# Example: Admin issued command start_server for server Survival
|
||||||
management_helper.add_to_audit_log(user_id, f"issued command {command} for server {server_name}", server_id, remote_ip)
|
management_helper.add_to_audit_log(
|
||||||
|
user_id,
|
||||||
|
f"issued command {command} for server {server_name}",
|
||||||
|
server_id,
|
||||||
|
remote_ip,
|
||||||
|
)
|
||||||
management_helper.add_command(server_id, user_id, remote_ip, command)
|
management_helper.add_command(server_id, user_id, remote_ip, command)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def mark_command_complete(command_id=None):
|
def mark_command_complete(command_id=None):
|
||||||
return management_helper.mark_command_complete(command_id)
|
return management_helper.mark_command_complete(command_id)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Audit_Log Methods
|
# Audit_Log Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_actity_log():
|
def get_actity_log():
|
||||||
return management_helper.get_actity_log()
|
return management_helper.get_actity_log()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
|
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
|
||||||
return management_helper.add_to_audit_log(user_id, log_msg, server_id, source_ip)
|
return management_helper.add_to_audit_log(
|
||||||
|
user_id, log_msg, server_id, source_ip
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
|
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
|
||||||
return management_helper.add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip)
|
return management_helper.add_to_audit_log_raw(
|
||||||
|
user_name, user_id, server_id, log_msg, source_ip
|
||||||
|
)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Schedules Methods
|
# Schedules Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_scheduled_task(server_id, action, interval, interval_type, start_time, command, comment=None, enabled=True):
|
def create_scheduled_task(
|
||||||
|
server_id,
|
||||||
|
action,
|
||||||
|
interval,
|
||||||
|
interval_type,
|
||||||
|
start_time,
|
||||||
|
command,
|
||||||
|
comment=None,
|
||||||
|
enabled=True,
|
||||||
|
):
|
||||||
return management_helper.create_scheduled_task(
|
return management_helper.create_scheduled_task(
|
||||||
server_id,
|
server_id,
|
||||||
action,
|
action,
|
||||||
@ -61,7 +80,7 @@ class Management_Controller:
|
|||||||
start_time,
|
start_time,
|
||||||
command,
|
command,
|
||||||
comment,
|
comment,
|
||||||
enabled
|
enabled,
|
||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -96,16 +115,24 @@ class Management_Controller:
|
|||||||
def get_schedules_enabled():
|
def get_schedules_enabled():
|
||||||
return management_helper.get_schedules_enabled()
|
return management_helper.get_schedules_enabled()
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Backups Methods
|
# Backups Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_backup_config(server_id):
|
def get_backup_config(server_id):
|
||||||
return management_helper.get_backup_config(server_id)
|
return management_helper.get_backup_config(server_id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_backup_config(server_id: int, backup_path: str = None, max_backups: int = None, excluded_dirs: list = None, compress: bool = False,):
|
def set_backup_config(
|
||||||
return management_helper.set_backup_config(server_id, backup_path, max_backups, excluded_dirs, compress)
|
server_id: int,
|
||||||
|
backup_path: str = None,
|
||||||
|
max_backups: int = None,
|
||||||
|
excluded_dirs: list = None,
|
||||||
|
compress: bool = False,
|
||||||
|
):
|
||||||
|
return management_helper.set_backup_config(
|
||||||
|
server_id, backup_path, max_backups, excluded_dirs, compress
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_excluded_backup_dirs(server_id: int):
|
def get_excluded_backup_dirs(server_id: int):
|
||||||
|
@ -7,8 +7,8 @@ from app.classes.shared.helpers import helper
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Roles_Controller:
|
|
||||||
|
|
||||||
|
class Roles_Controller:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_all_roles():
|
def get_all_roles():
|
||||||
return roles_helper.get_all_roles()
|
return roles_helper.get_all_roles()
|
||||||
@ -21,7 +21,6 @@ class Roles_Controller:
|
|||||||
def get_role(role_id):
|
def get_role(role_id):
|
||||||
return roles_helper.get_role(role_id)
|
return roles_helper.get_role(role_id)
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def update_role(role_id: str, role_data=None, permissions_mask: str = "00000000"):
|
def update_role(role_id: str, role_data=None, permissions_mask: str = "00000000"):
|
||||||
if role_data is None:
|
if role_data is None:
|
||||||
@ -34,17 +33,20 @@ class Roles_Controller:
|
|||||||
if key == "role_id":
|
if key == "role_id":
|
||||||
continue
|
continue
|
||||||
elif key == "servers":
|
elif key == "servers":
|
||||||
added_servers = role_data['servers'].difference(base_data['servers'])
|
added_servers = role_data["servers"].difference(base_data["servers"])
|
||||||
removed_servers = base_data['servers'].difference(role_data['servers'])
|
removed_servers = base_data["servers"].difference(role_data["servers"])
|
||||||
elif base_data[key] != role_data[key]:
|
elif base_data[key] != role_data[key]:
|
||||||
up_data[key] = role_data[key]
|
up_data[key] = role_data[key]
|
||||||
up_data['last_update'] = helper.get_time_as_string()
|
up_data["last_update"] = helper.get_time_as_string()
|
||||||
logger.debug(f"role: {role_data} +server:{added_servers} -server{removed_servers}")
|
logger.debug(
|
||||||
|
f"role: {role_data} +server:{added_servers} -server{removed_servers}"
|
||||||
|
)
|
||||||
for server in added_servers:
|
for server in added_servers:
|
||||||
server_permissions.get_or_create(role_id, server, permissions_mask)
|
server_permissions.get_or_create(role_id, server, permissions_mask)
|
||||||
for server in base_data['servers']:
|
for server in base_data["servers"]:
|
||||||
server_permissions.update_role_permission(role_id, server, permissions_mask)
|
server_permissions.update_role_permission(role_id, server, permissions_mask)
|
||||||
# TODO: This is horribly inefficient and we should be using bulk queries but im going for functionality at this point
|
# TODO: This is horribly inefficient and we should be using bulk queries
|
||||||
|
# but im going for functionality at this point
|
||||||
server_permissions.delete_roles_permissions(role_id, removed_servers)
|
server_permissions.delete_roles_permissions(role_id, removed_servers)
|
||||||
if up_data:
|
if up_data:
|
||||||
roles_helper.update_role(role_id, up_data)
|
roles_helper.update_role(role_id, up_data)
|
||||||
@ -56,7 +58,7 @@ class Roles_Controller:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def remove_role(role_id):
|
def remove_role(role_id):
|
||||||
role_data = Roles_Controller.get_role_with_servers(role_id)
|
role_data = Roles_Controller.get_role_with_servers(role_id)
|
||||||
server_permissions.delete_roles_permissions(role_id, role_data['servers'])
|
server_permissions.delete_roles_permissions(role_id, role_data["servers"])
|
||||||
users_helper.remove_roles_from_role_id(role_id)
|
users_helper.remove_roles_from_role_id(role_id)
|
||||||
return roles_helper.remove_role(role_id)
|
return roles_helper.remove_role(role_id)
|
||||||
|
|
||||||
@ -74,7 +76,7 @@ class Roles_Controller:
|
|||||||
servers = set()
|
servers = set()
|
||||||
for s in servers_query:
|
for s in servers_query:
|
||||||
servers.add(s.server_id.server_id)
|
servers.add(s.server_id.server_id)
|
||||||
role['servers'] = servers
|
role["servers"] = servers
|
||||||
# logger.debug("role: ({}) {}".format(role_id, role))
|
# logger.debug("role: ({}) {}".format(role_id, role))
|
||||||
return role
|
return role
|
||||||
else:
|
else:
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
|
from app.classes.models.server_permissions import (
|
||||||
|
server_permissions,
|
||||||
|
Enum_Permissions_Server,
|
||||||
|
)
|
||||||
from app.classes.models.users import users_helper, ApiKeys
|
from app.classes.models.users import users_helper, ApiKeys
|
||||||
from app.classes.models.roles import roles_helper
|
from app.classes.models.roles import roles_helper
|
||||||
from app.classes.models.servers import servers_helper
|
from app.classes.models.servers import servers_helper
|
||||||
@ -8,8 +11,8 @@ from app.classes.shared.main_models import db_helper
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Server_Perms_Controller:
|
|
||||||
|
|
||||||
|
class Server_Perms_Controller:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_user_list(server_id):
|
def get_server_user_list(server_id):
|
||||||
return server_permissions.get_server_user_list(server_id)
|
return server_permissions.get_server_user_list(server_id)
|
||||||
@ -42,20 +45,28 @@ class Server_Perms_Controller:
|
|||||||
role_list = server_permissions.get_server_roles(old_server_id)
|
role_list = server_permissions.get_server_roles(old_server_id)
|
||||||
for role in role_list:
|
for role in role_list:
|
||||||
server_permissions.add_role_server(
|
server_permissions.add_role_server(
|
||||||
new_server_id, role.role_id,
|
new_server_id,
|
||||||
server_permissions.get_permissions_mask(int(role.role_id), int(old_server_id)))
|
role.role_id,
|
||||||
#server_permissions.add_role_server(new_server_id, role.role_id, '00001000')
|
server_permissions.get_permissions_mask(
|
||||||
|
int(role.role_id), int(old_server_id)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
# server_permissions.add_role_server(new_server_id, role.role_id,"00001000")
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Permissions Methods
|
# Servers Permissions Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_permissions_mask(role_id, server_id):
|
def get_permissions_mask(role_id, server_id):
|
||||||
return server_permissions.get_permissions_mask(role_id, server_id)
|
return server_permissions.get_permissions_mask(role_id, server_id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_permission(permission_mask, permission_tested: Enum_Permissions_Server, value):
|
def set_permission(
|
||||||
return server_permissions.set_permission(permission_mask, permission_tested, value)
|
permission_mask, permission_tested: Enum_Permissions_Server, value
|
||||||
|
):
|
||||||
|
return server_permissions.set_permission(
|
||||||
|
permission_mask, permission_tested, value
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_role_permissions_list(role_id):
|
def get_role_permissions_list(role_id):
|
||||||
@ -86,7 +97,9 @@ class Server_Perms_Controller:
|
|||||||
roles_list.append(roles_helper.get_role(u.role_id))
|
roles_list.append(roles_helper.get_role(u.role_id))
|
||||||
|
|
||||||
for r in roles_list:
|
for r in roles_list:
|
||||||
role_test = server_permissions.get_role_servers_from_role_id(r.get('role_id'))
|
role_test = server_permissions.get_role_servers_from_role_id(
|
||||||
|
r.get("role_id")
|
||||||
|
)
|
||||||
for t in role_test:
|
for t in role_test:
|
||||||
role_server.append(t)
|
role_server.append(t)
|
||||||
|
|
||||||
@ -94,6 +107,8 @@ class Server_Perms_Controller:
|
|||||||
authorized_servers.append(servers_helper.get_server_data_by_id(s.server_id))
|
authorized_servers.append(servers_helper.get_server_data_by_id(s.server_id))
|
||||||
|
|
||||||
for s in authorized_servers:
|
for s in authorized_servers:
|
||||||
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
|
latest = servers_helper.get_latest_server_stats(s.get("server_id"))
|
||||||
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0]})
|
server_data.append(
|
||||||
|
{"server_data": s, "stats": db_helper.return_rows(latest)[0]}
|
||||||
|
)
|
||||||
return server_data
|
return server_data
|
||||||
|
@ -5,17 +5,21 @@ import json
|
|||||||
from app.classes.controllers.roles_controller import Roles_Controller
|
from app.classes.controllers.roles_controller import Roles_Controller
|
||||||
from app.classes.models.servers import servers_helper
|
from app.classes.models.servers import servers_helper
|
||||||
from app.classes.models.users import users_helper, ApiKeys
|
from app.classes.models.users import users_helper, ApiKeys
|
||||||
from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
|
from app.classes.models.server_permissions import (
|
||||||
|
server_permissions,
|
||||||
|
Enum_Permissions_Server,
|
||||||
|
)
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.main_models import db_helper
|
from app.classes.shared.main_models import db_helper
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Servers_Controller:
|
class Servers_Controller:
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Generic Servers Methods
|
# Generic Servers Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_server(
|
def create_server(
|
||||||
name: str,
|
name: str,
|
||||||
@ -27,7 +31,8 @@ class Servers_Controller:
|
|||||||
server_log_file: str,
|
server_log_file: str,
|
||||||
server_stop: str,
|
server_stop: str,
|
||||||
server_type: str,
|
server_type: str,
|
||||||
server_port=25565):
|
server_port=25565,
|
||||||
|
):
|
||||||
return servers_helper.create_server(
|
return servers_helper.create_server(
|
||||||
name,
|
name,
|
||||||
server_uuid,
|
server_uuid,
|
||||||
@ -38,7 +43,8 @@ class Servers_Controller:
|
|||||||
server_log_file,
|
server_log_file,
|
||||||
server_stop,
|
server_stop,
|
||||||
server_type,
|
server_type,
|
||||||
server_port)
|
server_port,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_obj(server_id):
|
def get_server_obj(server_id):
|
||||||
@ -66,8 +72,8 @@ class Servers_Controller:
|
|||||||
for role in roles_list:
|
for role in roles_list:
|
||||||
role_id = role.role_id
|
role_id = role.role_id
|
||||||
role_data = Roles_Controller.get_role_with_servers(role_id)
|
role_data = Roles_Controller.get_role_with_servers(role_id)
|
||||||
role_data['servers'] = {server_id}
|
role_data["servers"] = {server_id}
|
||||||
server_permissions.delete_roles_permissions(role_id, role_data['servers'])
|
server_permissions.delete_roles_permissions(role_id, role_data["servers"])
|
||||||
server_permissions.remove_roles_of_server(server_id)
|
server_permissions.remove_roles_of_server(server_id)
|
||||||
servers_helper.remove_server(server_id)
|
servers_helper.remove_server(server_id)
|
||||||
|
|
||||||
@ -75,9 +81,9 @@ class Servers_Controller:
|
|||||||
def get_server_data_by_id(server_id):
|
def get_server_data_by_id(server_id):
|
||||||
return servers_helper.get_server_data_by_id(server_id)
|
return servers_helper.get_server_data_by_id(server_id)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Methods
|
# Servers Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_all_defined_servers():
|
def get_all_defined_servers():
|
||||||
return servers_helper.get_all_defined_servers()
|
return servers_helper.get_all_defined_servers()
|
||||||
@ -100,17 +106,26 @@ class Servers_Controller:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_authorized_servers_stats_api_key(api_key: ApiKeys):
|
def get_authorized_servers_stats_api_key(api_key: ApiKeys):
|
||||||
server_data = []
|
server_data = []
|
||||||
authorized_servers = Servers_Controller.get_authorized_servers(api_key.user.user_id)
|
authorized_servers = Servers_Controller.get_authorized_servers(
|
||||||
|
api_key.user.user_id
|
||||||
|
)
|
||||||
|
|
||||||
for s in authorized_servers:
|
for s in authorized_servers:
|
||||||
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
|
latest = servers_helper.get_latest_server_stats(s.get("server_id"))
|
||||||
key_permissions = server_permissions.get_api_key_permissions_list(api_key, s.get('server_id'))
|
key_permissions = server_permissions.get_api_key_permissions_list(
|
||||||
|
api_key, s.get("server_id")
|
||||||
|
)
|
||||||
if Enum_Permissions_Server.Commands in key_permissions:
|
if Enum_Permissions_Server.Commands in key_permissions:
|
||||||
user_command_permission = True
|
user_command_permission = True
|
||||||
else:
|
else:
|
||||||
user_command_permission = False
|
user_command_permission = False
|
||||||
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0],
|
server_data.append(
|
||||||
"user_command_permission": user_command_permission})
|
{
|
||||||
|
"server_data": s,
|
||||||
|
"stats": db_helper.return_rows(latest)[0],
|
||||||
|
"user_command_permission": user_command_permission,
|
||||||
|
}
|
||||||
|
)
|
||||||
return server_data
|
return server_data
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -119,18 +134,22 @@ class Servers_Controller:
|
|||||||
authorized_servers = Servers_Controller.get_authorized_servers(user_id)
|
authorized_servers = Servers_Controller.get_authorized_servers(user_id)
|
||||||
|
|
||||||
for s in authorized_servers:
|
for s in authorized_servers:
|
||||||
latest = servers_helper.get_latest_server_stats(s.get('server_id'))
|
latest = servers_helper.get_latest_server_stats(s.get("server_id"))
|
||||||
# TODO
|
# TODO
|
||||||
user_permissions = server_permissions.get_user_id_permissions_list(user_id, s.get('server_id'))
|
user_permissions = server_permissions.get_user_id_permissions_list(
|
||||||
|
user_id, s.get("server_id")
|
||||||
|
)
|
||||||
if Enum_Permissions_Server.Commands in user_permissions:
|
if Enum_Permissions_Server.Commands in user_permissions:
|
||||||
user_command_permission = True
|
user_command_permission = True
|
||||||
else:
|
else:
|
||||||
user_command_permission = False
|
user_command_permission = False
|
||||||
server_data.append({
|
server_data.append(
|
||||||
'server_data': s,
|
{
|
||||||
'stats': db_helper.return_rows(latest)[0],
|
"server_data": s,
|
||||||
'user_command_permission': user_command_permission
|
"stats": db_helper.return_rows(latest)[0],
|
||||||
})
|
"user_command_permission": user_command_permission,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
return server_data
|
return server_data
|
||||||
|
|
||||||
@ -138,9 +157,9 @@ class Servers_Controller:
|
|||||||
def get_server_friendly_name(server_id):
|
def get_server_friendly_name(server_id):
|
||||||
return servers_helper.get_server_friendly_name(server_id)
|
return servers_helper.get_server_friendly_name(server_id)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers_Stats Methods
|
# Servers_Stats Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_stats_by_id(server_id):
|
def get_server_stats_by_id(server_id):
|
||||||
return servers_helper.get_server_stats_by_id(server_id)
|
return servers_helper.get_server_stats_by_id(server_id)
|
||||||
@ -157,7 +176,9 @@ class Servers_Controller:
|
|||||||
def server_id_authorized(server_id_a, user_id):
|
def server_id_authorized(server_id_a, user_id):
|
||||||
user_roles = users_helper.user_role_query(user_id)
|
user_roles = users_helper.user_role_query(user_id)
|
||||||
for role in user_roles:
|
for role in user_roles:
|
||||||
for server_id_b in server_permissions.get_role_servers_from_role_id(role.role_id):
|
for server_id_b in server_permissions.get_role_servers_from_role_id(
|
||||||
|
role.role_id
|
||||||
|
):
|
||||||
if str(server_id_a) == str(server_id_b.server_id):
|
if str(server_id_a) == str(server_id_b.server_id):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
@ -197,17 +218,19 @@ class Servers_Controller:
|
|||||||
def get_update_status(server_id):
|
def get_update_status(server_id):
|
||||||
return servers_helper.get_update_status(server_id)
|
return servers_helper.get_update_status(server_id)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Helpers Methods
|
# Servers Helpers Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_banned_players(server_id):
|
def get_banned_players(server_id):
|
||||||
stats = servers_helper.get_server_stats_by_id(server_id)
|
stats = servers_helper.get_server_stats_by_id(server_id)
|
||||||
server_path = stats['server_id']['path']
|
server_path = stats["server_id"]["path"]
|
||||||
path = os.path.join(server_path, 'banned-players.json')
|
path = os.path.join(server_path, "banned-players.json")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with open(helper.get_os_understandable_path(path), encoding='utf-8') as file:
|
with open(
|
||||||
|
helper.get_os_understandable_path(path), encoding="utf-8"
|
||||||
|
) as file:
|
||||||
content = file.read()
|
content = file.read()
|
||||||
file.close()
|
file.close()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
@ -219,18 +242,20 @@ class Servers_Controller:
|
|||||||
def check_for_old_logs(self):
|
def check_for_old_logs(self):
|
||||||
servers = servers_helper.get_all_defined_servers()
|
servers = servers_helper.get_all_defined_servers()
|
||||||
for server in servers:
|
for server in servers:
|
||||||
logs_path = os.path.split(server['log_path'])[0]
|
logs_path = os.path.split(server["log_path"])[0]
|
||||||
latest_log_file = os.path.split(server['log_path'])[1]
|
latest_log_file = os.path.split(server["log_path"])[1]
|
||||||
logs_delete_after = int(server['logs_delete_after'])
|
logs_delete_after = int(server["logs_delete_after"])
|
||||||
if logs_delete_after == 0:
|
if logs_delete_after == 0:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
log_files = list(filter(
|
log_files = list(
|
||||||
lambda val: val != latest_log_file,
|
filter(lambda val: val != latest_log_file, os.listdir(logs_path))
|
||||||
os.listdir(logs_path)
|
)
|
||||||
))
|
|
||||||
for log_file in log_files:
|
for log_file in log_files:
|
||||||
log_file_path = os.path.join(logs_path, log_file)
|
log_file_path = os.path.join(logs_path, log_file)
|
||||||
if helper.check_file_exists(log_file_path) and \
|
if helper.check_file_exists(
|
||||||
helper.is_file_older_than_x_days(log_file_path, logs_delete_after):
|
log_file_path
|
||||||
|
) and helper.is_file_older_than_x_days(
|
||||||
|
log_file_path, logs_delete_after
|
||||||
|
):
|
||||||
os.remove(log_file_path)
|
os.remove(log_file_path)
|
||||||
|
@ -2,17 +2,21 @@ import logging
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from app.classes.models.users import users_helper
|
from app.classes.models.users import users_helper
|
||||||
from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
|
from app.classes.models.crafty_permissions import (
|
||||||
|
crafty_permissions,
|
||||||
|
Enum_Permissions_Crafty,
|
||||||
|
)
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.authentication import authentication
|
from app.classes.shared.authentication import authentication
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Users_Controller:
|
class Users_Controller:
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Users Methods
|
# Users Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_all_users():
|
def get_all_users():
|
||||||
return users_helper.get_all_users()
|
return users_helper.get_all_users()
|
||||||
@ -59,26 +63,31 @@ class Users_Controller:
|
|||||||
if key == "user_id":
|
if key == "user_id":
|
||||||
continue
|
continue
|
||||||
elif key == "roles":
|
elif key == "roles":
|
||||||
added_roles = user_data['roles'].difference(base_data['roles'])
|
added_roles = user_data["roles"].difference(base_data["roles"])
|
||||||
removed_roles = base_data['roles'].difference(user_data['roles'])
|
removed_roles = base_data["roles"].difference(user_data["roles"])
|
||||||
elif key == "password":
|
elif key == "password":
|
||||||
if user_data['password'] is not None and user_data['password'] != "":
|
if user_data["password"] is not None and user_data["password"] != "":
|
||||||
up_data['password'] = helper.encode_pass(user_data['password'])
|
up_data["password"] = helper.encode_pass(user_data["password"])
|
||||||
elif base_data[key] != user_data[key]:
|
elif base_data[key] != user_data[key]:
|
||||||
up_data[key] = user_data[key]
|
up_data[key] = user_data[key]
|
||||||
up_data['last_update'] = helper.get_time_as_string()
|
up_data["last_update"] = helper.get_time_as_string()
|
||||||
up_data['lang'] = user_data['lang']
|
up_data["lang"] = user_data["lang"]
|
||||||
logger.debug(f"user: {user_data} +role:{added_roles} -role:{removed_roles}")
|
logger.debug(f"user: {user_data} +role:{added_roles} -role:{removed_roles}")
|
||||||
for role in added_roles:
|
for role in added_roles:
|
||||||
users_helper.get_or_create(user_id=user_id, role_id=role)
|
users_helper.get_or_create(user_id=user_id, role_id=role)
|
||||||
permissions_mask = user_crafty_data.get('permissions_mask', '000')
|
permissions_mask = user_crafty_data.get("permissions_mask", "000")
|
||||||
|
|
||||||
if 'server_quantity' in user_crafty_data:
|
if "server_quantity" in user_crafty_data:
|
||||||
limit_server_creation = user_crafty_data['server_quantity'][
|
limit_server_creation = user_crafty_data["server_quantity"][
|
||||||
Enum_Permissions_Crafty.Server_Creation.name]
|
Enum_Permissions_Crafty.Server_Creation.name
|
||||||
|
]
|
||||||
|
|
||||||
limit_user_creation = user_crafty_data['server_quantity'][Enum_Permissions_Crafty.User_Config.name]
|
limit_user_creation = user_crafty_data["server_quantity"][
|
||||||
limit_role_creation = user_crafty_data['server_quantity'][Enum_Permissions_Crafty.Roles_Config.name]
|
Enum_Permissions_Crafty.User_Config.name
|
||||||
|
]
|
||||||
|
limit_role_creation = user_crafty_data["server_quantity"][
|
||||||
|
Enum_Permissions_Crafty.Roles_Config.name
|
||||||
|
]
|
||||||
else:
|
else:
|
||||||
limit_server_creation = 0
|
limit_server_creation = 0
|
||||||
limit_user_creation = 0
|
limit_user_creation = 0
|
||||||
@ -89,19 +98,44 @@ class Users_Controller:
|
|||||||
permissions_mask,
|
permissions_mask,
|
||||||
limit_server_creation,
|
limit_server_creation,
|
||||||
limit_user_creation,
|
limit_user_creation,
|
||||||
limit_role_creation)
|
limit_role_creation,
|
||||||
|
)
|
||||||
|
|
||||||
users_helper.delete_user_roles(user_id, removed_roles)
|
users_helper.delete_user_roles(user_id, removed_roles)
|
||||||
|
|
||||||
users_helper.update_user(user_id, up_data)
|
users_helper.update_user(user_id, up_data)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user(username, password, email="default@example.com", enabled: bool = True, superuser: bool = False):
|
def add_user(
|
||||||
return users_helper.add_user(username, password=password, email=email, enabled=enabled, superuser=superuser)
|
username,
|
||||||
|
password,
|
||||||
|
email="default@example.com",
|
||||||
|
enabled: bool = True,
|
||||||
|
superuser: bool = False,
|
||||||
|
):
|
||||||
|
return users_helper.add_user(
|
||||||
|
username,
|
||||||
|
password=password,
|
||||||
|
email=email,
|
||||||
|
enabled=enabled,
|
||||||
|
superuser=superuser,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_rawpass_user(username, password, email="default@example.com", enabled: bool = True, superuser: bool = False):
|
def add_rawpass_user(
|
||||||
return users_helper.add_rawpass_user(username, password=password, email=email, enabled=enabled, superuser=superuser)
|
username,
|
||||||
|
password,
|
||||||
|
email="default@example.com",
|
||||||
|
enabled: bool = True,
|
||||||
|
superuser: bool = False,
|
||||||
|
):
|
||||||
|
return users_helper.add_rawpass_user(
|
||||||
|
username,
|
||||||
|
password=password,
|
||||||
|
email=email,
|
||||||
|
enabled=enabled,
|
||||||
|
superuser=superuser,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def remove_user(user_id):
|
def remove_user(user_id):
|
||||||
@ -122,16 +156,16 @@ class Users_Controller:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_id_by_api_token(token: str) -> str:
|
def get_user_id_by_api_token(token: str) -> str:
|
||||||
token_data = authentication.check_no_iat(token)
|
token_data = authentication.check_no_iat(token)
|
||||||
return token_data['user_id']
|
return token_data["user_id"]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_by_api_token(token: str):
|
def get_user_by_api_token(token: str):
|
||||||
_, user = authentication.check(token)
|
_, user = authentication.check(token)
|
||||||
return user
|
return user
|
||||||
|
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
# User Roles Methods
|
# User Roles Methods
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_roles_id(user_id):
|
def get_user_roles_id(user_id):
|
||||||
@ -153,9 +187,9 @@ class Users_Controller:
|
|||||||
def user_role_query(user_id):
|
def user_role_query(user_id):
|
||||||
return users_helper.user_role_query(user_id)
|
return users_helper.user_role_query(user_id)
|
||||||
|
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
# Api Keys Methods
|
# Api Keys Methods
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_api_keys(user_id: str):
|
def get_user_api_keys(user_id: str):
|
||||||
@ -166,10 +200,16 @@ class Users_Controller:
|
|||||||
return users_helper.get_user_api_key(key_id)
|
return users_helper.get_user_api_key(key_id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user_api_key(name: str, user_id: str, superuser: bool = False,
|
def add_user_api_key(
|
||||||
|
name: str,
|
||||||
|
user_id: str,
|
||||||
|
superuser: bool = False,
|
||||||
server_permissions_mask: Optional[str] = None,
|
server_permissions_mask: Optional[str] = None,
|
||||||
crafty_permissions_mask: Optional[str] = None):
|
crafty_permissions_mask: Optional[str] = None,
|
||||||
return users_helper.add_user_api_key(name, user_id, superuser, server_permissions_mask, crafty_permissions_mask)
|
):
|
||||||
|
return users_helper.add_user_api_key(
|
||||||
|
name, user_id, superuser, server_permissions_mask, crafty_permissions_mask
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def delete_user_api_keys(user_id: str):
|
def delete_user_api_keys(user_id: str):
|
||||||
|
@ -3,8 +3,9 @@ import socket
|
|||||||
import time
|
import time
|
||||||
import psutil
|
import psutil
|
||||||
|
|
||||||
|
|
||||||
class BedrockPing:
|
class BedrockPing:
|
||||||
magic = b'\x00\xff\xff\x00\xfe\xfe\xfe\xfe\xfd\xfd\xfd\xfd\x12\x34\x56\x78'
|
magic = b"\x00\xff\xff\x00\xfe\xfe\xfe\xfe\xfd\xfd\xfd\xfd\x12\x34\x56\x78"
|
||||||
fields = { # (len, signed)
|
fields = { # (len, signed)
|
||||||
"byte": (1, False),
|
"byte": (1, False),
|
||||||
"long": (8, True),
|
"long": (8, True),
|
||||||
@ -15,9 +16,9 @@ class BedrockPing:
|
|||||||
"string": (2, False), # strlen is ushort
|
"string": (2, False), # strlen is ushort
|
||||||
"bool": (1, False),
|
"bool": (1, False),
|
||||||
"address": (7, False),
|
"address": (7, False),
|
||||||
"uint24le": (3, False)
|
"uint24le": (3, False),
|
||||||
}
|
}
|
||||||
byte_order = 'big'
|
byte_order = "big"
|
||||||
|
|
||||||
def __init__(self, bedrock_addr, bedrock_port, client_guid=0, timeout=5):
|
def __init__(self, bedrock_addr, bedrock_port, client_guid=0, timeout=5):
|
||||||
self.addr = bedrock_addr
|
self.addr = bedrock_addr
|
||||||
@ -42,18 +43,26 @@ class BedrockPing:
|
|||||||
try:
|
try:
|
||||||
f = BedrockPing.fields[pattern[pi]]
|
f = BedrockPing.fields[pattern[pi]]
|
||||||
except IndexError as index_error:
|
except IndexError as index_error:
|
||||||
raise IndexError("Ran out of pattern with additional bytes remaining") from index_error
|
raise IndexError(
|
||||||
|
"Ran out of pattern with additional bytes remaining"
|
||||||
|
) from index_error
|
||||||
if pattern[pi] == "string":
|
if pattern[pi] == "string":
|
||||||
shl = f[0] # string header length
|
shl = f[0] # string header length
|
||||||
sl = int.from_bytes(in_bytes[bi:bi+shl], BedrockPing.byte_order, signed=f[1]) # string length
|
sl = int.from_bytes(
|
||||||
|
in_bytes[bi : bi + shl], BedrockPing.byte_order, signed=f[1]
|
||||||
|
) # string length
|
||||||
l = shl + sl
|
l = shl + sl
|
||||||
ret.append(in_bytes[bi+shl:bi+shl+sl].decode('ascii'))
|
ret.append(in_bytes[bi + shl : bi + shl + sl].decode("ascii"))
|
||||||
elif pattern[pi] == "magic":
|
elif pattern[pi] == "magic":
|
||||||
l = f[0] # length of field
|
l = f[0] # length of field
|
||||||
ret.append(in_bytes[bi : bi + l])
|
ret.append(in_bytes[bi : bi + l])
|
||||||
else:
|
else:
|
||||||
l = f[0] # length of field
|
l = f[0] # length of field
|
||||||
ret.append(int.from_bytes(in_bytes[bi:bi+l], BedrockPing.byte_order, signed=f[1]))
|
ret.append(
|
||||||
|
int.from_bytes(
|
||||||
|
in_bytes[bi : bi + l], BedrockPing.byte_order, signed=f[1]
|
||||||
|
)
|
||||||
|
)
|
||||||
bi += l
|
bi += l
|
||||||
pi += 1
|
pi += 1
|
||||||
return ret
|
return ret
|
||||||
@ -64,8 +73,8 @@ class BedrockPing:
|
|||||||
return time.perf_counter_ns() // 1000000
|
return time.perf_counter_ns() // 1000000
|
||||||
|
|
||||||
def __sendping(self):
|
def __sendping(self):
|
||||||
pack_id = BedrockPing.__byter(0x01, 'byte')
|
pack_id = BedrockPing.__byter(0x01, "byte")
|
||||||
now = BedrockPing.__byter(BedrockPing.__get_time(), 'ulong')
|
now = BedrockPing.__byter(BedrockPing.__get_time(), "ulong")
|
||||||
guid = self.guid_bytes
|
guid = self.guid_bytes
|
||||||
d2s = pack_id + now + BedrockPing.magic + guid
|
d2s = pack_id + now + BedrockPing.magic + guid
|
||||||
# print("S:", d2s)
|
# print("S:", d2s)
|
||||||
@ -73,14 +82,16 @@ class BedrockPing:
|
|||||||
|
|
||||||
def __recvpong(self):
|
def __recvpong(self):
|
||||||
data = self.sock.recv(4096)
|
data = self.sock.recv(4096)
|
||||||
if data[0] == 0x1c:
|
if data[0] == 0x1C:
|
||||||
ret = {}
|
ret = {}
|
||||||
sliced = BedrockPing.__slice(data,["byte","ulong","ulong","magic","string"])
|
sliced = BedrockPing.__slice(
|
||||||
|
data, ["byte", "ulong", "ulong", "magic", "string"]
|
||||||
|
)
|
||||||
if sliced[3] != BedrockPing.magic:
|
if sliced[3] != BedrockPing.magic:
|
||||||
raise ValueError(f"Incorrect magic received ({sliced[3]})")
|
raise ValueError(f"Incorrect magic received ({sliced[3]})")
|
||||||
ret["server_guid"] = sliced[2]
|
ret["server_guid"] = sliced[2]
|
||||||
ret["server_string_raw"] = sliced[4]
|
ret["server_string_raw"] = sliced[4]
|
||||||
server_info = sliced[4].split(';')
|
server_info = sliced[4].split(";")
|
||||||
ret["server_edition"] = server_info[0]
|
ret["server_edition"] = server_info[0]
|
||||||
ret["server_motd"] = (server_info[1], server_info[7])
|
ret["server_motd"] = (server_info[1], server_info[7])
|
||||||
ret["server_protocol_version"] = server_info[2]
|
ret["server_protocol_version"] = server_info[2]
|
||||||
@ -103,5 +114,7 @@ class BedrockPing:
|
|||||||
self.__sendping()
|
self.__sendping()
|
||||||
return self.__recvpong()
|
return self.__recvpong()
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
print(f"E: {e}, checking next packet. Retries remaining: {rtr}/{retries}")
|
print(
|
||||||
|
f"E: {e}, checking next packet. Retries remaining: {rtr}/{retries}"
|
||||||
|
)
|
||||||
rtr -= 1
|
rtr -= 1
|
||||||
|
@ -13,23 +13,24 @@ from app.classes.shared.console import console
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Server:
|
class Server:
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.description = data.get('description')
|
self.description = data.get("description")
|
||||||
# print(self.description)
|
# print(self.description)
|
||||||
if isinstance(self.description, dict):
|
if isinstance(self.description, dict):
|
||||||
|
|
||||||
# cat server
|
# cat server
|
||||||
if "translate" in self.description:
|
if "translate" in self.description:
|
||||||
self.description = self.description['translate']
|
self.description = self.description["translate"]
|
||||||
|
|
||||||
# waterfall / bungee
|
# waterfall / bungee
|
||||||
elif 'extra' in self.description:
|
elif "extra" in self.description:
|
||||||
lines = []
|
lines = []
|
||||||
|
|
||||||
description = self.description
|
description = self.description
|
||||||
if 'extra' in description.keys():
|
if "extra" in description.keys():
|
||||||
for e in description['extra']:
|
for e in description["extra"]:
|
||||||
# Conversion format code needed only for Java Version
|
# Conversion format code needed only for Java Version
|
||||||
lines.append(get_code_format("reset"))
|
lines.append(get_code_format("reset"))
|
||||||
if "bold" in e.keys():
|
if "bold" in e.keys():
|
||||||
@ -43,36 +44,36 @@ class Server:
|
|||||||
if "obfuscated" in e.keys():
|
if "obfuscated" in e.keys():
|
||||||
lines.append(get_code_format("obfuscated"))
|
lines.append(get_code_format("obfuscated"))
|
||||||
if "color" in e.keys():
|
if "color" in e.keys():
|
||||||
lines.append(get_code_format(e['color']))
|
lines.append(get_code_format(e["color"]))
|
||||||
# Then append the text
|
# Then append the text
|
||||||
if "text" in e.keys():
|
if "text" in e.keys():
|
||||||
if e['text'] == '\n':
|
if e["text"] == "\n":
|
||||||
lines.append("§§")
|
lines.append("§§")
|
||||||
else:
|
else:
|
||||||
lines.append(e['text'])
|
lines.append(e["text"])
|
||||||
|
|
||||||
total_text = " ".join(lines)
|
total_text = " ".join(lines)
|
||||||
self.description = total_text
|
self.description = total_text
|
||||||
|
|
||||||
# normal MC
|
# normal MC
|
||||||
else:
|
else:
|
||||||
self.description = self.description['text']
|
self.description = self.description["text"]
|
||||||
|
|
||||||
self.icon = base64.b64decode(data.get('favicon', '')[22:])
|
self.icon = base64.b64decode(data.get("favicon", "")[22:])
|
||||||
try:
|
try:
|
||||||
self.players = Players(data['players']).report()
|
self.players = Players(data["players"]).report()
|
||||||
except KeyError:
|
except KeyError:
|
||||||
logger.error("Error geting player information key error")
|
logger.error("Error geting player information key error")
|
||||||
self.players = []
|
self.players = []
|
||||||
self.version = data['version']['name']
|
self.version = data["version"]["name"]
|
||||||
self.protocol = data['version']['protocol']
|
self.protocol = data["version"]["protocol"]
|
||||||
|
|
||||||
|
|
||||||
class Players(list):
|
class Players(list):
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
super().__init__(Player(x) for x in data.get('sample', []))
|
super().__init__(Player(x) for x in data.get("sample", []))
|
||||||
self.max = data['max']
|
self.max = data["max"]
|
||||||
self.online = data['online']
|
self.online = data["online"]
|
||||||
|
|
||||||
def report(self):
|
def report(self):
|
||||||
players = []
|
players = []
|
||||||
@ -80,35 +81,34 @@ class Players(list):
|
|||||||
for x in self:
|
for x in self:
|
||||||
players.append(str(x))
|
players.append(str(x))
|
||||||
|
|
||||||
r_data = {
|
r_data = {"online": self.online, "max": self.max, "players": players}
|
||||||
'online': self.online,
|
|
||||||
'max': self.max,
|
|
||||||
'players': players
|
|
||||||
}
|
|
||||||
|
|
||||||
return json.dumps(r_data)
|
return json.dumps(r_data)
|
||||||
|
|
||||||
|
|
||||||
class Player:
|
class Player:
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.id = data['id']
|
self.id = data["id"]
|
||||||
self.name = data['name']
|
self.name = data["name"]
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
|
|
||||||
def get_code_format(format_name):
|
def get_code_format(format_name):
|
||||||
root_dir = os.path.abspath(os.path.curdir)
|
root_dir = os.path.abspath(os.path.curdir)
|
||||||
format_file = os.path.join(root_dir, 'app', 'config', 'motd_format.json')
|
format_file = os.path.join(root_dir, "app", "config", "motd_format.json")
|
||||||
try:
|
try:
|
||||||
with open(format_file, "r", encoding='utf-8') as f:
|
with open(format_file, "r", encoding="utf-8") as f:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
|
|
||||||
if format_name in data.keys():
|
if format_name in data.keys():
|
||||||
return data.get(format_name)
|
return data.get(format_name)
|
||||||
else:
|
else:
|
||||||
logger.error(f"Format MOTD Error: format name {format_name} does not exist")
|
logger.error(f"Format MOTD Error: format name {format_name} does not exist")
|
||||||
console.error(f"Format MOTD Error: format name {format_name} does not exist")
|
console.error(
|
||||||
|
f"Format MOTD Error: format name {format_name} does not exist"
|
||||||
|
)
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
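# As a rough illustration of how the description parser and get_code_format() cooperate,
# the sketch below walks a hypothetical chat-component "extra" list and emits §-style
# codes. The MOTD_FORMAT mapping is invented for the example; the real values live in
# app/config/motd_format.json.
MOTD_FORMAT = {"reset": "§r", "bold": "§l", "obfuscated": "§k", "gold": "§6"}

def render_extra(extra):
    # Convert a Java-edition "extra" component list into one §-coded string.
    lines = []
    for e in extra:
        lines.append(MOTD_FORMAT["reset"])
        if e.get("bold"):
            lines.append(MOTD_FORMAT["bold"])
        if "color" in e:
            lines.append(MOTD_FORMAT.get(e["color"], ""))
        if "text" in e:
            lines.append("§§" if e["text"] == "\n" else e["text"])
    return " ".join(lines)

print(render_extra([{"color": "gold", "bold": True, "text": "A Minecraft Server"}]))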
@ -128,10 +128,10 @@ def ping(ip, port):
|
|||||||
if not k:
|
if not k:
|
||||||
return 0
|
return 0
|
||||||
k = k[0]
|
k = k[0]
|
||||||
i |= (k & 0x7f) << (j * 7)
|
i |= (k & 0x7F) << (j * 7)
|
||||||
j += 1
|
j += 1
|
||||||
if j > 5:
|
if j > 5:
|
||||||
raise ValueError('var_int too big')
|
raise ValueError("var_int too big")
|
||||||
if not k & 0x80:
|
if not k & 0x80:
|
||||||
return i
|
return i
|
||||||
|
|
||||||
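# For context, read_var_int() above decodes the protocol's VarInt length prefixes
# (7 data bits per byte, high bit = continuation). A standalone sketch of the same
# scheme in both directions, written for unsigned values and independent of the socket:
def write_var_int(value: int) -> bytes:
    # Encode an unsigned int as a VarInt.
    out = b""
    while True:
        byte = value & 0x7F
        value >>= 7
        if value:
            out += bytes([byte | 0x80])  # more bytes follow
        else:
            return out + bytes([byte])

def decode_var_int(data: bytes) -> int:
    # Decode a VarInt sitting at the start of a byte string.
    result, shift = 0, 0
    for byte in data:
        result |= (byte & 0x7F) << shift
        shift += 7
        if not byte & 0x80:
            return result
        if shift > 35:
            raise ValueError("var_int too big")
    raise ValueError("truncated var_int")

assert decode_var_int(write_var_int(300)) == 300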
@ -143,15 +143,15 @@ def ping(ip, port):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
host = ip.encode('utf-8')
|
host = ip.encode("utf-8")
|
||||||
data = b'' # wiki.vg/Server_List_Ping
|
data = b"" # wiki.vg/Server_List_Ping
|
||||||
data += b'\x00' # packet ID
|
data += b"\x00" # packet ID
|
||||||
data += b'\x04' # protocol variant
|
data += b"\x04" # protocol variant
|
||||||
data += struct.pack('>b', len(host)) + host
|
data += struct.pack(">b", len(host)) + host
|
||||||
data += struct.pack('>H', port)
|
data += struct.pack(">H", port)
|
||||||
data += b'\x01' # next state
|
data += b"\x01" # next state
|
||||||
data = struct.pack('>b', len(data)) + data
|
data = struct.pack(">b", len(data)) + data
|
||||||
sock.sendall(data + b'\x01\x00') # handshake + status ping
|
sock.sendall(data + b"\x01\x00") # handshake + status ping
|
||||||
length = read_var_int() # full packet length
|
length = read_var_int() # full packet length
|
||||||
if length < 10:
|
if length < 10:
|
||||||
if length < 0:
|
if length < 0:
|
||||||
@ -161,7 +161,7 @@ def ping(ip, port):
|
|||||||
|
|
||||||
sock.recv(1) # packet type, 0 for pings
|
sock.recv(1) # packet type, 0 for pings
|
||||||
length = read_var_int() # string length
|
length = read_var_int() # string length
|
||||||
data = b''
|
data = b""
|
||||||
while len(data) != length:
|
while len(data) != length:
|
||||||
chunk = sock.recv(length - len(data))
|
chunk = sock.recv(length - len(data))
|
||||||
if not chunk:
|
if not chunk:
|
||||||
@ -176,12 +176,13 @@ def ping(ip, port):
|
|||||||
finally:
|
finally:
|
||||||
sock.close()
|
sock.close()
|
||||||
|
|
||||||
|
|
||||||
# For the rest of requests see wiki.vg/Protocol
|
# For the rest of requests see wiki.vg/Protocol
|
||||||
def ping_bedrock(ip, port):
|
def ping_bedrock(ip, port):
|
||||||
rd = random.Random()
|
rd = random.Random()
|
||||||
try:
|
try:
|
||||||
# pylint: disable=consider-using-f-string
|
# pylint: disable=consider-using-f-string
|
||||||
rd.seed(''.join(re.findall('..', '%012x' % uuid.getnode())))
|
rd.seed("".join(re.findall("..", "%012x" % uuid.getnode())))
|
||||||
client_guid = uuid.UUID(int=rd.getrandbits(32)).int
|
client_guid = uuid.UUID(int=rd.getrandbits(32)).int
|
||||||
except:
|
except:
|
||||||
client_guid = 0
|
client_guid = 0
|
||||||
|
@ -1,44 +1,45 @@
|
|||||||
import pprint
|
import pprint
|
||||||
import os
|
import os
|
||||||
|
|
||||||
class ServerProps:
|
|
||||||
|
|
||||||
|
class ServerProps:
|
||||||
def __init__(self, filepath):
|
def __init__(self, filepath):
|
||||||
self.filepath = filepath
|
self.filepath = filepath
|
||||||
self.props = self._parse()
|
self.props = self._parse()
|
||||||
|
|
||||||
def _parse(self):
|
def _parse(self):
|
||||||
"""Loads and parses the file specified in self.filepath"""
|
# Loads and parses the file specified in self.filepath
|
||||||
with open(self.filepath, encoding='utf-8') as fp:
|
with open(self.filepath, encoding="utf-8") as fp:
|
||||||
line = fp.readline()
|
line = fp.readline()
|
||||||
d = {}
|
d = {}
|
||||||
if os.path.exists(".header"):
|
if os.path.exists(".header"):
|
||||||
os.remove(".header")
|
os.remove(".header")
|
||||||
while line:
|
while line:
|
||||||
if '#' != line[0]:
|
if "#" != line[0]:
|
||||||
s = line
|
s = line
|
||||||
s1 = s[:s.find('=')]
|
s1 = s[: s.find("=")]
|
||||||
if '\n' in s:
|
if "\n" in s:
|
||||||
s2 = s[s.find('=')+1:s.find('\n')]
|
s2 = s[s.find("=") + 1 : s.find("\n")]
|
||||||
else:
|
else:
|
||||||
s2 = s[s.find('=')+1:]
|
s2 = s[s.find("=") + 1 :]
|
||||||
d[s1] = s2
|
d[s1] = s2
|
||||||
else:
|
else:
|
||||||
with open(".header", "a+", encoding='utf-8') as h:
|
with open(".header", "a+", encoding="utf-8") as h:
|
||||||
h.write(line)
|
h.write(line)
|
||||||
line = fp.readline()
|
line = fp.readline()
|
||||||
return d
|
return d
|
||||||
|
|
||||||
def print(self):
|
def print(self):
|
||||||
"""Prints the properties dictionary (using pprint)"""
|
# Prints the properties dictionary (using pprint)
|
||||||
pprint.pprint(self.props)
|
pprint.pprint(self.props)
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
"""Returns the properties dictionary"""
|
# Returns the properties dictionary
|
||||||
return self.props
|
return self.props
|
||||||
|
|
||||||
def update(self, key, val):
|
def update(self, key, val):
|
||||||
"""Updates property in the properties dictionary [ update("pvp", "true") ] and returns boolean condition"""
|
# Updates property in the properties dictionary [ update("pvp", "true") ]
|
||||||
|
# and returns boolean condition
|
||||||
if key in self.props.keys():
|
if key in self.props.keys():
|
||||||
self.props[key] = val
|
self.props[key] = val
|
||||||
return True
|
return True
|
||||||
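# A short usage sketch of the ServerProps helper above; the file path is hypothetical.
# _parse() keeps '#' comment lines aside in a .header scratch file and exposes the
# remaining key=value pairs as a dict.
props = ServerProps("/var/opt/minecraft/server/server.properties")  # hypothetical path

print(props.get().get("pvp"))     # read a parsed value
if props.update("pvp", "false"):  # True only if the key already exists
    props.save()                  # rewrites the file: header comments first, then the pairs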
@ -46,10 +47,10 @@ class ServerProps:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
"""Writes to the new file"""
|
# Writes to the new file
|
||||||
with open(self.filepath, "a+", encoding='utf-8') as f:
|
with open(self.filepath, "a+", encoding="utf-8") as f:
|
||||||
f.truncate(0)
|
f.truncate(0)
|
||||||
with open(".header", encoding='utf-8') as header:
|
with open(".header", encoding="utf-8") as header:
|
||||||
line = header.readline()
|
line = header.readline()
|
||||||
while line:
|
while line:
|
||||||
f.write(line)
|
f.write(line)
|
||||||
|
@ -18,8 +18,8 @@ try:
|
|||||||
except ModuleNotFoundError as err:
|
except ModuleNotFoundError as err:
|
||||||
helper.auto_installer_fix(err)
|
helper.auto_installer_fix(err)
|
||||||
|
|
||||||
class ServerJars:
|
|
||||||
|
|
||||||
|
class ServerJars:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.base_url = "https://serverjars.com"
|
self.base_url = "https://serverjars.com"
|
||||||
|
|
||||||
@ -41,8 +41,8 @@ class ServerJars:
|
|||||||
logger.error(f"Unable to parse serverjar.com api result due to error: {e}")
|
logger.error(f"Unable to parse serverjar.com api result due to error: {e}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
api_result = api_data.get('status')
|
api_result = api_data.get("status")
|
||||||
api_response = api_data.get('response', {})
|
api_response = api_data.get("response", {})
|
||||||
|
|
||||||
if api_result != "success":
|
if api_result != "success":
|
||||||
logger.error(f"Api returned a failed status: {api_result}")
|
logger.error(f"Api returned a failed status: {api_result}")
|
||||||
@ -55,7 +55,7 @@ class ServerJars:
|
|||||||
cache_file = helper.serverjar_cache
|
cache_file = helper.serverjar_cache
|
||||||
cache = {}
|
cache = {}
|
||||||
try:
|
try:
|
||||||
with open(cache_file, "r", encoding='utf-8') as f:
|
with open(cache_file, "r", encoding="utf-8") as f:
|
||||||
cache = json.load(f)
|
cache = json.load(f)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -65,7 +65,7 @@ class ServerJars:
|
|||||||
|
|
||||||
def get_serverjar_data(self):
|
def get_serverjar_data(self):
|
||||||
data = self._read_cache()
|
data = self._read_cache()
|
||||||
return data.get('servers')
|
return data.get("servers")
|
||||||
|
|
||||||
def get_serverjar_data_sorted(self):
|
def get_serverjar_data_sorted(self):
|
||||||
data = self.get_serverjar_data()
|
data = self.get_serverjar_data()
|
||||||
@ -80,10 +80,10 @@ class ServerJars:
|
|||||||
try:
|
try:
|
||||||
return int(x)
|
return int(x)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
temp = x.split('-')
|
temp = x.split("-")
|
||||||
return to_int(temp[0]) + str_to_int(temp[1]) / 100000
|
return to_int(temp[0]) + str_to_int(temp[1]) / 100000
|
||||||
|
|
||||||
sort_key_fn = lambda x: [to_int(y) for y in x.split('.')]
|
sort_key_fn = lambda x: [to_int(y) for y in x.split(".")]
|
||||||
|
|
||||||
for key in data.keys():
|
for key in data.keys():
|
||||||
data[key] = sorted(data[key], key=sort_key_fn)
|
data[key] = sorted(data[key], key=sort_key_fn)
|
||||||
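# The sort key above splits a version string on "." and runs each piece through
# to_int(), so suffixed pieces such as "16-rc1" sort fractionally after their base
# number. A worked example with a simplified stand-in for that helper (the real
# to_int/str_to_int pair lives in this module; the tiebreaker below is invented):
def to_int_demo(piece: str) -> float:
    try:
        return int(piece)
    except ValueError:
        base, _, suffix = piece.partition("-")
        return int(base) + len(suffix) / 100000  # crude stand-in tiebreaker

versions = ["1.19", "1.8.9", "1.19.2", "1.16-rc1", "1.16"]
print(sorted(versions, key=lambda v: [to_int_demo(p) for p in v.split(".")]))
# -> ['1.8.9', '1.16', '1.16-rc1', '1.19', '1.19.2']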
@ -125,10 +125,7 @@ class ServerJars:
|
|||||||
if cache_old:
|
if cache_old:
|
||||||
logger.info("Cache file is over 1 day old, refreshing")
|
logger.info("Cache file is over 1 day old, refreshing")
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
data = {"last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"), "servers": {}}
|
|
||||||
jar_types = self._get_server_type_list()
|
jar_types = self._get_server_type_list()
|
||||||
|
|
||||||
@ -140,36 +137,39 @@ class ServerJars:
|
|||||||
# jar versions for this server
|
# jar versions for this server
|
||||||
versions = self._get_jar_details(s)
|
versions = self._get_jar_details(s)
|
||||||
|
|
||||||
# add these versions (a list) to the dict with
# a key of the server type
data["servers"].update({s: versions})
|
|
||||||
# save our cache
|
# save our cache
|
||||||
try:
|
try:
|
||||||
with open(cache_file, "w", encoding='utf-8') as f:
|
with open(cache_file, "w", encoding="utf-8") as f:
|
||||||
f.write(json.dumps(data, indent=4))
|
f.write(json.dumps(data, indent=4))
|
||||||
logger.info("Cache file refreshed")
|
logger.info("Cache file refreshed")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Unable to update serverjars.com cache file: {e}")
|
logger.error(f"Unable to update serverjars.com cache file: {e}")
|
||||||
|
|
||||||
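# The cache written by _refresh_cache() is keyed by jar type, with a refresh timestamp
# used for the one-day staleness check. A sketch of the JSON shape and how the staleness
# test can be expressed; the version lists here are illustrative, not real API output.
import json
from datetime import datetime, timedelta

example_cache = {
    "last_refreshed": "01/15/2022, 04:00:00",
    "servers": {"paper": ["1.18.1", "1.18.2"], "vanilla": ["1.18.2"]},  # made-up values
}

refreshed = datetime.strptime(example_cache["last_refreshed"], "%m/%d/%Y, %H:%M:%S")
cache_old = datetime.now() - refreshed > timedelta(days=1)
print(json.dumps(example_cache, indent=4), cache_old)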
def _get_jar_details(self, jar_type='servers'):
|
def _get_jar_details(self, jar_type="servers"):
|
||||||
url = f'/api/fetchAll/{jar_type}'
|
url = f"/api/fetchAll/{jar_type}"
|
||||||
response = self._get_api_result(url)
|
response = self._get_api_result(url)
|
||||||
temp = []
|
temp = []
|
||||||
for v in response:
|
for v in response:
|
||||||
temp.append(v.get('version'))
|
temp.append(v.get("version"))
|
||||||
time.sleep(.5)
|
time.sleep(0.5)
|
||||||
return temp
|
return temp
|
||||||
|
|
||||||
def _get_server_type_list(self):
|
def _get_server_type_list(self):
|
||||||
url = '/api/fetchTypes/'
|
url = "/api/fetchTypes/"
|
||||||
response = self._get_api_result(url)
|
response = self._get_api_result(url)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def download_jar(self, server, version, path, server_id):
|
def download_jar(self, server, version, path, server_id):
|
||||||
update_thread = threading.Thread(
    target=self.a_download_jar,
    daemon=True,
    args=(server, version, path, server_id),
)
update_thread.start()
|
update_thread.start()
|
||||||
|
|
||||||
def a_download_jar(self, server, version, path, server_id):
|
def a_download_jar(self, server, version, path, server_id):
|
||||||
@ -178,14 +178,13 @@ class ServerJars:
|
|||||||
fetch_url = f"{self.base_url}/api/fetchJar/{server}/{version}"
|
fetch_url = f"{self.base_url}/api/fetchJar/{server}/{version}"
|
||||||
server_users = server_permissions.get_server_user_list(server_id)
|
server_users = server_permissions.get_server_user_list(server_id)
|
||||||
|
|
||||||
# We need to make sure the server is registered before
# we submit a db update for its stats.
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
Servers_Controller.set_download(server_id)
|
Servers_Controller.set_download(server_id)
|
||||||
for user in server_users:
|
for user in server_users:
|
||||||
websocket_helper.broadcast_user(user, "send_start_reload", {})
|
|
||||||
break
|
break
|
||||||
except:
|
except:
|
||||||
@ -194,25 +193,27 @@ class ServerJars:
|
|||||||
# open a file stream
|
# open a file stream
|
||||||
with requests.get(fetch_url, timeout=2, stream=True) as r:
|
with requests.get(fetch_url, timeout=2, stream=True) as r:
|
||||||
try:
|
try:
|
||||||
with open(path, 'wb') as output:
|
with open(path, "wb") as output:
|
||||||
shutil.copyfileobj(r.raw, output)
|
shutil.copyfileobj(r.raw, output)
|
||||||
Servers_Controller.finish_download(server_id)
|
Servers_Controller.finish_download(server_id)
|
||||||
|
|
||||||
for user in server_users:
|
for user in server_users:
|
||||||
websocket_helper.broadcast_user(
    user, "notification", "Executable download finished"
)
time.sleep(3)
websocket_helper.broadcast_user(user, "send_start_reload", {})
return True
|
return True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Unable to save jar to {path} due to error:{e}")
|
logger.error(f"Unable to save jar to {path} due to error:{e}")
|
||||||
Servers_Controller.finish_download(server_id)
|
Servers_Controller.finish_download(server_id)
|
||||||
server_users = server_permissions.get_server_user_list(server_id)
|
server_users = server_permissions.get_server_user_list(server_id)
|
||||||
for user in server_users:
|
for user in server_users:
|
||||||
websocket_helper.broadcast_user(
    user, "notification", "Executable download finished"
)
time.sleep(3)
websocket_helper.broadcast_user(user, "send_start_reload", {})
|
|
||||||
return False
|
return False
|
||||||
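# a_download_jar() streams the jar straight to disk rather than buffering it in memory.
# A minimal standalone sketch of that requests + shutil pattern, with placeholder URL
# and path values:
import shutil
import requests

def stream_to_file(url: str, path: str) -> bool:
    # Stream an HTTP response body to a local file chunk by chunk.
    with requests.get(url, timeout=2, stream=True) as r:
        r.raise_for_status()
        with open(path, "wb") as output:
            shutil.copyfileobj(r.raw, output)
    return True

# stream_to_file("https://serverjars.com/api/fetchJar/paper/1.18.2", "/tmp/server.jar")  # example values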
|
|
||||||
|
@ -11,8 +11,8 @@ from app.classes.shared.helpers import helper
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class Stats:
|
|
||||||
|
|
||||||
|
class Stats:
|
||||||
def __init__(self, controller):
|
def __init__(self, controller):
|
||||||
self.controller = controller
|
self.controller = controller
|
||||||
|
|
||||||
@ -24,30 +24,26 @@ class Stats:
|
|||||||
except NotImplementedError:
|
except NotImplementedError:
|
||||||
cpu_freq = psutil._common.scpufreq(current=0, min=0, max=0)
|
cpu_freq = psutil._common.scpufreq(current=0, min=0, max=0)
|
||||||
node_stats = {
|
node_stats = {
|
||||||
'boot_time': str(boot_time),
|
"boot_time": str(boot_time),
|
||||||
'cpu_usage': psutil.cpu_percent(interval=0.5) / psutil.cpu_count(),
|
"cpu_usage": psutil.cpu_percent(interval=0.5) / psutil.cpu_count(),
|
||||||
'cpu_count': psutil.cpu_count(),
|
"cpu_count": psutil.cpu_count(),
|
||||||
'cpu_cur_freq': round(cpu_freq[0], 2),
|
"cpu_cur_freq": round(cpu_freq[0], 2),
|
||||||
'cpu_max_freq': cpu_freq[2],
|
"cpu_max_freq": cpu_freq[2],
|
||||||
'mem_percent': psutil.virtual_memory()[2],
|
"mem_percent": psutil.virtual_memory()[2],
|
||||||
'mem_usage': helper.human_readable_file_size(psutil.virtual_memory()[3]),
|
"mem_usage": helper.human_readable_file_size(psutil.virtual_memory()[3]),
|
||||||
'mem_total': helper.human_readable_file_size(psutil.virtual_memory()[0]),
|
"mem_total": helper.human_readable_file_size(psutil.virtual_memory()[0]),
|
||||||
'disk_data': self._all_disk_usage()
|
"disk_data": self._all_disk_usage(),
|
||||||
}
|
}
|
||||||
# server_stats = self.get_servers_stats()
|
# server_stats = self.get_servers_stats()
|
||||||
# data['servers'] = server_stats
|
# data['servers'] = server_stats
|
||||||
data['node_stats'] = node_stats
|
data["node_stats"] = node_stats
|
||||||
|
|
||||||
return data
|
return data
|
||||||
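# For reference, get_node_stats() returns a plain dict, so callers can pick fields
# directly; a rough sketch of the payload shape with invented values (some keys omitted):
example = {
    "node_stats": {
        "boot_time": "2022-01-15 04:00:00",
        "cpu_usage": 3.1,       # percent, already divided by the core count
        "cpu_count": 8,
        "mem_percent": 42.0,
        "mem_usage": "6.7 GB",  # humanised by helper.human_readable_file_size
        "mem_total": "16.0 GB",
        "disk_data": [],
    }
}
print(example["node_stats"]["cpu_usage"])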
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _get_process_stats(process):
|
def _get_process_stats(process):
|
||||||
if process is None:
|
if process is None:
|
||||||
process_stats = {"cpu_usage": 0, "memory_usage": 0, "mem_percentage": 0}
return process_stats
|
return process_stats
|
||||||
else:
|
else:
|
||||||
process_pid = process.pid
|
process_pid = process.pid
|
||||||
@ -63,23 +59,25 @@ class Stats:
|
|||||||
# this is a faster way of getting data for a process
|
# this is a faster way of getting data for a process
|
||||||
with p.oneshot():
|
with p.oneshot():
|
||||||
process_stats = {
|
process_stats = {
|
||||||
'cpu_usage': real_cpu,
|
"cpu_usage": real_cpu,
|
||||||
'memory_usage': helper.human_readable_file_size(p.memory_info()[0]),
|
"memory_usage": helper.human_readable_file_size(p.memory_info()[0]),
|
||||||
'mem_percentage': round(p.memory_percent(), 0)
|
"mem_percentage": round(p.memory_percent(), 0),
|
||||||
}
|
}
|
||||||
return process_stats
|
return process_stats
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Unable to get process details for pid: {process_pid} due to error: {e}")
|
logger.error(
|
||||||
|
f"Unable to get process details for pid: {process_pid} Error: {e}"
|
||||||
|
)
|
||||||
|
|
||||||
# Dummy Data
|
# Dummy Data
|
||||||
process_stats = {
|
process_stats = {
|
||||||
'cpu_usage': 0,
|
"cpu_usage": 0,
|
||||||
'memory_usage': 0,
|
"memory_usage": 0,
|
||||||
}
|
}
|
||||||
return process_stats
|
return process_stats
|
||||||
|
|
||||||
# shamelessly stolen from https://github.com/giampaolo/psutil/blob/master/scripts/disk_usage.py
|
# Source: https://github.com/giampaolo/psutil/blob/master/scripts/disk_usage.py
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _all_disk_usage():
|
def _all_disk_usage():
|
||||||
disk_data = []
|
disk_data = []
|
||||||
@ -87,7 +85,7 @@ class Stats:
|
|||||||
|
|
||||||
for part in psutil.disk_partitions(all=False):
|
for part in psutil.disk_partitions(all=False):
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
if 'cdrom' in part.opts or part.fstype == '':
|
if "cdrom" in part.opts or part.fstype == "":
|
||||||
# skip cd-rom drives with no disk in it; they may raise
|
# skip cd-rom drives with no disk in it; they may raise
|
||||||
# ENOENT, pop-up a Windows GUI error for a non-ready
|
# ENOENT, pop-up a Windows GUI error for a non-ready
|
||||||
# partition or just hang.
|
# partition or just hang.
|
||||||
@ -95,13 +93,13 @@ class Stats:
|
|||||||
usage = psutil.disk_usage(part.mountpoint)
|
usage = psutil.disk_usage(part.mountpoint)
|
||||||
disk_data.append(
|
disk_data.append(
|
||||||
{
|
{
|
||||||
'device': part.device,
|
"device": part.device,
|
||||||
'total': helper.human_readable_file_size(usage.total),
|
"total": helper.human_readable_file_size(usage.total),
|
||||||
'used': helper.human_readable_file_size(usage.used),
|
"used": helper.human_readable_file_size(usage.used),
|
||||||
'free': helper.human_readable_file_size(usage.free),
|
"free": helper.human_readable_file_size(usage.free),
|
||||||
'percent_used': int(usage.percent),
|
"percent_used": int(usage.percent),
|
||||||
'fs': part.fstype,
|
"fs": part.fstype,
|
||||||
'mount': part.mountpoint
|
"mount": part.mountpoint,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -128,22 +126,20 @@ class Stats:
|
|||||||
# server_settings = server.get('server_settings', {})
|
# server_settings = server.get('server_settings', {})
|
||||||
# server_data = server.get('server_data_obj', {})
|
# server_data = server.get('server_data_obj', {})
|
||||||
|
|
||||||
|
|
||||||
# TODO: search server properties file for possible override of 127.0.0.1
|
# TODO: search server properties file for possible override of 127.0.0.1
|
||||||
internal_ip = server['server_ip']
|
internal_ip = server["server_ip"]
|
||||||
server_port = server['server_port']
|
server_port = server["server_port"]
|
||||||
|
|
||||||
logger.debug("Pinging {internal_ip} on port {server_port}")
|
logger.debug("Pinging {internal_ip} on port {server_port}")
|
||||||
if servers_helper.get_server_type_by_id(server_id) != 'minecraft-bedrock':
|
if servers_helper.get_server_type_by_id(server_id) != "minecraft-bedrock":
|
||||||
int_mc_ping = ping(internal_ip, int(server_port))
|
int_mc_ping = ping(internal_ip, int(server_port))
|
||||||
|
|
||||||
|
|
||||||
ping_data = {}
|
ping_data = {}
|
||||||
|
|
||||||
# if we got a good ping return, let's parse it
|
# if we got a good ping return, let's parse it
|
||||||
if int_mc_ping:
|
if int_mc_ping:
|
||||||
ping_data = Stats.parse_server_ping(int_mc_ping)
|
ping_data = Stats.parse_server_ping(int_mc_ping)
|
||||||
return ping_data['players']
|
return ping_data["players"]
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -156,21 +152,20 @@ class Stats:
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.info(f"Unable to read json from ping_obj: {e}")
|
logger.info(f"Unable to read json from ping_obj: {e}")
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
server_icon = base64.encodebytes(ping_obj.icon)
|
server_icon = base64.encodebytes(ping_obj.icon)
|
||||||
server_icon = server_icon.decode('utf-8')
|
server_icon = server_icon.decode("utf-8")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
server_icon = False
|
server_icon = False
|
||||||
logger.info(f"Unable to read the server icon : {e}")
|
logger.info(f"Unable to read the server icon : {e}")
|
||||||
|
|
||||||
ping_data = {
|
ping_data = {
|
||||||
'online': online_stats.get("online", 0),
|
"online": online_stats.get("online", 0),
|
||||||
'max': online_stats.get('max', 0),
|
"max": online_stats.get("max", 0),
|
||||||
'players': online_stats.get('players', 0),
|
"players": online_stats.get("players", 0),
|
||||||
'server_description': ping_obj.description,
|
"server_description": ping_obj.description,
|
||||||
'server_version': ping_obj.version,
|
"server_version": ping_obj.version,
|
||||||
'server_icon': server_icon
|
"server_icon": server_icon,
|
||||||
}
|
}
|
||||||
|
|
||||||
return ping_data
|
return ping_data
|
||||||
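# Putting the pieces together, a Java-edition status check might look like the sketch
# below; ping() returns a falsy value when the server does not answer, so the parse
# step is guarded. The address is a placeholder.
result = ping("127.0.0.1", 25565)  # placeholder host/port
if result:
    ping_data = Stats.parse_server_ping(result)
    print(ping_data["online"], "/", ping_data["max"], "players online")
else:
    print("server did not answer the status ping")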
@ -179,59 +174,62 @@ class Stats:
|
|||||||
def parse_server_RakNet_ping(ping_obj: object):
|
def parse_server_RakNet_ping(ping_obj: object):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
server_icon = base64.encodebytes(ping_obj['icon'])
|
server_icon = base64.encodebytes(ping_obj["icon"])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
server_icon = False
|
server_icon = False
|
||||||
logger.info(f"Unable to read the server icon : {e}")
|
logger.info(f"Unable to read the server icon : {e}")
|
||||||
ping_data = {
|
ping_data = {
|
||||||
'online': ping_obj['server_player_count'],
|
"online": ping_obj["server_player_count"],
|
||||||
'max': ping_obj['server_player_max'],
|
"max": ping_obj["server_player_max"],
|
||||||
'players': [],
|
"players": [],
|
||||||
'server_description': ping_obj['server_edition'],
|
"server_description": ping_obj["server_edition"],
|
||||||
'server_version': ping_obj['server_version_name'],
|
"server_version": ping_obj["server_version_name"],
|
||||||
'server_icon': server_icon
|
"server_icon": server_icon,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
return ping_data
|
return ping_data
|
||||||
|
|
||||||
|
|
||||||
def record_stats(self):
|
def record_stats(self):
|
||||||
stats_to_send = self.get_node_stats()
|
stats_to_send = self.get_node_stats()
|
||||||
node_stats = stats_to_send.get('node_stats')
|
node_stats = stats_to_send.get("node_stats")
|
||||||
|
|
||||||
Host_Stats.insert(
    {
        Host_Stats.boot_time: node_stats.get("boot_time", "Unknown"),
        Host_Stats.cpu_usage: round(node_stats.get("cpu_usage", 0), 2),
        Host_Stats.cpu_cores: node_stats.get("cpu_count", 0),
        Host_Stats.cpu_cur_freq: node_stats.get("cpu_cur_freq", 0),
        Host_Stats.cpu_max_freq: node_stats.get("cpu_max_freq", 0),
        Host_Stats.mem_usage: node_stats.get("mem_usage", "0 MB"),
        Host_Stats.mem_percent: node_stats.get("mem_percent", 0),
        Host_Stats.mem_total: node_stats.get("mem_total", "0 MB"),
        Host_Stats.disk_json: node_stats.get("disk_data", "{}"),
    }
).execute()
|
|
||||||
# server_stats = stats_to_send.get("servers")

# for server in server_stats:
# Server_Stats.insert(
#     {
#         Server_Stats.server_id: server.get("id", 0),
#         Server_Stats.started: server.get("started", ""),
#         Server_Stats.running: server.get("running", False),
#         Server_Stats.cpu: server.get("cpu", 0),
#         Server_Stats.mem: server.get("mem", 0),
#         Server_Stats.mem_percent: server.get("mem_percent", 0),
#         Server_Stats.world_name: server.get("world_name", ""),
#         Server_Stats.world_size: server.get("world_size", ""),
#         Server_Stats.server_port: server.get("server_port", ""),
#         Server_Stats.int_ping_results: server.get(
#             "int_ping_results", False
#         ),
#         Server_Stats.online: server.get("online", False),
#         Server_Stats.max: server.get("max", False),
#         Server_Stats.players: server.get("players", False),
#         Server_Stats.desc: server.get("desc", False),
#         Server_Stats.version: server.get("version", False),
#     }
# ).execute()
|
|
||||||
# delete old data
|
# delete old data
|
||||||
max_age = helper.get_setting("history_max_age")
|
max_age = helper.get_setting("history_max_age")
|
||||||
@ -239,4 +237,6 @@ class Stats:
|
|||||||
last_week = now.day - max_age
|
last_week = now.day - max_age
|
||||||
|
|
||||||
Host_Stats.delete().where(Host_Stats.time < last_week).execute()
|
Host_Stats.delete().where(Host_Stats.time < last_week).execute()
|
||||||
|
|
||||||
|
|
||||||
# Server_Stats.delete().where(Server_Stats.created < last_week).execute()
|
# Server_Stats.delete().where(Server_Stats.created < last_week).execute()
|
||||||
|
@ -5,25 +5,32 @@ from app.classes.shared.permission_helper import permission_helper
|
|||||||
from app.classes.models.users import Users, ApiKeys
|
from app.classes.models.users import Users, ApiKeys
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import (
    SqliteDatabase,
    Model,
    ForeignKeyField,
    CharField,
    IntegerField,
    DoesNotExist,
)
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
helper.auto_installer_fix(e)
|
helper.auto_installer_fix(e)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger("peewee")
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
database = SqliteDatabase(
    helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)
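# The pragmas above turn on SQLite's write-ahead log and size the page cache; a negative
# cache_size is interpreted by SQLite as kibibytes, so -1024 * 10 requests roughly 10 MiB.
# A minimal peewee setup following the same pattern (database file and model invented):
from peewee import SqliteDatabase, Model, CharField

example_db = SqliteDatabase(
    "example.db", pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)

class Note(Model):  # illustrative model, not part of Crafty
    text = CharField(default="")

    class Meta:
        database = example_db

example_db.connect()
example_db.create_tables([Note])
Note.insert({Note.text: "hello"}).execute()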
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# User_Crafty Class
|
# User_Crafty Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class User_Crafty(Model):
|
class User_Crafty(Model):
|
||||||
user_id = ForeignKeyField(Users, backref='users_crafty')
|
user_id = ForeignKeyField(Users, backref="users_crafty")
|
||||||
permissions = CharField(default="00000000")
|
permissions = CharField(default="00000000")
|
||||||
limit_server_creation = IntegerField(default=-1)
|
limit_server_creation = IntegerField(default=-1)
|
||||||
limit_user_creation = IntegerField(default=0)
|
limit_user_creation = IntegerField(default=0)
|
||||||
@ -33,22 +40,23 @@ class User_Crafty(Model):
|
|||||||
created_role = IntegerField(default=0)
|
created_role = IntegerField(default=0)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = 'user_crafty'
|
table_name = "user_crafty"
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
#************************************************************************************************
|
|
||||||
|
# **********************************************************************************
|
||||||
# Crafty Permissions Class
|
# Crafty Permissions Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Enum_Permissions_Crafty(Enum):
|
class Enum_Permissions_Crafty(Enum):
|
||||||
Server_Creation = 0
|
Server_Creation = 0
|
||||||
User_Config = 1
|
User_Config = 1
|
||||||
Roles_Config = 2
|
Roles_Config = 2
|
||||||
|
|
||||||
class Permissions_Crafty:
|
|
||||||
|
|
||||||
#************************************************************************************************
|
class Permissions_Crafty:
|
||||||
|
# **********************************************************************************
|
||||||
# Crafty Permissions Methods
|
# Crafty Permissions Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_permissions_list():
|
def get_permissions_list():
|
||||||
permissions_list = []
|
permissions_list = []
|
||||||
@ -67,15 +75,17 @@ class Permissions_Crafty:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def has_permission(permission_mask, permission_tested: Enum_Permissions_Crafty):
|
def has_permission(permission_mask, permission_tested: Enum_Permissions_Crafty):
|
||||||
result = False
|
result = False
|
||||||
if permission_mask[permission_tested.value] == '1':
|
if permission_mask[permission_tested.value] == "1":
|
||||||
result = True
|
result = True
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_permission(
    permission_mask, permission_tested: Enum_Permissions_Crafty, value
):
l = list(permission_mask)
|
l = list(permission_mask)
|
||||||
l[permission_tested.value] = str(value)
|
l[permission_tested.value] = str(value)
|
||||||
permission_mask = ''.join(l)
|
permission_mask = "".join(l)
|
||||||
return permission_mask
|
return permission_mask
|
||||||
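# The crafty permission mask is a string of "0"/"1" characters indexed by the enum
# values above, so checking and flipping bits is plain string handling. A standalone
# re-implementation for illustration:
from enum import Enum

class CraftyPermsDemo(Enum):  # mirrors Enum_Permissions_Crafty for the example
    Server_Creation = 0
    User_Config = 1
    Roles_Config = 2

def demo_has_permission(mask: str, perm: CraftyPermsDemo) -> bool:
    return mask[perm.value] == "1"

def demo_set_permission(mask: str, perm: CraftyPermsDemo, value: int) -> str:
    chars = list(mask)
    chars[perm.value] = str(value)
    return "".join(chars)

mask = "100"                                                       # only Server_Creation granted
print(demo_has_permission(mask, CraftyPermsDemo.User_Config))      # False
print(demo_set_permission(mask, CraftyPermsDemo.Roles_Config, 1))  # "101"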
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -84,7 +94,7 @@ class Permissions_Crafty:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_crafty_permissions_mask(user_id):
|
def get_crafty_permissions_mask(user_id):
|
||||||
permissions_mask = ''
|
permissions_mask = ""
|
||||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||||
permissions_mask = user_crafty.permissions
|
permissions_mask = user_crafty.permissions
|
||||||
return permissions_mask
|
return permissions_mask
|
||||||
@ -102,21 +112,24 @@ class Permissions_Crafty:
|
|||||||
def get_permission_quantity_list(user_id):
|
def get_permission_quantity_list(user_id):
|
||||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||||
quantity_list = {
|
quantity_list = {
|
||||||
Enum_Permissions_Crafty.Server_Creation.name: user_crafty.limit_server_creation,
|
Enum_Permissions_Crafty.Server_Creation.name: user_crafty.limit_server_creation, # pylint: disable=line-too-long
|
||||||
Enum_Permissions_Crafty.User_Config.name: user_crafty.limit_user_creation,
|
Enum_Permissions_Crafty.User_Config.name: user_crafty.limit_user_creation,
|
||||||
Enum_Permissions_Crafty.Roles_Config.name: user_crafty.limit_role_creation,
|
Enum_Permissions_Crafty.Roles_Config.name: user_crafty.limit_role_creation,
|
||||||
}
|
}
|
||||||
return quantity_list
|
return quantity_list
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# User_Crafty Methods
|
# User_Crafty Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_User_Crafty(user_id):
|
def get_User_Crafty(user_id):
|
||||||
try:
|
try:
|
||||||
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
user_crafty = (
|
||||||
|
User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
||||||
|
)
|
||||||
except DoesNotExist:
|
except DoesNotExist:
|
||||||
user_crafty = User_Crafty.insert({
|
user_crafty = User_Crafty.insert(
|
||||||
|
{
|
||||||
User_Crafty.user_id: user_id,
|
User_Crafty.user_id: user_id,
|
||||||
User_Crafty.permissions: "000",
|
User_Crafty.permissions: "000",
|
||||||
User_Crafty.limit_server_creation: 0,
|
User_Crafty.limit_server_creation: 0,
|
||||||
@ -125,32 +138,45 @@ class Permissions_Crafty:
|
|||||||
User_Crafty.created_server: 0,
|
User_Crafty.created_server: 0,
|
||||||
User_Crafty.created_user: 0,
|
User_Crafty.created_user: 0,
|
||||||
User_Crafty.created_role: 0,
|
User_Crafty.created_role: 0,
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
||||||
return user_crafty
|
return user_crafty
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user_crafty(user_id, uc_permissions):
|
def add_user_crafty(user_id, uc_permissions):
|
||||||
user_crafty = User_Crafty.insert({User_Crafty.user_id: user_id, User_Crafty.permissions: uc_permissions}).execute()
|
user_crafty = User_Crafty.insert(
|
||||||
|
{User_Crafty.user_id: user_id, User_Crafty.permissions: uc_permissions}
|
||||||
|
).execute()
|
||||||
return user_crafty
|
return user_crafty
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_or_update_user(
    user_id,
    permissions_mask,
    limit_server_creation,
    limit_user_creation,
    limit_role_creation,
):
try:
|
try:
|
||||||
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
user_crafty = (
|
||||||
|
User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
||||||
|
)
|
||||||
user_crafty.permissions = permissions_mask
|
user_crafty.permissions = permissions_mask
|
||||||
user_crafty.limit_server_creation = limit_server_creation
|
user_crafty.limit_server_creation = limit_server_creation
|
||||||
user_crafty.limit_user_creation = limit_user_creation
|
user_crafty.limit_user_creation = limit_user_creation
|
||||||
user_crafty.limit_role_creation = limit_role_creation
|
user_crafty.limit_role_creation = limit_role_creation
|
||||||
User_Crafty.save(user_crafty)
|
User_Crafty.save(user_crafty)
|
||||||
except:
|
except:
|
||||||
User_Crafty.insert({
|
User_Crafty.insert(
|
||||||
|
{
|
||||||
User_Crafty.user_id: user_id,
|
User_Crafty.user_id: user_id,
|
||||||
User_Crafty.permissions: permissions_mask,
|
User_Crafty.permissions: permissions_mask,
|
||||||
User_Crafty.limit_server_creation: limit_server_creation,
|
User_Crafty.limit_server_creation: limit_server_creation,
|
||||||
User_Crafty.limit_user_creation: limit_user_creation,
|
User_Crafty.limit_user_creation: limit_user_creation,
|
||||||
User_Crafty.limit_role_creation: limit_role_creation
|
User_Crafty.limit_role_creation: limit_role_creation,
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_created_quantity_list(user_id):
|
def get_created_quantity_list(user_id):
|
||||||
@ -173,7 +199,10 @@ class Permissions_Crafty:
|
|||||||
can = crafty_permissions.has_permission(user_crafty.permissions, permission)
|
can = crafty_permissions.has_permission(user_crafty.permissions, permission)
|
||||||
limit_list = crafty_permissions.get_permission_quantity_list(user_id)
|
limit_list = crafty_permissions.get_permission_quantity_list(user_id)
|
||||||
quantity_list = crafty_permissions.get_created_quantity_list(user_id)
|
quantity_list = crafty_permissions.get_created_quantity_list(user_id)
|
||||||
return can and (
    (quantity_list[permission.name] < limit_list[permission.name])
    or limit_list[permission.name] == -1
)
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_server_creation(user_id):
|
def add_server_creation(user_id):
|
||||||
@ -188,12 +217,15 @@ class Permissions_Crafty:
|
|||||||
if user.superuser and key.superuser:
|
if user.superuser and key.superuser:
|
||||||
return crafty_permissions.get_permissions_list()
|
return crafty_permissions.get_permissions_list()
|
||||||
else:
|
else:
|
||||||
user_permissions_mask = crafty_permissions.get_crafty_permissions_mask(
    user.user_id
)
key_permissions_mask: str = key.crafty_permissions
permissions_mask = permission_helper.combine_masks(
    user_permissions_mask, key_permissions_mask
)
permissions_list = crafty_permissions.get_permissions(permissions_mask)
|
permissions_list = crafty_permissions.get_permissions(permissions_mask)
|
||||||
return permissions_list
|
return permissions_list
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
crafty_permissions = Permissions_Crafty()
|
crafty_permissions = Permissions_Crafty()
|
||||||
|
@ -9,38 +9,51 @@ from app.classes.shared.main_models import db_helper
|
|||||||
from app.classes.web.websocket_helper import websocket_helper
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import (
    SqliteDatabase,
    Model,
    ForeignKeyField,
    CharField,
    IntegerField,
    DateTimeField,
    FloatField,
    TextField,
    AutoField,
    BooleanField,
)
from playhouse.shortcuts import model_to_dict
|
from playhouse.shortcuts import model_to_dict
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
helper.auto_installer_fix(e)
|
helper.auto_installer_fix(e)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger("peewee")
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
database = SqliteDatabase(
    helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Audit_Log Class
|
# Audit_Log Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Audit_Log(Model):
|
class Audit_Log(Model):
|
||||||
audit_id = AutoField()
|
audit_id = AutoField()
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
user_name = CharField(default="")
|
user_name = CharField(default="")
|
||||||
user_id = IntegerField(default=0, index=True)
|
user_id = IntegerField(default=0, index=True)
|
||||||
source_ip = CharField(default='127.0.0.1')
|
source_ip = CharField(default="127.0.0.1")
|
||||||
server_id = IntegerField(
    default=None, index=True
)  # When auditing global events, use server ID 0
log_msg = TextField(default="")
|
|
||||||
class Meta:
|
class Meta:
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Host_Stats Class
|
# Host_Stats Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Host_Stats(Model):
|
class Host_Stats(Model):
|
||||||
time = DateTimeField(default=datetime.datetime.now, index=True)
|
time = DateTimeField(default=datetime.datetime.now, index=True)
|
||||||
boot_time = CharField(default="")
|
boot_time = CharField(default="")
|
||||||
@ -58,16 +71,16 @@ class Host_Stats(Model):
|
|||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Commands Class
|
# Commands Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Commands(Model):
|
class Commands(Model):
|
||||||
command_id = AutoField()
|
command_id = AutoField()
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
server_id = ForeignKeyField(Servers, backref='server', index=True)
|
server_id = ForeignKeyField(Servers, backref="server", index=True)
|
||||||
user = ForeignKeyField(Users, backref='user', index=True)
|
user = ForeignKeyField(Users, backref="user", index=True)
|
||||||
source_ip = CharField(default='127.0.0.1')
|
source_ip = CharField(default="127.0.0.1")
|
||||||
command = CharField(default='')
|
command = CharField(default="")
|
||||||
executed = BooleanField(default=False)
|
executed = BooleanField(default=False)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -75,9 +88,9 @@ class Commands(Model):
|
|||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Webhooks Class
|
# Webhooks Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Webhooks(Model):
|
class Webhooks(Model):
|
||||||
id = AutoField()
|
id = AutoField()
|
||||||
name = CharField(max_length=64, unique=True, index=True)
|
name = CharField(max_length=64, unique=True, index=True)
|
||||||
@ -91,12 +104,12 @@ class Webhooks(Model):
|
|||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Schedules Class
|
# Schedules Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Schedules(Model):
|
class Schedules(Model):
|
||||||
schedule_id = IntegerField(unique=True, primary_key=True)
|
schedule_id = IntegerField(unique=True, primary_key=True)
|
||||||
server_id = ForeignKeyField(Servers, backref='schedule_server')
|
server_id = ForeignKeyField(Servers, backref="schedule_server")
|
||||||
enabled = BooleanField()
|
enabled = BooleanField()
|
||||||
action = CharField()
|
action = CharField()
|
||||||
interval = IntegerField()
|
interval = IntegerField()
|
||||||
@ -110,44 +123,48 @@ class Schedules(Model):
|
|||||||
delay = IntegerField(default=0)
|
delay = IntegerField(default=0)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = 'schedules'
|
table_name = "schedules"
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Backups Class
|
# Backups Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Backups(Model):
|
class Backups(Model):
|
||||||
excluded_dirs = CharField(null=True)
|
excluded_dirs = CharField(null=True)
|
||||||
max_backups = IntegerField()
|
max_backups = IntegerField()
|
||||||
server_id = ForeignKeyField(Servers, backref='backups_server')
|
server_id = ForeignKeyField(Servers, backref="backups_server")
|
||||||
compress = BooleanField(default=False)
|
compress = BooleanField(default=False)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = 'backups'
|
table_name = "backups"
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
class helpers_management:
|
class helpers_management:
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Host_Stats Methods
|
# Host_Stats Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_latest_hosts_stats():
|
def get_latest_hosts_stats():
|
||||||
# pylint: disable=no-member
|
# pylint: disable=no-member
|
||||||
query = Host_Stats.select().order_by(Host_Stats.id.desc()).get()
|
query = Host_Stats.select().order_by(Host_Stats.id.desc()).get()
|
||||||
return model_to_dict(query)
|
return model_to_dict(query)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Commands Methods
|
# Commands Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_command(server_id, user_id, remote_ip, command):
|
def add_command(server_id, user_id, remote_ip, command):
|
||||||
Commands.insert(
    {
        Commands.server_id: server_id,
        Commands.user: user_id,
        Commands.source_ip: remote_ip,
        Commands.command: command,
    }
).execute()
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_unactioned_commands():
|
def get_unactioned_commands():
|
||||||
@ -158,13 +175,13 @@ class helpers_management:
|
|||||||
def mark_command_complete(command_id=None):
|
def mark_command_complete(command_id=None):
|
||||||
if command_id is not None:
|
if command_id is not None:
|
||||||
logger.debug(f"Marking Command {command_id} completed")
|
logger.debug(f"Marking Command {command_id} completed")
|
||||||
Commands.update({Commands.executed: True}).where(
    Commands.command_id == command_id
).execute()
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Audit_Log Methods
|
# Audit_Log Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_actity_log():
|
def get_actity_log():
|
||||||
q = Audit_Log.select()
|
q = Audit_Log.select()
|
||||||
@ -179,22 +196,24 @@ class helpers_management:
|
|||||||
|
|
||||||
server_users = server_permissions.get_server_user_list(server_id)
|
server_users = server_permissions.get_server_user_list(server_id)
|
||||||
for user in server_users:
|
for user in server_users:
|
||||||
websocket_helper.broadcast_user(user,'notification', audit_msg)
|
websocket_helper.broadcast_user(user, "notification", audit_msg)
|
||||||
|
|
||||||
Audit_Log.insert({
|
Audit_Log.insert(
|
||||||
Audit_Log.user_name: user_data['username'],
|
{
|
||||||
|
Audit_Log.user_name: user_data["username"],
|
||||||
Audit_Log.user_id: user_id,
|
Audit_Log.user_id: user_id,
|
||||||
Audit_Log.server_id: server_id,
|
Audit_Log.server_id: server_id,
|
||||||
Audit_Log.log_msg: audit_msg,
|
Audit_Log.log_msg: audit_msg,
|
||||||
Audit_Log.source_ip: source_ip
|
Audit_Log.source_ip: source_ip,
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
# deletes records once they exceed the configured max_audit_entries limit
|
# deletes records once they exceed the configured max_audit_entries limit
|
||||||
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
||||||
for item in ordered:
|
for item in ordered:
|
||||||
if not helper.get_setting('max_audit_entries'):
|
if not helper.get_setting("max_audit_entries"):
|
||||||
max_entries = 300
|
max_entries = 300
|
||||||
else:
|
else:
|
||||||
max_entries = helper.get_setting('max_audit_entries')
|
max_entries = helper.get_setting("max_audit_entries")
|
||||||
if Audit_Log.select().count() > max_entries:
|
if Audit_Log.select().count() > max_entries:
|
||||||
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
||||||
else:
|
else:
|
||||||
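# The retention loop above walks audit rows oldest-first and keeps deleting until the
# table is back under max_audit_entries (300 when the setting is absent). The same idea
# in isolation, with a plain list standing in for the query; purely illustrative:
def prune_oldest(rows, max_entries=300):
    # Drop items from the front (oldest) until at most max_entries remain.
    rows = list(rows)
    while len(rows) > max_entries:
        rows.pop(0)
    return rows

print(prune_oldest(range(305), max_entries=300)[:3])  # [5, 6, 7]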
@ -202,28 +221,31 @@ class helpers_management:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
|
def add_to_audit_log_raw(user_name, user_id, server_id, log_msg, source_ip):
|
||||||
Audit_Log.insert({
|
Audit_Log.insert(
|
||||||
|
{
|
||||||
Audit_Log.user_name: user_name,
|
Audit_Log.user_name: user_name,
|
||||||
Audit_Log.user_id: user_id,
|
Audit_Log.user_id: user_id,
|
||||||
Audit_Log.server_id: server_id,
|
Audit_Log.server_id: server_id,
|
||||||
Audit_Log.log_msg: log_msg,
|
Audit_Log.log_msg: log_msg,
|
||||||
Audit_Log.source_ip: source_ip
|
Audit_Log.source_ip: source_ip,
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
# deletes records once they exceed the configured max_audit_entries limit
|
# deletes records once they exceed the configured max_audit_entries limit
|
||||||
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
ordered = Audit_Log.select().order_by(+Audit_Log.created)
|
||||||
for item in ordered:
|
for item in ordered:
|
||||||
# configurable through app/config/config.json
|
# configurable through app/config/config.json
|
||||||
if not helper.get_setting('max_audit_entries'):
|
if not helper.get_setting("max_audit_entries"):
|
||||||
max_entries = 300
|
max_entries = 300
|
||||||
else:
|
else:
|
||||||
max_entries = helper.get_setting('max_audit_entries')
|
max_entries = helper.get_setting("max_audit_entries")
|
||||||
if Audit_Log.select().count() > max_entries:
|
if Audit_Log.select().count() > max_entries:
|
||||||
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
Audit_Log.delete().where(Audit_Log.audit_id == item.audit_id).execute()
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
#************************************************************************************************
|
|
||||||
|
# **********************************************************************************
|
||||||
# Schedules Methods
|
# Schedules Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_scheduled_task(
|
def create_scheduled_task(
|
||||||
server_id,
|
server_id,
|
||||||
@ -235,10 +257,12 @@ class helpers_management:
        comment=None,
        enabled=True,
        one_time=False,
        cron_string="* * * * *",
        parent=None,
        delay=0,
    ):
        sch_id = Schedules.insert(
            {
                Schedules.server_id: server_id,
                Schedules.action: action,
                Schedules.enabled: enabled,
@ -250,9 +274,9 @@ class helpers_management:
                Schedules.one_time: one_time,
                Schedules.cron_string: cron_string,
                Schedules.parent: parent,
                Schedules.delay: delay,
            }
        ).execute()
        return sch_id

    @staticmethod
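create_scheduled_task persists the schedule as a standard five-field cron expression, defaulting to "* * * * *" (every minute). A small illustration of how such a string splits into its fields; this is generic cron syntax, not Crafty's scheduler implementation:

# Generic five-field cron layout: minute, hour, day-of-month, month, day-of-week
def describe_cron(cron_string: str = "* * * * *") -> dict:
    fields = ["minute", "hour", "day_of_month", "month", "day_of_week"]
    parts = cron_string.split()
    if len(parts) != len(fields):
        raise ValueError(f"expected 5 cron fields, got {len(parts)}")
    return dict(zip(fields, parts))


print(describe_cron("30 4 * * 1"))
# {'minute': '30', 'hour': '4', 'day_of_month': '*', 'month': '*', 'day_of_week': '1'}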
@ -282,7 +306,11 @@ class helpers_management:

    @staticmethod
    def get_child_schedules_by_server(schedule_id, server_id):
        return (
            Schedules.select()
            .where(Schedules.server_id == server_id, Schedules.parent == schedule_id)
            .execute()
        )

    @staticmethod
    def get_child_schedules(schedule_id):
@ -297,19 +325,21 @@ class helpers_management:
        # pylint: disable=singleton-comparison
        return Schedules.select().where(Schedules.enabled == True).execute()

    # **********************************************************************************
    # Backups Methods
    # **********************************************************************************
    @staticmethod
    def get_backup_config(server_id):
        try:
            row = (
                Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
            )
            conf = {
                "backup_path": row.server_id.backup_path,
                "excluded_dirs": row.excluded_dirs,
                "max_backups": row.max_backups,
                "server_id": row.server_id.server_id,
                "compress": row.compress,
            }
        except IndexError:
            conf = {
@ -322,7 +352,13 @@ class helpers_management:
        return conf

    @staticmethod
    def set_backup_config(
        server_id: int,
        backup_path: str = None,
        max_backups: int = None,
        excluded_dirs: list = None,
        compress: bool = False,
    ):
        logger.debug(f"Updating server {server_id} backup config with {locals()}")
        if Backups.select().where(Backups.server_id == server_id).count() != 0:
            new_row = False
@ -332,33 +368,41 @@ class helpers_management:
                "excluded_dirs": None,
                "max_backups": 0,
                "server_id": server_id,
                "compress": False,
            }
            new_row = True
        if max_backups is not None:
            conf["max_backups"] = max_backups
        if excluded_dirs is not None:
            dirs_to_exclude = ",".join(excluded_dirs)
            conf["excluded_dirs"] = dirs_to_exclude
        conf["compress"] = compress
        if not new_row:
            with database.atomic():
                if backup_path is not None:
                    u1 = (
                        Servers.update(backup_path=backup_path)
                        .where(Servers.server_id == server_id)
                        .execute()
                    )
                else:
                    u1 = 0
                u2 = (
                    Backups.update(conf).where(Backups.server_id == server_id).execute()
                )
                logger.debug(f"Updating existing backup record. {u1}+{u2} rows affected")
        else:
            with database.atomic():
                conf["server_id"] = server_id
                if backup_path is not None:
                    Servers.update(backup_path=backup_path).where(
                        Servers.server_id == server_id
                    )
                Backups.create(**conf)
                logger.debug("Creating new backup record.")

    def get_excluded_backup_dirs(self, server_id: int):
        excluded_dirs = self.get_backup_config(server_id)["excluded_dirs"]
        if excluded_dirs is not None and excluded_dirs != "":
            dir_list = excluded_dirs.split(",")
        else:
@ -372,7 +416,10 @@ class helpers_management:
            excluded_dirs = ",".join(dir_list)
            self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
        else:
            logger.debug(
                f"Not adding {dir_to_add} to excluded directories - "
                f"already in the excluded directory list for server ID {server_id}"
            )

    def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
        dir_list = self.get_excluded_backup_dirs()
@ -381,14 +428,19 @@ class helpers_management:
            excluded_dirs = ",".join(dir_list)
            self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
        else:
            logger.debug(
                f"Not removing {dir_to_del} from excluded directories - "
                f"not in the excluded directory list for server ID {server_id}"
            )

    @staticmethod
    def clear_unexecuted_commands():
        Commands.update(
            {
                Commands.executed: True
                # pylint: disable=singleton-comparison
            }
        ).where(Commands.executed == False).execute()


management_helper = helpers_management()
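The exclusion list handled above is persisted as one comma-joined string on the backup record and split back into a list on read. A minimal round-trip sketch of that encoding in plain Python (not the actual Backups model); note that directory names containing commas could not survive this scheme:

def pack_excluded_dirs(dir_list: list) -> str:
    # Stored form, e.g. "logs,cache"
    return ",".join(dir_list)


def unpack_excluded_dirs(stored: str) -> list:
    # Mirrors get_excluded_backup_dirs(): empty or None means "no exclusions"
    if stored:
        return stored.split(",")
    return []


stored = pack_excluded_dirs(["logs", "cache"])
assert unpack_excluded_dirs(stored) == ["logs", "cache"]
assert unpack_excluded_dirs("") == []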
@ -4,22 +4,29 @@ import datetime
from app.classes.shared.helpers import helper

try:
    from peewee import (
        SqliteDatabase,
        Model,
        CharField,
        DoesNotExist,
        AutoField,
        DateTimeField,
    )
    from playhouse.shortcuts import model_to_dict

except ModuleNotFoundError as e:
    helper.auto_installer_fix(e)

logger = logging.getLogger(__name__)
peewee_logger = logging.getLogger("peewee")
peewee_logger.setLevel(logging.INFO)
database = SqliteDatabase(
    helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)


# **********************************************************************************
# Roles Class
# **********************************************************************************
class Roles(Model):
    role_id = AutoField()
    created = DateTimeField(default=datetime.datetime.now)
@ -30,9 +37,10 @@ class Roles(Model):
        table_name = "roles"
        database = database


# **********************************************************************************
# Roles Helpers
# **********************************************************************************
class helper_roles:
    @staticmethod
    def get_all_roles():
@ -52,10 +60,12 @@ class helper_roles:

    @staticmethod
    def add_role(role_name):
        role_id = Roles.insert(
            {
                Roles.role_name: role_name.lower(),
                Roles.created: helper.get_time_as_string(),
            }
        ).execute()
        return role_id

    @staticmethod
@ -74,4 +84,5 @@ class helper_roles:
            return False
        return True


roles_helper = helper_roles()
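Each of these model modules opens the shared SQLite database with journal_mode=wal and cache_size=-1024 * 10; SQLite reads a negative cache_size as kibibytes, so this asks for roughly 10 MiB of page cache. A standalone sketch of the same pragma setup in peewee, with ":memory:" standing in for helper.db_path:

from peewee import SqliteDatabase, Model, AutoField, CharField

# Same pragma style as the model modules above; ":memory:" is a stand-in path
database = SqliteDatabase(
    ":memory:", pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)


class Example(Model):
    example_id = AutoField()
    name = CharField(default="")

    class Meta:
        database = database


database.connect()
database.create_tables([Example])
# An in-memory database reports "memory" here; against an on-disk file the
# pragma enables write-ahead logging as intended.
print(database.pragma("journal_mode"))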
@ -7,35 +7,43 @@ from app.classes.shared.helpers import helper
from app.classes.shared.permission_helper import permission_helper

try:
    from peewee import (
        SqliteDatabase,
        Model,
        ForeignKeyField,
        CharField,
        CompositeKey,
        JOIN,
    )
    from enum import Enum

except ModuleNotFoundError as e:
    helper.auto_installer_fix(e)

logger = logging.getLogger(__name__)
peewee_logger = logging.getLogger("peewee")
peewee_logger.setLevel(logging.INFO)
database = SqliteDatabase(
    helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
)


# **********************************************************************************
# Role Servers Class
# **********************************************************************************
class Role_Servers(Model):
    role_id = ForeignKeyField(Roles, backref="role_server")
    server_id = ForeignKeyField(Servers, backref="role_server")
    permissions = CharField(default="00000000")

    class Meta:
        table_name = "role_servers"
        primary_key = CompositeKey("role_id", "server_id")
        database = database


# **********************************************************************************
# Servers Permissions Class
# **********************************************************************************
class Enum_Permissions_Server(Enum):
    Commands = 0
    Terminal = 1
@ -46,11 +54,13 @@ class Enum_Permissions_Server(Enum):
    Config = 6
    Players = 7


class Permissions_Servers:
    @staticmethod
    def get_or_create(role_id, server, permissions_mask):
        return Role_Servers.get_or_create(
            role_id=role_id, server_id=server, permissions=permissions_mask
        )

    @staticmethod
    def get_permissions_list():
@ -69,13 +79,15 @@ class Permissions_Servers:

    @staticmethod
    def has_permission(permission_mask, permission_tested: Enum_Permissions_Server):
        return permission_mask[permission_tested.value] == "1"

    @staticmethod
    def set_permission(
        permission_mask, permission_tested: Enum_Permissions_Server, value
    ):
        list_perms = list(permission_mask)
        list_perms[permission_tested.value] = str(value)
        permission_mask = "".join(list_perms)
        return permission_mask

    @staticmethod
@ -86,50 +98,71 @@ class Permissions_Servers:
    def get_token_permissions(permissions_mask, api_permissions_mask):
        permissions_list = []
        for member in Enum_Permissions_Server.__members__.items():
            if permission_helper.both_have_perm(
                permissions_mask, api_permissions_mask, member[1]
            ):
                permissions_list.append(member[1])
        return permissions_list

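Throughout this module a server permission set is just a fixed-width string of "0"/"1" flags indexed by the Enum_Permissions_Server values, and has_permission/set_permission read or rewrite a single character of it. The same idea in a standalone sketch with a trimmed-down enum (the real one has eight members):

from enum import Enum


class Perm(Enum):
    # Trimmed stand-in for Enum_Permissions_Server
    Commands = 0
    Terminal = 1
    Backup = 2


def has_permission(mask: str, perm: Perm) -> bool:
    return mask[perm.value] == "1"


def set_permission(mask: str, perm: Perm, value) -> str:
    chars = list(mask)
    chars[perm.value] = str(value)
    return "".join(chars)


mask = set_permission("000", Perm.Terminal, 1)
print(mask)                                 # "010"
print(has_permission(mask, Perm.Terminal))  # True
print(has_permission(mask, Perm.Commands))  # False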
    # **********************************************************************************
    # Role_Servers Methods
    # **********************************************************************************
    @staticmethod
    def get_role_servers_from_role_id(roleid):
        return Role_Servers.select().where(Role_Servers.role_id == roleid)

    @staticmethod
    def get_servers_from_role(role_id):
        return (
            Role_Servers.select()
            .join(Servers, JOIN.INNER)
            .where(Role_Servers.role_id == role_id)
        )

    @staticmethod
    def get_roles_from_server(server_id):
        return (
            Role_Servers.select()
            .join(Roles, JOIN.INNER)
            .where(Role_Servers.server_id == server_id)
        )

    @staticmethod
    def add_role_server(server_id, role_id, rs_permissions="00000000"):
        servers = Role_Servers.insert(
            {
                Role_Servers.server_id: server_id,
                Role_Servers.role_id: role_id,
                Role_Servers.permissions: rs_permissions,
            }
        ).execute()
        return servers

    @staticmethod
    def get_permissions_mask(role_id, server_id):
        permissions_mask = ""
        role_server = (
            Role_Servers.select()
            .where(Role_Servers.role_id == role_id)
            .where(Role_Servers.server_id == server_id)
            .get()
        )
        permissions_mask = role_server.permissions
        return permissions_mask

    @staticmethod
    def get_server_roles(server_id):
        role_list = []
        roles = (
            Role_Servers.select().where(Role_Servers.server_id == server_id).execute()
        )
        for role in roles:
            role_list.append(role.role_id)
        return role_list

    @staticmethod
    def get_role_permissions_list(role_id):
        permissions_mask = "00000000"
        role_server = Role_Servers.get_or_none(Role_Servers.role_id == role_id)
        if role_server is not None:
            permissions_mask = role_server.permissions
@ -138,7 +171,12 @@ class Permissions_Servers:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def update_role_permission(role_id, server_id, permissions_mask):
|
def update_role_permission(role_id, server_id, permissions_mask):
|
||||||
role_server = Role_Servers.select().where(Role_Servers.role_id == role_id).where(Role_Servers.server_id == server_id).get()
|
role_server = (
|
||||||
|
Role_Servers.select()
|
||||||
|
.where(Role_Servers.role_id == role_id)
|
||||||
|
.where(Role_Servers.server_id == server_id)
|
||||||
|
.get()
|
||||||
|
)
|
||||||
role_server.permissions = permissions_mask
|
role_server.permissions = permissions_mask
|
||||||
Role_Servers.save(role_server)
|
Role_Servers.save(role_server)
|
||||||
|
|
||||||
@ -146,12 +184,21 @@ class Permissions_Servers:
|
|||||||
def delete_roles_permissions(role_id, removed_servers=None):
|
def delete_roles_permissions(role_id, removed_servers=None):
|
||||||
if removed_servers is None:
|
if removed_servers is None:
|
||||||
removed_servers = {}
|
removed_servers = {}
|
||||||
return Role_Servers.delete().where(Role_Servers.role_id == role_id).where(Role_Servers.server_id.in_(removed_servers)).execute()
|
return (
|
||||||
|
Role_Servers.delete()
|
||||||
|
.where(Role_Servers.role_id == role_id)
|
||||||
|
.where(Role_Servers.server_id.in_(removed_servers))
|
||||||
|
.execute()
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def remove_roles_of_server(server_id):
|
def remove_roles_of_server(server_id):
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
return Role_Servers.delete().where(Role_Servers.server_id == server_id).execute()
|
return (
|
||||||
|
Role_Servers.delete()
|
||||||
|
.where(Role_Servers.server_id == server_id)
|
||||||
|
.execute()
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_id_permissions_mask(user_id, server_id: str):
|
def get_user_id_permissions_mask(user_id, server_id: str):
|
||||||
@ -161,14 +208,19 @@ class Permissions_Servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_permissions_mask(user: Users, server_id: str):
|
def get_user_permissions_mask(user: Users, server_id: str):
|
||||||
if user.superuser:
|
if user.superuser:
|
||||||
permissions_mask = '1' * len(server_permissions.get_permissions_list())
|
permissions_mask = "1" * len(server_permissions.get_permissions_list())
|
||||||
else:
|
else:
|
||||||
roles_list = users_helper.get_user_roles_id(user.user_id)
|
roles_list = users_helper.get_user_roles_id(user.user_id)
|
||||||
role_server = Role_Servers.select().where(Role_Servers.role_id.in_(roles_list)).where(Role_Servers.server_id == server_id).execute()
|
role_server = (
|
||||||
|
Role_Servers.select()
|
||||||
|
.where(Role_Servers.role_id.in_(roles_list))
|
||||||
|
.where(Role_Servers.server_id == server_id)
|
||||||
|
.execute()
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
permissions_mask = role_server[0].permissions
|
permissions_mask = role_server[0].permissions
|
||||||
except IndexError:
|
except IndexError:
|
||||||
permissions_mask = '0' * len(server_permissions.get_permissions_list())
|
permissions_mask = "0" * len(server_permissions.get_permissions_list())
|
||||||
return permissions_mask
|
return permissions_mask
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -197,7 +249,9 @@ class Permissions_Servers:
|
|||||||
if user.superuser:
|
if user.superuser:
|
||||||
permissions_list = server_permissions.get_permissions_list()
|
permissions_list = server_permissions.get_permissions_list()
|
||||||
else:
|
else:
|
||||||
permissions_mask = server_permissions.get_user_permissions_mask(user, server_id)
|
permissions_mask = server_permissions.get_user_permissions_mask(
|
||||||
|
user, server_id
|
||||||
|
)
|
||||||
permissions_list = server_permissions.get_permissions(permissions_mask)
|
permissions_list = server_permissions.get_permissions(permissions_mask)
|
||||||
return permissions_list
|
return permissions_list
|
||||||
|
|
||||||
@ -212,11 +266,18 @@ class Permissions_Servers:
|
|||||||
if user.superuser and key.superuser:
|
if user.superuser and key.superuser:
|
||||||
return server_permissions.get_permissions_list()
|
return server_permissions.get_permissions_list()
|
||||||
else:
|
else:
|
||||||
roles_list = users_helper.get_user_roles_id(user['user_id'])
|
roles_list = users_helper.get_user_roles_id(user["user_id"])
|
||||||
role_server = Role_Servers.select().where(Role_Servers.role_id.in_(roles_list)).where(Role_Servers.server_id == server_id).execute()
|
role_server = (
|
||||||
|
Role_Servers.select()
|
||||||
|
.where(Role_Servers.role_id.in_(roles_list))
|
||||||
|
.where(Role_Servers.server_id == server_id)
|
||||||
|
.execute()
|
||||||
|
)
|
||||||
user_permissions_mask = role_server[0].permissions
|
user_permissions_mask = role_server[0].permissions
|
||||||
key_permissions_mask = key.server_permissions
|
key_permissions_mask = key.server_permissions
|
||||||
permissions_mask = permission_helper.combine_masks(user_permissions_mask, key_permissions_mask)
|
permissions_mask = permission_helper.combine_masks(
|
||||||
|
user_permissions_mask, key_permissions_mask
|
||||||
|
)
|
||||||
permissions_list = server_permissions.get_permissions(permissions_mask)
|
permissions_list = server_permissions.get_permissions(permissions_mask)
|
||||||
return permissions_list
|
return permissions_list
|
||||||
|
|
||||||
|
@ -5,21 +5,31 @@ from app.classes.shared.helpers import helper
|
|||||||
from app.classes.shared.main_models import db_helper
|
from app.classes.shared.main_models import db_helper
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, IntegerField, FloatField
|
from peewee import (
|
||||||
|
SqliteDatabase,
|
||||||
|
Model,
|
||||||
|
ForeignKeyField,
|
||||||
|
CharField,
|
||||||
|
AutoField,
|
||||||
|
DateTimeField,
|
||||||
|
BooleanField,
|
||||||
|
IntegerField,
|
||||||
|
FloatField,
|
||||||
|
)
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
helper.auto_installer_fix(e)
|
helper.auto_installer_fix(e)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger("peewee")
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
database = SqliteDatabase(
|
||||||
'journal_mode': 'wal',
|
helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
||||||
'cache_size': -1024 * 10})
|
)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Class
|
# Servers Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Servers(Model):
|
class Servers(Model):
|
||||||
server_id = AutoField()
|
server_id = AutoField()
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
@ -45,13 +55,13 @@ class Servers(Model):
|
|||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Stats Class
|
# Servers Stats Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Server_Stats(Model):
|
class Server_Stats(Model):
|
||||||
stats_id = AutoField()
|
stats_id = AutoField()
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
server_id = ForeignKeyField(Servers, backref='server', index=True)
|
server_id = ForeignKeyField(Servers, backref="server", index=True)
|
||||||
started = CharField(default="")
|
started = CharField(default="")
|
||||||
running = BooleanField(default=False)
|
running = BooleanField(default=False)
|
||||||
cpu = FloatField(default=0)
|
cpu = FloatField(default=0)
|
||||||
@ -72,20 +82,19 @@ class Server_Stats(Model):
|
|||||||
crashed = BooleanField(default=False)
|
crashed = BooleanField(default=False)
|
||||||
downloading = BooleanField(default=False)
|
downloading = BooleanField(default=False)
|
||||||
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = "server_stats"
|
table_name = "server_stats"
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Class
|
# Servers Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class helper_servers:
|
class helper_servers:
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Generic Servers Methods
|
# Generic Servers Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_server(
|
def create_server(
|
||||||
name: str,
|
name: str,
|
||||||
@ -97,8 +106,10 @@ class helper_servers:
|
|||||||
server_log_file: str,
|
server_log_file: str,
|
||||||
server_stop: str,
|
server_stop: str,
|
||||||
server_type: str,
|
server_type: str,
|
||||||
server_port=25565):
|
server_port=25565,
|
||||||
return Servers.insert({
|
):
|
||||||
|
return Servers.insert(
|
||||||
|
{
|
||||||
Servers.server_name: name,
|
Servers.server_name: name,
|
||||||
Servers.server_uuid: server_uuid,
|
Servers.server_uuid: server_uuid,
|
||||||
Servers.path: server_dir,
|
Servers.path: server_dir,
|
||||||
@ -111,9 +122,9 @@ class helper_servers:
|
|||||||
Servers.server_port: server_port,
|
Servers.server_port: server_port,
|
||||||
Servers.stop_command: server_stop,
|
Servers.stop_command: server_stop,
|
||||||
Servers.backup_path: backup_path,
|
Servers.backup_path: backup_path,
|
||||||
Servers.type: server_type
|
Servers.type: server_type,
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_obj(server_id):
|
def get_server_obj(server_id):
|
||||||
@ -141,9 +152,9 @@ class helper_servers:
|
|||||||
except IndexError:
|
except IndexError:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers Methods
|
# Servers Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_all_defined_servers():
|
def get_all_defined_servers():
|
||||||
query = Servers.select()
|
query = Servers.select()
|
||||||
@ -155,28 +166,54 @@ class helper_servers:
|
|||||||
server_data = []
|
server_data = []
|
||||||
try:
|
try:
|
||||||
for s in servers:
|
for s in servers:
|
||||||
latest = Server_Stats.select().where(Server_Stats.server_id == s.get('server_id')).order_by(Server_Stats.created.desc()).limit(1)
|
latest = (
|
||||||
server_data.append({'server_data': s, "stats": db_helper.return_rows(latest)[0], "user_command_permission":True})
|
Server_Stats.select()
|
||||||
|
.where(Server_Stats.server_id == s.get("server_id"))
|
||||||
|
.order_by(Server_Stats.created.desc())
|
||||||
|
.limit(1)
|
||||||
|
)
|
||||||
|
server_data.append(
|
||||||
|
{
|
||||||
|
"server_data": s,
|
||||||
|
"stats": db_helper.return_rows(latest)[0],
|
||||||
|
"user_command_permission": True,
|
||||||
|
}
|
||||||
|
)
|
||||||
except IndexError as ex:
|
except IndexError as ex:
|
||||||
logger.error(f"Stats collection failed with error: {ex}. Was a server just created?")
|
logger.error(
|
||||||
|
f"Stats collection failed with error: {ex}. Was a server just created?"
|
||||||
|
)
|
||||||
return server_data
|
return server_data
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_friendly_name(server_id):
|
def get_server_friendly_name(server_id):
|
||||||
server_data = servers_helper.get_server_data_by_id(server_id)
|
server_data = servers_helper.get_server_data_by_id(server_id)
|
||||||
friendly_name = f"{server_data.get('server_name', None)} with ID: {server_data.get('server_id', 0)}"
|
friendly_name = (
|
||||||
|
f"{server_data.get('server_name', None)} "
|
||||||
|
f"with ID: {server_data.get('server_id', 0)}"
|
||||||
|
)
|
||||||
return friendly_name
|
return friendly_name
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Servers_Stats Methods
|
# Servers_Stats Methods
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_latest_server_stats(server_id):
|
def get_latest_server_stats(server_id):
|
||||||
return Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).limit(1)
|
return (
|
||||||
|
Server_Stats.select()
|
||||||
|
.where(Server_Stats.server_id == server_id)
|
||||||
|
.order_by(Server_Stats.created.desc())
|
||||||
|
.limit(1)
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_stats_by_id(server_id):
|
def get_server_stats_by_id(server_id):
|
||||||
stats = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).limit(1)
|
stats = (
|
||||||
|
Server_Stats.select()
|
||||||
|
.where(Server_Stats.server_id == server_id)
|
||||||
|
.order_by(Server_Stats.created.desc())
|
||||||
|
.limit(1)
|
||||||
|
)
|
||||||
return db_helper.return_rows(stats)[0]
|
return db_helper.return_rows(stats)[0]
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -188,28 +225,37 @@ class helper_servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def sever_crashed(server_id):
|
def sever_crashed(server_id):
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(crashed=True).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(crashed=True).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_download(server_id):
|
def set_download(server_id):
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(downloading=True).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(downloading=True).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def finish_download(server_id):
|
def finish_download(server_id):
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(downloading=False).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(downloading=False).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_download_status(server_id):
|
def get_download_status(server_id):
|
||||||
download_status = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
download_status = (
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||||
|
)
|
||||||
return download_status.downloading
|
return download_status.downloading
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def server_crash_reset(server_id):
|
def server_crash_reset(server_id):
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(crashed=False).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(crashed=False).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def is_crashed(server_id):
|
def is_crashed(server_id):
|
||||||
@ -228,11 +274,15 @@ class helper_servers:
|
|||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error(f"Database entry not found! {ex}")
|
logger.error(f"Database entry not found! {ex}")
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(updating=value).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(updating=value).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_update_status(server_id):
|
def get_update_status(server_id):
|
||||||
update_status = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
update_status = (
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||||
|
)
|
||||||
return update_status.updating
|
return update_status.updating
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -245,22 +295,32 @@ class helper_servers:
|
|||||||
logger.error(f"Database entry not found! {ex}")
|
logger.error(f"Database entry not found! {ex}")
|
||||||
return
|
return
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(first_run=False).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(first_run=False).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_first_run(server_id):
|
def get_first_run(server_id):
|
||||||
first_run = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
first_run = (
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||||
|
)
|
||||||
return first_run.first_run
|
return first_run.first_run
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_TTL_without_player(server_id):
|
def get_TTL_without_player(server_id):
|
||||||
last_stat = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).first()
|
last_stat = (
|
||||||
last_stat_with_player = (Server_Stats
|
Server_Stats.select()
|
||||||
.select()
|
.where(Server_Stats.server_id == server_id)
|
||||||
|
.order_by(Server_Stats.created.desc())
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
last_stat_with_player = (
|
||||||
|
Server_Stats.select()
|
||||||
.where(Server_Stats.server_id == server_id)
|
.where(Server_Stats.server_id == server_id)
|
||||||
.where(Server_Stats.online > 0)
|
.where(Server_Stats.online > 0)
|
||||||
.order_by(Server_Stats.created.desc())
|
.order_by(Server_Stats.created.desc())
|
||||||
.first())
|
.first()
|
||||||
|
)
|
||||||
return last_stat.created - last_stat_with_player.created
|
return last_stat.created - last_stat_with_player.created
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -279,11 +339,15 @@ class helper_servers:
|
|||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error(f"Database entry not found! {ex}")
|
logger.error(f"Database entry not found! {ex}")
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(waiting_start=value).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(waiting_start=value).where(
|
||||||
|
Server_Stats.server_id == server_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_waiting_start(server_id):
|
def get_waiting_start(server_id):
|
||||||
waiting_start = Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
waiting_start = (
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id).get()
|
||||||
|
)
|
||||||
return waiting_start.waiting_start
|
return waiting_start.waiting_start
|
||||||
|
|
||||||
|
|
||||||
|
@ -6,22 +6,33 @@ from app.classes.models.roles import Roles, roles_helper
|
|||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, CompositeKey, DoesNotExist, JOIN
|
from peewee import (
|
||||||
|
SqliteDatabase,
|
||||||
|
Model,
|
||||||
|
ForeignKeyField,
|
||||||
|
CharField,
|
||||||
|
AutoField,
|
||||||
|
DateTimeField,
|
||||||
|
BooleanField,
|
||||||
|
CompositeKey,
|
||||||
|
DoesNotExist,
|
||||||
|
JOIN,
|
||||||
|
)
|
||||||
from playhouse.shortcuts import model_to_dict
|
from playhouse.shortcuts import model_to_dict
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
helper.auto_installer_fix(e)
|
helper.auto_installer_fix(e)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger("peewee")
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
database = SqliteDatabase(helper.db_path, pragmas = {
|
database = SqliteDatabase(
|
||||||
'journal_mode': 'wal',
|
helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
|
||||||
'cache_size': -1024 * 10})
|
)
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# Users Class
|
# Users Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class Users(Model):
|
class Users(Model):
|
||||||
user_id = AutoField()
|
user_id = AutoField()
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
@ -34,7 +45,7 @@ class Users(Model):
|
|||||||
enabled = BooleanField(default=True)
|
enabled = BooleanField(default=True)
|
||||||
superuser = BooleanField(default=False)
|
superuser = BooleanField(default=False)
|
||||||
lang = CharField(default="en_EN")
|
lang = CharField(default="en_EN")
|
||||||
support_logs = CharField(default = '')
|
support_logs = CharField(default="")
|
||||||
valid_tokens_from = DateTimeField(default=datetime.datetime.now)
|
valid_tokens_from = DateTimeField(default=datetime.datetime.now)
|
||||||
server_order = CharField(default="")
|
server_order = CharField(default="")
|
||||||
preparing = BooleanField(default=False)
|
preparing = BooleanField(default=False)
|
||||||
@ -44,40 +55,40 @@ class Users(Model):
|
|||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
# API Keys Class
|
# API Keys Class
|
||||||
# ************************************************************************************************
|
# **********************************************************************************
|
||||||
class ApiKeys(Model):
|
class ApiKeys(Model):
|
||||||
token_id = AutoField()
|
token_id = AutoField()
|
||||||
name = CharField(default='', unique=True, index=True)
|
name = CharField(default="", unique=True, index=True)
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
user_id = ForeignKeyField(Users, backref='api_token', index=True)
|
user_id = ForeignKeyField(Users, backref="api_token", index=True)
|
||||||
server_permissions = CharField(default='00000000')
|
server_permissions = CharField(default="00000000")
|
||||||
crafty_permissions = CharField(default='000')
|
crafty_permissions = CharField(default="000")
|
||||||
superuser = BooleanField(default=False)
|
superuser = BooleanField(default=False)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = 'api_keys'
|
table_name = "api_keys"
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# User Roles Class
|
# User Roles Class
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
class User_Roles(Model):
|
class User_Roles(Model):
|
||||||
user_id = ForeignKeyField(Users, backref='user_role')
|
user_id = ForeignKeyField(Users, backref="user_role")
|
||||||
role_id = ForeignKeyField(Roles, backref='user_role')
|
role_id = ForeignKeyField(Roles, backref="user_role")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table_name = 'user_roles'
|
table_name = "user_roles"
|
||||||
primary_key = CompositeKey('user_id', 'role_id')
|
primary_key = CompositeKey("user_id", "role_id")
|
||||||
database = database
|
database = database
|
||||||
|
|
||||||
#************************************************************************************************
|
|
||||||
# Users Helpers
|
|
||||||
#************************************************************************************************
|
|
||||||
class helper_users:
|
|
||||||
|
|
||||||
|
# **********************************************************************************
|
||||||
|
# Users Helpers
|
||||||
|
# **********************************************************************************
|
||||||
|
class helper_users:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_by_id(user_id):
|
def get_by_id(user_id):
|
||||||
return Users.get_by_id(user_id)
|
return Users.get_by_id(user_id)
|
||||||
@ -107,19 +118,19 @@ class helper_users:
|
|||||||
def get_user(user_id):
|
def get_user(user_id):
|
||||||
if user_id == 0:
|
if user_id == 0:
|
||||||
return {
|
return {
|
||||||
'user_id': 0,
|
"user_id": 0,
|
||||||
'created': '10/24/2019, 11:34:00',
|
"created": "10/24/2019, 11:34:00",
|
||||||
'last_login': '10/24/2019, 11:34:00',
|
"last_login": "10/24/2019, 11:34:00",
|
||||||
'last_update': '10/24/2019, 11:34:00',
|
"last_update": "10/24/2019, 11:34:00",
|
||||||
'last_ip': "127.27.23.89",
|
"last_ip": "127.27.23.89",
|
||||||
'username': "SYSTEM",
|
"username": "SYSTEM",
|
||||||
'password': None,
|
"password": None,
|
||||||
'email': "default@example.com",
|
"email": "default@example.com",
|
||||||
'enabled': True,
|
"enabled": True,
|
||||||
'superuser': True,
|
"superuser": True,
|
||||||
'roles': [],
|
"roles": [],
|
||||||
'servers': [],
|
"servers": [],
|
||||||
'support_logs': '',
|
"support_logs": "",
|
||||||
}
|
}
|
||||||
user = model_to_dict(Users.get(Users.user_id == user_id))
|
user = model_to_dict(Users.get(Users.user_id == user_id))
|
||||||
|
|
||||||
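For any real user ID, get_user serializes the peewee row into a plain dict via playhouse's model_to_dict; user_id 0 is short-circuited to the hard-coded SYSTEM pseudo-user above. A standalone sketch of the serialization step, with a toy model rather than Crafty's Users table:

from peewee import SqliteDatabase, Model, AutoField, CharField, BooleanField
from playhouse.shortcuts import model_to_dict

db = SqliteDatabase(":memory:")


class User(Model):
    # Toy stand-in for the Users model
    user_id = AutoField()
    username = CharField(default="")
    superuser = BooleanField(default=False)

    class Meta:
        database = db


db.create_tables([User])
uid = User.insert({User.username: "admin", User.superuser: True}).execute()

row = User.get(User.user_id == uid)
print(model_to_dict(row))
# {'user_id': 1, 'username': 'admin', 'superuser': True}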
@ -147,31 +158,47 @@ class helper_users:
|
|||||||
return user
|
return user
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user(username: str, password: str = None, email: Optional[str] = None, enabled: bool = True, superuser: bool = False) -> str:
|
def add_user(
|
||||||
|
username: str,
|
||||||
|
password: str = None,
|
||||||
|
email: Optional[str] = None,
|
||||||
|
enabled: bool = True,
|
||||||
|
superuser: bool = False,
|
||||||
|
) -> str:
|
||||||
if password is not None:
|
if password is not None:
|
||||||
pw_enc = helper.encode_pass(password)
|
pw_enc = helper.encode_pass(password)
|
||||||
else:
|
else:
|
||||||
pw_enc = None
|
pw_enc = None
|
||||||
user_id = Users.insert({
|
user_id = Users.insert(
|
||||||
|
{
|
||||||
Users.username: username.lower(),
|
Users.username: username.lower(),
|
||||||
Users.password: pw_enc,
|
Users.password: pw_enc,
|
||||||
Users.email: email,
|
Users.email: email,
|
||||||
Users.enabled: enabled,
|
Users.enabled: enabled,
|
||||||
Users.superuser: superuser,
|
Users.superuser: superuser,
|
||||||
Users.created: helper.get_time_as_string()
|
Users.created: helper.get_time_as_string(),
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
return user_id
|
return user_id
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_rawpass_user(username: str, password: str = None, email: Optional[str] = None, enabled: bool = True, superuser: bool = False) -> str:
|
def add_rawpass_user(
|
||||||
user_id = Users.insert({
|
username: str,
|
||||||
|
password: str = None,
|
||||||
|
email: Optional[str] = None,
|
||||||
|
enabled: bool = True,
|
||||||
|
superuser: bool = False,
|
||||||
|
) -> str:
|
||||||
|
user_id = Users.insert(
|
||||||
|
{
|
||||||
Users.username: username.lower(),
|
Users.username: username.lower(),
|
||||||
Users.password: password,
|
Users.password: password,
|
||||||
Users.email: email,
|
Users.email: email,
|
||||||
Users.enabled: enabled,
|
Users.enabled: enabled,
|
||||||
Users.superuser: superuser,
|
Users.superuser: superuser,
|
||||||
Users.created: helper.get_time_as_string()
|
Users.created: helper.get_time_as_string(),
|
||||||
}).execute()
|
}
|
||||||
|
).execute()
|
||||||
return user_id
|
return user_id
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -183,7 +210,9 @@ class helper_users:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def update_server_order(user_id, user_server_order):
|
def update_server_order(user_id, user_server_order):
|
||||||
Users.update(server_order = user_server_order).where(Users.user_id == user_id).execute()
|
Users.update(server_order=user_server_order).where(
|
||||||
|
Users.user_id == user_id
|
||||||
|
).execute()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_order(user_id):
|
def get_server_order(user_id):
|
||||||
@ -208,7 +237,9 @@ class helper_users:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_support_path(user_id, support_path):
|
def set_support_path(user_id, support_path):
|
||||||
Users.update(support_logs = support_path).where(Users.user_id == user_id).execute()
|
Users.update(support_logs=support_path).where(
|
||||||
|
+            Users.user_id == user_id
+        ).execute()

     @staticmethod
     def set_prepare(user_id):
@@ -229,9 +260,9 @@ class helper_users:
             return False
         return True

-    #************************************************************************************************
+    # **********************************************************************************
     # User_Roles Methods
-    #************************************************************************************************
+    # **********************************************************************************

     @staticmethod
     def get_or_create(user_id, role_id):
@@ -242,7 +273,7 @@ class helper_users:
         roles_list = []
         roles = User_Roles.select().where(User_Roles.user_id == user_id)
         for r in roles:
-            roles_list.append(roles_helper.get_role(r.role_id)['role_id'])
+            roles_list.append(roles_helper.get_role(r.role_id)["role_id"])
         return roles_list

     @staticmethod
@@ -250,33 +281,37 @@ class helper_users:
         roles_list = []
         roles = User_Roles.select().where(User_Roles.user_id == user_id)
         for r in roles:
-            roles_list.append(roles_helper.get_role(r.role_id)['role_name'])
+            roles_list.append(roles_helper.get_role(r.role_id)["role_name"])
         return roles_list

     @staticmethod
     def add_role_to_user(user_id, role_id):
-        User_Roles.insert({
-            User_Roles.user_id: user_id,
-            User_Roles.role_id: role_id
-        }).execute()
+        User_Roles.insert(
+            {User_Roles.user_id: user_id, User_Roles.role_id: role_id}
+        ).execute()

     @staticmethod
     def add_user_roles(user: Union[dict, Users]):
         if isinstance(user, dict):
-            user_id = user['user_id']
+            user_id = user["user_id"]
         else:
            user_id = user.user_id

-        # I just copied this code from get_user, it had those TODOs & comments made by mac - Lukas
+        # I just copied this code from get_user,
+        # it had those TODOs & comments made by mac - Lukas

-        roles_query = User_Roles.select().join(Roles, JOIN.INNER).where(User_Roles.user_id == user_id)
+        roles_query = (
+            User_Roles.select()
+            .join(Roles, JOIN.INNER)
+            .where(User_Roles.user_id == user_id)
+        )
         # TODO: this query needs to be narrower
         roles = set()
         for r in roles_query:
             roles.add(r.role_id.role_id)

         if isinstance(user, dict):
-            user['roles'] = roles
+            user["roles"] = roles
         else:
             user.roles = roles

@@ -293,15 +328,17 @@ class helper_users:

     @staticmethod
     def delete_user_roles(user_id, removed_roles):
-        User_Roles.delete().where(User_Roles.user_id == user_id).where(User_Roles.role_id.in_(removed_roles)).execute()
+        User_Roles.delete().where(User_Roles.user_id == user_id).where(
+            User_Roles.role_id.in_(removed_roles)
+        ).execute()

     @staticmethod
     def remove_roles_from_role_id(role_id):
         User_Roles.delete().where(User_Roles.role_id == role_id).execute()

-    # ************************************************************************************************
+    # **********************************************************************************
     # ApiKeys Methods
-    # ************************************************************************************************
+    # **********************************************************************************

     @staticmethod
     def get_user_api_keys(user_id: str):
@@ -317,14 +354,25 @@ class helper_users:
         user_id: str,
         superuser: bool = False,
         server_permissions_mask: Optional[str] = None,
-        crafty_permissions_mask: Optional[str] = None):
-        return ApiKeys.insert({
+        crafty_permissions_mask: Optional[str] = None,
+    ):
+        return ApiKeys.insert(
+            {
             ApiKeys.name: name,
             ApiKeys.user_id: user_id,
-            **({ApiKeys.server_permissions: server_permissions_mask} if server_permissions_mask is not None else {}),
-            **({ApiKeys.crafty_permissions: crafty_permissions_mask} if crafty_permissions_mask is not None else {}),
-            ApiKeys.superuser: superuser
-        }).execute()
+                **(
+                    {ApiKeys.server_permissions: server_permissions_mask}
+                    if server_permissions_mask is not None
+                    else {}
+                ),
+                **(
+                    {ApiKeys.crafty_permissions: crafty_permissions_mask}
+                    if crafty_permissions_mask is not None
+                    else {}
+                ),
+                ApiKeys.superuser: superuser,
+            }
+        ).execute()

     @staticmethod
     def delete_user_api_keys(user_id: str):
@@ -335,5 +383,4 @@ class helper_users:
         ApiKeys.delete().where(ApiKeys.token_id == key_id).execute()


-
 users_helper = helper_users()
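The add_user_api_key hunk above keeps the existing conditional dict-unpacking trick: a key only ends up in the inserted row when its mask was supplied. A minimal, standalone sketch of that pattern with plain dicts (the function name and key names here are illustrative, not from the project):

def build_key_row(name, server_mask=None, crafty_mask=None):
    # **({...} if value is not None else {}) expands to nothing when value is None
    return {
        "name": name,
        **({"server_permissions": server_mask} if server_mask is not None else {}),
        **({"crafty_permissions": crafty_mask} if crafty_mask is not None else {}),
    }

print(build_key_row("backup-bot"))                       # {'name': 'backup-bot'}
print(build_key_row("backup-bot", server_mask="10100"))  # adds server_permissions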
@@ -14,12 +14,13 @@ except ModuleNotFoundError as e:

 logger = logging.getLogger(__name__)

+
 class Authentication:
     def __init__(self):
         self.secret = "my secret"
-        self.secret = helper.get_setting('apikey_secret', None)
+        self.secret = helper.get_setting("apikey_secret", None)

-        if self.secret is None or self.secret == 'random':
+        if self.secret is None or self.secret == "random":
             self.secret = helper.random_string_generator(64)

     @staticmethod
@@ -27,13 +28,9 @@ class Authentication:
         if extra is None:
             extra = {}
         return jwt.encode(
-            {
-                'user_id': user_id,
-                'iat': int(time.time()),
-                **extra
-            },
+            {"user_id": user_id, "iat": int(time.time()), **extra},
             authentication.secret,
-            algorithm="HS256"
+            algorithm="HS256",
         )

     @staticmethod
@@ -49,23 +46,26 @@ class Authentication:
         return None

     @staticmethod
-    def check(token) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
+    def check(
+        token,
+    ) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
         try:
             data = jwt.decode(token, authentication.secret, algorithms=["HS256"])
         except PyJWTError as error:
             logger.debug("Error while checking JWT token: ", exc_info=error)
             return None
-        iat: int = data['iat']
+        iat: int = data["iat"]
         key: Optional[ApiKeys] = None
-        if 'token_id' in data:
-            key_id = data['token_id']
+        if "token_id" in data:
+            key_id = data["token_id"]
             key = users_helper.get_user_api_key(key_id)
             if key is None:
                 return None
-        user_id: str = data['user_id']
+        user_id: str = data["user_id"]
         user = users_helper.get_user(user_id)
-        # TODO: Have a cache or something so we don't constantly have to query the database
-        if int(user.get('valid_tokens_from').timestamp()) < iat:
+        # TODO: Have a cache or something so we don't constantly
+        # have to query the database
+        if int(user.get("valid_tokens_from").timestamp()) < iat:
             # Success!
             return key, data, user
         else:
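The Authentication hunk above encodes a payload containing user_id and iat with HS256 and, on check, decodes it and rejects tokens issued before the user's valid_tokens_from timestamp. A minimal PyJWT round-trip sketch (the secret and function names here are placeholders, not the project's):

import time
import jwt  # PyJWT

SECRET = "change-me"  # placeholder; the real secret comes from configuration

def make_token(user_id):
    return jwt.encode({"user_id": user_id, "iat": int(time.time())}, SECRET, algorithm="HS256")

def read_token(token):
    try:
        return jwt.decode(token, SECRET, algorithms=["HS256"])
    except jwt.PyJWTError:
        return None  # bad signature, malformed token, etc.

print(read_token(make_token(42)))  # {'user_id': 42, 'iat': ...}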
@@ -11,8 +11,8 @@ from app.classes.web.websocket_helper import websocket_helper

 logger = logging.getLogger(__name__)

-class MainPrompt(cmd.Cmd):

+
+class MainPrompt(cmd.Cmd):
     def __init__(self, tasks_manager, migration_manager):
         super().__init__()
         self.tasks_manager = tasks_manager
@@ -31,42 +31,47 @@ class MainPrompt(cmd.Cmd):
         self.universal_exit()

     def do_migrations(self, line):
-        if line == 'up':
+        if line == "up":
             self.migration_manager.up()
-        elif line == 'down':
+        elif line == "down":
             self.migration_manager.down()
-        elif line == 'done':
+        elif line == "done":
             console.info(self.migration_manager.done)
-        elif line == 'todo':
+        elif line == "todo":
             console.info(self.migration_manager.todo)
-        elif line == 'diff':
+        elif line == "diff":
             console.info(self.migration_manager.diff)
-        elif line == 'info':
-            console.info(f'Done: {self.migration_manager.done}')
-            console.info(f'FS: {self.migration_manager.todo}')
-            console.info(f'Todo: {self.migration_manager.diff}')
-        elif line.startswith('add '):
-            migration_name = line[len('add '):]
+        elif line == "info":
+            console.info(f"Done: {self.migration_manager.done}")
+            console.info(f"FS: {self.migration_manager.todo}")
+            console.info(f"Todo: {self.migration_manager.diff}")
+        elif line.startswith("add "):
+            migration_name = line[len("add ") :]
             self.migration_manager.create(migration_name, False)
         else:
-            console.info('Unknown migration command')
+            console.info("Unknown migration command")

     @staticmethod
     def do_threads(_line):
         for thread in threading.enumerate():
             if sys.version_info >= (3, 8):
-                print(f'Name: {thread.name} Identifier: {thread.ident} TID/PID: {thread.native_id}')
+                print(
+                    f"Name: {thread.name} Identifier: "
+                    f"{thread.ident} TID/PID: {thread.native_id}"
+                )
             else:
-                print(f'Name: {thread.name} Identifier: {thread.ident}')
+                print(f"Name: {thread.name} Identifier: {thread.ident}")

     def do_import3(self, _line):
         import3.start_import()

     def universal_exit(self):
         logger.info("Stopping all server daemons / threads")
-        console.info("Stopping all server daemons / threads - This may take a few seconds")
+        console.info(
+            "Stopping all server daemons / threads - This may take a few seconds"
+        )
         websocket_helper.disconnect_all()
-        console.info('Waiting for main thread to stop')
+        console.info("Waiting for main thread to stop")
         while True:
             if self.tasks_manager.get_main_thread_run_status():
                 sys.exit(0)
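In the do_threads hunk above, Black splits the long print() into adjacent f-string literals. Python concatenates adjacent string literals at compile time, so the wrapped form prints exactly the same text. A tiny illustration with made-up values:

name, ident, native_id = "worker", 139935, 4242
msg = (
    f"Name: {name} Identifier: "
    f"{ident} TID/PID: {native_id}"
)
# identical to the single long f-string it replaced
assert msg == f"Name: {name} Identifier: {ident} TID/PID: {native_id}"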
@@ -12,16 +12,18 @@ except ModuleNotFoundError as ex:
     logger.critical(f"Import Error: Unable to load {ex.name} module", exc_info=True)
     print(f"Import Error: Unable to load {ex.name} module")
     from app.classes.shared.installer import installer
-    installer.do_install()
-class Console:

+    installer.do_install()
+
+
+class Console:
     def __init__(self):
-        if 'colorama' in sys.modules:
+        if "colorama" in sys.modules:
             init()

     @staticmethod
     def do_print(message, color):
-        if 'termcolor' in sys.modules or 'colorama' in sys.modules:
+        if "termcolor" in sys.modules or "colorama" in sys.modules:
             print(colored(message, color))
         else:
             print(message)
@@ -1,8 +1,10 @@
 class CraftyException(Exception):
     pass

+
 class DatabaseException(CraftyException):
     pass

+
 class SchemaError(DatabaseException):
     pass
@@ -6,12 +6,9 @@ from zipfile import ZipFile, ZIP_DEFLATED

 logger = logging.getLogger(__name__)


 class FileHelpers:
-    allowed_quotes = [
-        "\"",
-        "'",
-        "`"
-    ]
+    allowed_quotes = ['"', "'", "`"]

     def del_dirs(self, path):
         path = pathlib.Path(path)
@@ -59,43 +56,60 @@ class FileHelpers:
     @staticmethod
     def make_archive(path_to_destination, path_to_zip):
         # create a ZipFile object
-        path_to_destination += '.zip'
-        with ZipFile(path_to_destination, 'w') as z:
+        path_to_destination += ".zip"
+        with ZipFile(path_to_destination, "w") as z:
             for root, _dirs, files in os.walk(path_to_zip, topdown=True):
                 ziproot = path_to_zip
                 for file in files:
                     try:
                         logger.info(f"backing up: {os.path.join(root, file)}")
                         if os.name == "nt":
-                            z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, ""), file))
+                            z.write(
+                                os.path.join(root, file),
+                                os.path.join(root.replace(ziproot, ""), file),
+                            )
                         else:
-                            z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, "/"), file))
+                            z.write(
+                                os.path.join(root, file),
+                                os.path.join(root.replace(ziproot, "/"), file),
+                            )

                     except Exception as e:
-                        logger.warning(f"Error backing up: {os.path.join(root, file)}! - Error was: {e}")
+                        logger.warning(
+                            f"Error backing up: {os.path.join(root, file)}!"
+                            f" - Error was: {e}"
+                        )
         return True

     @staticmethod
     def make_compressed_archive(path_to_destination, path_to_zip):
         # create a ZipFile object
-        path_to_destination += '.zip'
-        with ZipFile(path_to_destination, 'w', ZIP_DEFLATED) as z:
+        path_to_destination += ".zip"
+        with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as z:
             for root, _dirs, files in os.walk(path_to_zip, topdown=True):
                 ziproot = path_to_zip
                 for file in files:
                     try:
                         logger.info(f"backing up: {os.path.join(root, file)}")
                         if os.name == "nt":
-                            z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, ""), file))
+                            z.write(
+                                os.path.join(root, file),
+                                os.path.join(root.replace(ziproot, ""), file),
+                            )
                         else:
-                            z.write(os.path.join(root, file), os.path.join(root.replace(ziproot, "/"), file))
+                            z.write(
+                                os.path.join(root, file),
+                                os.path.join(root.replace(ziproot, "/"), file),
+                            )

                     except Exception as e:
-                        logger.warning(f"Error backing up: {os.path.join(root, file)}! - Error was: {e}")
+                        logger.warning(
+                            f"Error backing up: {os.path.join(root, file)}!"
+                            f" - Error was: {e}"
+                        )

         return True


 file_helper = FileHelpers()
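make_archive above walks a directory with os.walk and writes each file into a zip, stripping the source prefix so entries are stored with relative paths. A short standalone sketch of the same idea; it uses os.path.relpath instead of the string-replace seen in the diff, and the paths in the comment are hypothetical:

import os
from zipfile import ZipFile, ZIP_DEFLATED

def zip_dir(src_dir, dest_zip):
    # store every file relative to src_dir so the archive contains no absolute paths
    with ZipFile(dest_zip, "w", ZIP_DEFLATED) as z:
        for root, _dirs, files in os.walk(src_dir):
            for name in files:
                full = os.path.join(root, name)
                z.write(full, os.path.relpath(full, src_dir))

# zip_dir("./servers/vanilla", "./backups/vanilla.zip")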
@@ -37,28 +37,27 @@ except ModuleNotFoundError as err:
     print(f"Import Error: Unable to load {err.name} module")
     installer.do_install()


 class Helpers:
-    allowed_quotes = [
-        "\"",
-        "'",
-        "`"
-    ]
+    allowed_quotes = ['"', "'", "`"]

     def __init__(self):
         self.root_dir = os.path.abspath(os.path.curdir)
-        self.config_dir = os.path.join(self.root_dir, 'app', 'config')
-        self.webroot = os.path.join(self.root_dir, 'app', 'frontend')
-        self.servers_dir = os.path.join(self.root_dir, 'servers')
-        self.backup_path = os.path.join(self.root_dir, 'backups')
-        self.migration_dir = os.path.join(self.root_dir, 'app', 'migrations')
+        self.config_dir = os.path.join(self.root_dir, "app", "config")
+        self.webroot = os.path.join(self.root_dir, "app", "frontend")
+        self.servers_dir = os.path.join(self.root_dir, "servers")
+        self.backup_path = os.path.join(self.root_dir, "backups")
+        self.migration_dir = os.path.join(self.root_dir, "app", "migrations")

-        self.session_file = os.path.join(self.root_dir, 'app', 'config', 'session.lock')
-        self.settings_file = os.path.join(self.root_dir, 'app', 'config', 'config.json')
+        self.session_file = os.path.join(self.root_dir, "app", "config", "session.lock")
+        self.settings_file = os.path.join(self.root_dir, "app", "config", "config.json")

-        self.ensure_dir_exists(os.path.join(self.root_dir, 'app', 'config', 'db'))
-        self.db_path = os.path.join(self.root_dir, 'app', 'config', 'db', 'crafty.sqlite')
-        self.serverjar_cache = os.path.join(self.config_dir, 'serverjars.json')
-        self.credits_cache = os.path.join(self.config_dir, 'credits.json')
+        self.ensure_dir_exists(os.path.join(self.root_dir, "app", "config", "db"))
+        self.db_path = os.path.join(
+            self.root_dir, "app", "config", "db", "crafty.sqlite"
+        )
+        self.serverjar_cache = os.path.join(self.config_dir, "serverjars.json")
+        self.credits_cache = os.path.join(self.config_dir, "credits.json")
         self.passhasher = PasswordHasher()
         self.exiting = False

@@ -74,7 +73,7 @@ class Helpers:

     def check_file_perms(self, path):
         try:
-            open(path, "r", encoding='utf-8').close()
+            open(path, "r", encoding="utf-8").close()
             logger.info(f"{path} is readable")
             return True
         except PermissionError:
@@ -97,7 +96,7 @@ class Helpers:
     @staticmethod
     def check_internet():
         try:
-            requests.get('https://google.com', timeout=1)
+            requests.get("https://google.com", timeout=1)
             return True
         except Exception:
             return False
@@ -105,9 +104,9 @@ class Helpers:
     @staticmethod
     def check_port(server_port):
         try:
-            ip = get('https://api.ipify.org').content.decode('utf8')
+            ip = get("https://api.ipify.org").content.decode("utf8")
         except:
-            ip = 'google.com'
+            ip = "google.com"
         a_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         a_socket.settimeout(20.0)

@@ -125,7 +124,7 @@ class Helpers:
     def check_server_conn(server_port):
         a_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         a_socket.settimeout(10.0)
-        ip = '127.0.0.1'
+        ip = "127.0.0.1"

         location = (ip, server_port)
         result_of_check = a_socket.connect_ex(location)
@@ -143,30 +142,41 @@ class Helpers:
         ci = -1  # command index - pointer to the argument we're building in cmd_out
         np = True  # whether we're creating a new argument/parameter
         esc = False  # whether an escape character was encountered
-        stch = None  # if we're dealing with a quote, save the quote type here. Nested quotes to be dealt with by the command
+        stch = None  # if we're dealing with a quote, save the quote type here.
+        # Nested quotes to be dealt with by the command
         for c in cmd_in:  # for character in string
-            if np:  # if set, begin a new argument and increment the command index. Continue the loop.
-                if c == ' ':
+            if np:  # if set, begin a new argument and increment the command index.
+                # Continue the loop.
+                if c == " ":
                     continue
                 else:
                     ci += 1
                     cmd_out.append("")
                     np = False
-            if esc:  # if we encountered an escape character on the last loop, append this char regardless of what it is
+            if esc:  # if we encountered an escape character on the last loop,
+                # append this char regardless of what it is
                 if c not in Helpers.allowed_quotes:
-                    cmd_out[ci] += '\\'
+                    cmd_out[ci] += "\\"
                 cmd_out[ci] += c
                 esc = False
             else:
-                if c == '\\':  # if the current character is an escape character, set the esc flag and continue to next loop
+                if c == "\\":  # if the current character is an escape character,
+                    # set the esc flag and continue to next loop
                     esc = True
-                elif c == ' ' and stch is None:  # if we encounter a space and are not dealing with a quote,
+                elif (
+                    c == " " and stch is None
+                ):  # if we encounter a space and are not dealing with a quote,
                     # set the new argument flag and continue to next loop
                     np = True
-                elif c == stch:  # if we encounter the character that matches our start quote, end the quote and continue to next loop
+                elif (
+                    c == stch
+                ):  # if we encounter the character that matches our start quote,
+                    # end the quote and continue to next loop
                     stch = None
-                elif stch is None and (c in Helpers.allowed_quotes):  # if we're not in the middle of a quote and we get a quotable character,
-                    # start a quote and proceed to the next loop
+                elif stch is None and (
+                    c in Helpers.allowed_quotes
+                ):  # if we're not in the middle of a quote and we get a quotable
+                    # character, start a quote and proceed to the next loop
                     stch = c
                 else:  # else, just store the character in the current arg
                     cmd_out[ci] += c
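The hunk above is a small state machine that splits a command string into arguments: spaces delimit arguments, quoted spans stay together, and a backslash escapes the next character. The standard-library shlex module is not what the project uses, but it illustrates the intended behaviour in one line:

import shlex

# spaces split, quoted spans stay whole, backslash escapes the next character
print(shlex.split('say "hello world" \\!'))  # ['say', 'hello world', '!']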
@@ -175,7 +185,7 @@ class Helpers:
     def get_setting(self, key, default_return=False):

         try:
-            with open(self.settings_file, "r", encoding='utf-8') as f:
+            with open(self.settings_file, "r", encoding="utf-8") as f:
                 data = json.load(f)

             if key in data.keys():
@@ -187,8 +197,12 @@ class Helpers:
             return default_return

         except Exception as e:
-            logger.critical(f"Config File Error: Unable to read {self.settings_file} due to {e}")
-            console.critical(f"Config File Error: Unable to read {self.settings_file} due to {e}")
+            logger.critical(
+                f"Config File Error: Unable to read {self.settings_file} due to {e}"
+            )
+            console.critical(
+                f"Config File Error: Unable to read {self.settings_file} due to {e}"
+            )

         return default_return

@@ -196,10 +210,10 @@ class Helpers:
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
         try:
             # doesn't even have to be reachable
-            s.connect(('10.255.255.255', 1))
+            s.connect(("10.255.255.255", 1))
             IP = s.getsockname()[0]
         except Exception:
-            IP = '127.0.0.1'
+            IP = "127.0.0.1"
         finally:
             s.close()
         return IP
@@ -207,7 +221,9 @@ class Helpers:
     def get_version(self):
         version_data = {}
         try:
-            with open(os.path.join(self.config_dir, 'version.json'), 'r', encoding='utf-8') as f:
+            with open(
+                os.path.join(self.config_dir, "version.json"), "r", encoding="utf-8"
+            ) as f:
                 version_data = json.load(f)

         except Exception as e:
@@ -217,9 +233,12 @@ class Helpers:

     @staticmethod
     def get_announcements():
-        r = requests.get('https://craftycontrol.com/notify.json', timeout=2)
-        data = '[{"id":"1","date":"Unknown","title":"Error getting Announcements","desc":"Error getting ' \
-               'Announcements","link":""}] '
+        r = requests.get("https://craftycontrol.com/notify.json", timeout=2)
+        data = (
+            '[{"id":"1","date":"Unknown",'
+            '"title":"Error getting Announcements",'
+            '"desc":"Error getting Announcements","link":""}]'
+        )

         if r.status_code in [200, 201]:
             try:
@@ -229,14 +248,13 @@ class Helpers:

         return data

-
     def get_version_string(self):

         version_data = self.get_version()
-        major = version_data.get('major', '?')
-        minor = version_data.get('minor', '?')
-        sub = version_data.get('sub', '?')
-        meta = version_data.get('meta', '?')
+        major = version_data.get("major", "?")
+        minor = version_data.get("minor", "?")
+        sub = version_data.get("sub", "?")
+        meta = version_data.get("meta", "?")

         # set some defaults if we don't get version_data from our helper
         version = f"{major}.{minor}.{sub}-{meta}"
@@ -256,25 +274,31 @@ class Helpers:
         # our regex replacements
         # note these are in a tuple

-        user_keywords = self.get_setting('keywords')
+        user_keywords = self.get_setting("keywords")

         replacements = [
-            (r'(\[.+?/INFO\])', r'<span class="mc-log-info">\1</span>'),
-            (r'(\[.+?/WARN\])', r'<span class="mc-log-warn">\1</span>'),
-            (r'(\[.+?/ERROR\])', r'<span class="mc-log-error">\1</span>'),
-            (r'(\[.+?/FATAL\])', r'<span class="mc-log-fatal">\1</span>'),
-            (r'(\w+?\[/\d+?\.\d+?\.\d+?\.\d+?\:\d+?\])', r'<span class="mc-log-keyword">\1</span>'),
-            (r'\[(\d\d:\d\d:\d\d)\]', r'<span class="mc-log-time">[\1]</span>'),
-            (r'(\[.+? INFO\])', r'<span class="mc-log-info">\1</span>'),
-            (r'(\[.+? WARN\])', r'<span class="mc-log-warn">\1</span>'),
-            (r'(\[.+? ERROR\])', r'<span class="mc-log-error">\1</span>'),
-            (r'(\[.+? FATAL\])', r'<span class="mc-log-fatal">\1</span>')
+            (r"(\[.+?/INFO\])", r'<span class="mc-log-info">\1</span>'),
+            (r"(\[.+?/WARN\])", r'<span class="mc-log-warn">\1</span>'),
+            (r"(\[.+?/ERROR\])", r'<span class="mc-log-error">\1</span>'),
+            (r"(\[.+?/FATAL\])", r'<span class="mc-log-fatal">\1</span>'),
+            (
+                r"(\w+?\[/\d+?\.\d+?\.\d+?\.\d+?\:\d+?\])",
+                r'<span class="mc-log-keyword">\1</span>',
+            ),
+            (r"\[(\d\d:\d\d:\d\d)\]", r'<span class="mc-log-time">[\1]</span>'),
+            (r"(\[.+? INFO\])", r'<span class="mc-log-info">\1</span>'),
+            (r"(\[.+? WARN\])", r'<span class="mc-log-warn">\1</span>'),
+            (r"(\[.+? ERROR\])", r'<span class="mc-log-error">\1</span>'),
+            (r"(\[.+? FATAL\])", r'<span class="mc-log-fatal">\1</span>'),
         ]

         # highlight users keywords
         for keyword in user_keywords:
             # pylint: disable=consider-using-f-string
-            search_replace = (r'({})'.format(keyword), r'<span class="mc-log-keyword">\1</span>')
+            search_replace = (
+                r"({})".format(keyword),
+                r'<span class="mc-log-keyword">\1</span>',
+            )
             replacements.append(search_replace)

         for old, new in replacements:
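The replacements table above is applied with re.sub to wrap recognised log fragments in styled span tags. A minimal sketch with just one of those patterns and a made-up log line:

import re

line = "[Server thread/INFO] Done!"
line = re.sub(r"(\[.+?/INFO\])", r'<span class="mc-log-info">\1</span>', line)
print(line)  # <span class="mc-log-info">[Server thread/INFO]</span> Done!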
@@ -282,9 +306,8 @@ class Helpers:

         return line

-
     def validate_traversal(self, base_path, filename):
-        logger.debug(f"Validating traversal (\"{base_path}\", \"{filename}\")")
+        logger.debug(f'Validating traversal ("{base_path}", "{filename}")')
         base = pathlib.Path(base_path).resolve()
         file = pathlib.Path(filename)
         fileabs = base.joinpath(file).resolve()
@@ -294,7 +317,6 @@ class Helpers:
         else:
             raise ValueError("Path traversal detected")

-
     def tail_file(self, file_name, number_lines=20):
         if not self.check_file_exists(file_name):
             logger.warning(f"Unable to find file to tail: {file_name}")
@@ -307,7 +329,7 @@ class Helpers:
         line_buffer = number_lines * avg_line_length

         # open our file
-        with open(file_name, 'r', encoding='utf-8') as f:
+        with open(file_name, "r", encoding="utf-8") as f:

             # seek
             f.seek(0, 2)
@@ -315,7 +337,8 @@ class Helpers:
             # get file size
             fsize = f.tell()

-            # set pos @ last n chars (buffer from above = number of lines * avg_line_length)
+            # set pos @ last n chars
+            # (buffer from above = number of lines * avg_line_length)
             f.seek(max(fsize - line_buffer, 0), 0)

             # read file til the end
@@ -323,7 +346,9 @@ class Helpers:
                 lines = f.readlines()

             except Exception as e:
-                logger.warning(f'Unable to read a line in the file:{file_name} - due to error: {e}')
+                logger.warning(
+                    f"Unable to read a line in the file:{file_name} - due to error: {e}"
+                )

         # now we are done getting the lines, let's return it
         return lines
@@ -332,7 +357,7 @@ class Helpers:
     def check_writeable(path: str):
         filename = os.path.join(path, "tempfile.txt")
         try:
-            open(filename, "w", encoding='utf-8').close()
+            open(filename, "w", encoding="utf-8").close()
             os.remove(filename)

             logger.info(f"{filename} is writable")
@@ -355,31 +380,36 @@ class Helpers:
         return False

     def unzipFile(self, zip_path):
-        new_dir_list = zip_path.split('/')
-        new_dir = ''
+        new_dir_list = zip_path.split("/")
+        new_dir = ""
         for i in range(len(new_dir_list) - 1):
             if i == 0:
                 new_dir += new_dir_list[i]
             else:
-                new_dir += '/'+new_dir_list[i]
+                new_dir += "/" + new_dir_list[i]

         if helper.check_file_perms(zip_path) and os.path.isfile(zip_path):
             helper.ensure_dir_exists(new_dir)
             tempDir = tempfile.mkdtemp()
             try:
-                with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                with zipfile.ZipFile(zip_path, "r") as zip_ref:
                     zip_ref.extractall(tempDir)
                     for i in enumerate(zip_ref.filelist):
-                        if len(zip_ref.filelist) > 1 or not zip_ref.filelist[i].filename.endswith('/'):
+                        if len(zip_ref.filelist) > 1 or not zip_ref.filelist[
+                            i
+                        ].filename.endswith("/"):
                             break

                     full_root_path = tempDir

                     for item in os.listdir(full_root_path):
                         try:
-                            file_helper.move_dir(os.path.join(full_root_path, item), os.path.join(new_dir, item))
+                            file_helper.move_dir(
+                                os.path.join(full_root_path, item),
+                                os.path.join(new_dir, item),
+                            )
                         except Exception as ex:
-                            logger.error(f'ERROR IN ZIP IMPORT: {ex}')
+                            logger.error(f"ERROR IN ZIP IMPORT: {ex}")
             except Exception as ex:
                 print(ex)
         else:
@@ -387,8 +417,8 @@ class Helpers:
         return

     def ensure_logging_setup(self):
-        log_file = os.path.join(os.path.curdir, 'logs', 'commander.log')
-        session_log_file = os.path.join(os.path.curdir, 'logs', 'session.log')
+        log_file = os.path.join(os.path.curdir, "logs", "commander.log")
+        session_log_file = os.path.join(os.path.curdir, "logs", "session.log")

         logger.info("Checking app directory writable")

@@ -402,13 +432,13 @@ class Helpers:
         # ensure the log directory is there
         try:
             with suppress(FileExistsError):
-                os.makedirs(os.path.join(self.root_dir, 'logs'))
+                os.makedirs(os.path.join(self.root_dir, "logs"))
         except Exception as e:
             console.error(f"Failed to make logs directory with error: {e} ")

         # ensure the log file is there
         try:
-            open(log_file, 'a', encoding='utf-8').close()
+            open(log_file, "a", encoding="utf-8").close()
         except Exception as e:
             console.critical(f"Unable to open log file! {e}")
             sys.exit(1)
@@ -426,7 +456,8 @@ class Helpers:

     @staticmethod
     def calc_percent(source_path, dest_path):
-        #calculates percentable of zip from drive. Not with compression. For backups and support logs
+        # calculates percentable of zip from drive. Not with compression.
+        # (For backups and support logs)
         source_size = 0
         files_count = 0
         for path, _dirs, files in os.walk(source_path):
@@ -437,45 +468,39 @@ class Helpers:
         dest_size = os.path.getsize(str(dest_path))
         percent = round((dest_size / source_size) * 100, 1)
         if percent >= 0:
-            results = {
-                "percent": percent,
-                "total_files": files_count
-            }
+            results = {"percent": percent, "total_files": files_count}
         else:
-            results = {
-                "percent": 0,
-                "total_files": 0
-            }
+            results = {"percent": 0, "total_files": 0}
         return results

     @staticmethod
     def check_file_exists(path: str):
-        logger.debug(f'Looking for path: {path}')
+        logger.debug(f"Looking for path: {path}")

         if os.path.exists(path) and os.path.isfile(path):
-            logger.debug(f'Found path: {path}')
+            logger.debug(f"Found path: {path}")
             return True
         else:
             return False

     @staticmethod
-    def human_readable_file_size(num: int, suffix='B'):
-        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
+    def human_readable_file_size(num: int, suffix="B"):
+        for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]:
             if abs(num) < 1024.0:
                 # pylint: disable=consider-using-f-string
                 return "%3.1f%s%s" % (num, unit, suffix)
             num /= 1024.0
         # pylint: disable=consider-using-f-string
-        return "%.1f%s%s" % (num, 'Y', suffix)
+        return "%.1f%s%s" % (num, "Y", suffix)

     @staticmethod
     def check_path_exists(path: str):
         if not path:
             return False
-        logger.debug(f'Looking for path: {path}')
+        logger.debug(f"Looking for path: {path}")

         if os.path.exists(path):
-            logger.debug(f'Found path: {path}')
+            logger.debug(f"Found path: {path}")
             return True
         else:
             return False
@@ -483,12 +508,12 @@ class Helpers:
     @staticmethod
     def get_file_contents(path: str, lines=100):

-        contents = ''
+        contents = ""

         if os.path.exists(path) and os.path.isfile(path):
             try:
-                with open(path, 'r', encoding='utf-8') as f:
-                    for line in (f.readlines() [-lines:]):
+                with open(path, "r", encoding="utf-8") as f:
+                    for line in f.readlines()[-lines:]:
                         contents = contents + line

                 return contents
@@ -497,7 +522,9 @@ class Helpers:
                 logger.error(f"Unable to read file: {path}. \n Error: {e}")
                 return False
         else:
-            logger.error(f"Unable to read file: {path}. File not found, or isn't a file.")
+            logger.error(
+                f"Unable to read file: {path}. File not found, or isn't a file."
+            )
             return False

     def create_session_file(self, ignore=False):
@@ -510,33 +537,39 @@ class Helpers:
             file_data = self.get_file_contents(self.session_file)
             try:
                 data = json.loads(file_data)
-                pid = data.get('pid')
-                started = data.get('started')
+                pid = data.get("pid")
+                started = data.get("started")
                 if psutil.pid_exists(pid):
-                    console.critical(f"Another Crafty Controller agent seems to be running...\npid: {pid} \nstarted on: {started}")
+                    console.critical(
+                        f"Another Crafty Controller agent seems to be running..."
+                        f"\npid: {pid} \nstarted on: {started}"
+                    )
                     logger.critical("Found running crafty process. Exiting.")
                     sys.exit(1)
                 else:
-                    logger.info("No process found for pid. Assuming crafty crashed. Deleting stale session.lock")
+                    logger.info(
+                        "No process found for pid. Assuming "
+                        "crafty crashed. Deleting stale session.lock"
+                    )
                     os.remove(self.session_file)

             except Exception as e:
                 logger.error(f"Failed to locate existing session.lock with error: {e} ")
-                console.error(f"Failed to locate existing session.lock with error: {e} ")
+                console.error(
+                    f"Failed to locate existing session.lock with error: {e} "
+                )

                 sys.exit(1)

         pid = os.getpid()
         now = datetime.now()

-        session_data = {
-            'pid': pid,
-            'started': now.strftime("%d-%m-%Y, %H:%M:%S")
-        }
-        with open(self.session_file, 'w', encoding='utf-8') as f:
+        session_data = {"pid": pid, "started": now.strftime("%d-%m-%Y, %H:%M:%S")}
+        with open(self.session_file, "w", encoding="utf-8") as f:
             json.dump(session_data, f, indent=True)

-    # because this is a recursive function, we will return bytes, and set human readable later
+    # because this is a recursive function, we will return bytes,
+    # and set human readable later
    def get_dir_size(self, path: str):
         total = 0
         for entry in os.scandir(path):
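create_session_file above records the current pid and start time in a JSON lock file and refuses to start when psutil reports that pid as still alive; otherwise it treats the lock as stale and removes it. A condensed standalone sketch of the same pattern (the lock path and function name are hypothetical):

import json, os, sys
from datetime import datetime

import psutil

LOCK = "session.lock"  # hypothetical path

def acquire_lock():
    if os.path.exists(LOCK):
        with open(LOCK, "r", encoding="utf-8") as f:
            data = json.load(f)
        if psutil.pid_exists(data.get("pid", -1)):
            sys.exit("another instance is already running")
        os.remove(LOCK)  # stale lock left behind by a crashed run
    with open(LOCK, "w", encoding="utf-8") as f:
        json.dump({"pid": os.getpid(), "started": datetime.now().isoformat()}, f)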
@@ -548,26 +581,30 @@ class Helpers:

     @staticmethod
     def list_dir_by_date(path: str, reverse=False):
-        return [str(p) for p in sorted(pathlib.Path(path).iterdir(), key=os.path.getmtime, reverse=reverse)]
+        return [
+            str(p)
+            for p in sorted(
+                pathlib.Path(path).iterdir(), key=os.path.getmtime, reverse=reverse
+            )
+        ]

     def get_human_readable_files_sizes(self, paths: list):
         sizes = []
         for p in paths:
-            sizes.append({
-                "path": p,
-                "size": self.human_readable_file_size(os.stat(p).st_size)
-            })
+            sizes.append(
+                {"path": p, "size": self.human_readable_file_size(os.stat(p).st_size)}
+            )
         return sizes

     @staticmethod
     def base64_encode_string(fun_str: str):
-        s_bytes = str(fun_str).encode('utf-8')
+        s_bytes = str(fun_str).encode("utf-8")
         b64_bytes = base64.encodebytes(s_bytes)
-        return b64_bytes.decode('utf-8')
+        return b64_bytes.decode("utf-8")

     @staticmethod
     def base64_decode_string(fun_str: str):
-        s_bytes = str(fun_str).encode('utf-8')
+        s_bytes = str(fun_str).encode("utf-8")
         b64_bytes = base64.decodebytes(s_bytes)
         return b64_bytes.decode("utf-8")

@@ -578,7 +615,8 @@ class Helpers:
         """
         ensures a directory exists

-        Checks for the existence of a directory, if the directory isn't there, this function creates the directory
+        Checks for the existence of a directory, if the directory isn't there,
+        this function creates the directory

         Args:
             path (string): the path you are checking for
@@ -666,23 +704,23 @@ class Helpers:
         random_generator() = G8sjO2
         random_generator(3, abcdef) = adf
         """
-        return ''.join(random.choice(chars) for x in range(size))
+        return "".join(random.choice(chars) for x in range(size))

     @staticmethod
     def is_os_windows():
-        if os.name == 'nt':
+        if os.name == "nt":
             return True
         else:
             return False

     @staticmethod
     def wtol_path(w_path):
-        l_path = w_path.replace('\\', '/')
+        l_path = w_path.replace("\\", "/")
         return l_path

     @staticmethod
     def ltow_path(l_path):
-        w_path = l_path.replace('/', '\\')
+        w_path = l_path.replace("/", "\\")
         return w_path

     @staticmethod
@@ -694,10 +732,10 @@ class Helpers:
         data = {}

         if self.check_file_exists(default_file):
-            with open(default_file, 'r', encoding='utf-8') as f:
+            with open(default_file, "r", encoding="utf-8") as f:
                 data = json.load(f)

-            del_json = helper.get_setting('delete_default_json')
+            del_json = helper.get_setting("delete_default_json")

             if del_json:
                 os.remove(default_file)
@@ -714,14 +752,15 @@ class Helpers:
                 dir_list.append(item)
             else:
                 unsorted_files.append(item)
-        file_list = sorted(dir_list, key=str.casefold) + sorted(unsorted_files, key=str.casefold)
+        file_list = sorted(dir_list, key=str.casefold) + sorted(
+            unsorted_files, key=str.casefold
+        )
         for raw_filename in file_list:
             filename = html.escape(raw_filename)
             rel = os.path.join(folder, raw_filename)
             dpath = os.path.join(folder, filename)
             if os.path.isdir(rel):
-                output += \
-                    f"""<li class="tree-item" data-path="{dpath}">
+                output += f"""<li class="tree-item" data-path="{dpath}">
                 \n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
                 <span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
                 <i class="far fa-folder"></i>
@@ -729,15 +768,15 @@ class Helpers:
                 {filename}
                 </span>
                 </div><li>
-                \n"""\
+                \n"""

             else:
                 if filename != "crafty_managed.txt":
                     output += f"""<li
                     class="tree-nested d-block tree-ctx-item tree-file tree-item"
                     data-path="{dpath}"
                     data-name="{filename}"
-                    onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{filename}</li>"""
+                    onclick="clickOnFile(event)"><span style="margin-right: 6px;">
+                    <i class="far fa-file"></i></span>{filename}</li>"""
         return output

     @staticmethod
@@ -750,49 +789,45 @@ class Helpers:
                 dir_list.append(item)
             else:
                 unsorted_files.append(item)
-        file_list = sorted(dir_list, key=str.casefold) + sorted(unsorted_files, key=str.casefold)
-        output += \
-            f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
+        file_list = sorted(dir_list, key=str.casefold) + sorted(
+            unsorted_files, key=str.casefold
+        )
+        output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""
         for raw_filename in file_list:
             filename = html.escape(raw_filename)
             dpath = os.path.join(folder, filename)
             rel = os.path.join(folder, raw_filename)
             if os.path.isdir(rel):
-                output += \
-                    f"""<li class="tree-item" data-path="{dpath}">
+                output += f"""<li class="tree-item" data-path="{dpath}">
                 \n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
                 <span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
                 <i class="far fa-folder"></i>
                 <i class="far fa-folder-open"></i>
                 {filename}
                 </span>
-                </div><li>"""\
+                </div><li>"""

             else:
                 if filename != "crafty_managed.txt":
                     output += f"""<li
                     class="tree-nested d-block tree-ctx-item tree-file tree-item"
                     data-path="{dpath}"
                     data-name="{filename}"
-                    onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{filename}</li>"""
-        output += '</ul>\n'
+                    onclick="clickOnFile(event)"><span style="margin-right: 6px;">
+                    <i class="far fa-file"></i></span>{filename}</li>"""
+        output += "</ul>\n"
         return output

     @staticmethod
     def generate_zip_tree(folder, output=""):
         file_list = os.listdir(folder)
         file_list = sorted(file_list, key=str.casefold)
-        output += \
-            f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
+        output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""

         for raw_filename in file_list:
             filename = html.escape(raw_filename)
             rel = os.path.join(folder, raw_filename)
             dpath = os.path.join(folder, filename)
             if os.path.isdir(rel):
-                output += \
-                    f"""<li class="tree-item" data-path="{dpath}">
+                output += f"""<li class="tree-item" data-path="{dpath}">
                 \n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
                 <input type="radio" name="root_path" value="{dpath}">
                 <span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
@@ -801,24 +836,20 @@ class Helpers:
                 {filename}
                 </span>
                 </input></div><li>
-                \n"""\
+                \n"""

         return output

     @staticmethod
     def generate_zip_dir(folder, output=""):
         file_list = os.listdir(folder)
         file_list = sorted(file_list, key=str.casefold)
-        output += \
-            f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
+        output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""

         for raw_filename in file_list:
             filename = html.escape(raw_filename)
             rel = os.path.join(folder, raw_filename)
             dpath = os.path.join(folder, filename)
             if os.path.isdir(rel):
-                output += \
-                    f"""<li class="tree-item" data-path="{dpath}">
+                output += f"""<li class="tree-item" data-path="{dpath}">
                 \n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
                 <input type="radio" name="root_path" value="{dpath}">
                 <span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
@@ -826,34 +857,32 @@ class Helpers:
                 <i class="far fa-folder-open"></i>
                 {filename}
                 </span>
-                </input></div><li>"""\
+                </input></div><li>"""

         return output

     @staticmethod
     def unzipServer(zip_path, user_id):
         if helper.check_file_perms(zip_path):
             tempDir = tempfile.mkdtemp()
-            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
                 # extracts archive to temp directory
                 zip_ref.extractall(tempDir)
             if user_id:
-                websocket_helper.broadcast_user(user_id, 'send_temp_path',{
-                    'path': tempDir
-                })
+                websocket_helper.broadcast_user(
+                    user_id, "send_temp_path", {"path": tempDir}
+                )

     @staticmethod
     def backup_select(path, user_id):
         if user_id:
-            websocket_helper.broadcast_user(user_id, 'send_temp_path',{
-                'path': path
-            })
+            websocket_helper.broadcast_user(user_id, "send_temp_path", {"path": path})

     @staticmethod
     def unzip_backup_archive(backup_path, zip_name):
         zip_path = os.path.join(backup_path, zip_name)
         if helper.check_file_perms(zip_path):
             tempDir = tempfile.mkdtemp()
-            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
                 # extracts archive to temp directory
                 zip_ref.extractall(tempDir)
             return tempDir
@ -862,14 +891,19 @@ class Helpers:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def in_path(parent_path, child_path):
|
def in_path(parent_path, child_path):
|
||||||
# Smooth out relative path names, note: if you are concerned about symbolic links, you should use os.path.realpath too
|
# Smooth out relative path names, note: if you are concerned about
|
||||||
|
# symbolic links, you should use os.path.realpath too
|
||||||
parent_path = os.path.abspath(parent_path)
|
parent_path = os.path.abspath(parent_path)
|
||||||
child_path = os.path.abspath(child_path)
|
child_path = os.path.abspath(child_path)
|
||||||
|
|
||||||
# Compare the common path of the parent and child path with the common path of just the parent path.
|
# Compare the common path of the parent and child path with the
|
||||||
# Using the commonpath method on just the parent path will regularise the path name in the same way
|
# common path of just the parent path. Using the commonpath method
|
||||||
# as the comparison that deals with both paths, removing any trailing path separator
|
# on just the parent path will regularise the path name in the same way
|
||||||
return os.path.commonpath([parent_path]) == os.path.commonpath([parent_path, child_path])
|
# as the comparison that deals with both paths, removing any trailing
|
||||||
|
# path separator
|
||||||
|
return os.path.commonpath([parent_path]) == os.path.commonpath(
|
||||||
|
[parent_path, child_path]
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def in_path_old(x, y):
|
def in_path_old(x, y):
|
||||||
@ -901,7 +935,6 @@ class Helpers:
|
|||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def remove_prefix(text, prefix):
|
def remove_prefix(text, prefix):
|
||||||
if text.startswith(prefix):
|
if text.startswith(prefix):
|
||||||
@ -912,9 +945,10 @@ class Helpers:
|
|||||||
def getLangPage(text):
|
def getLangPage(text):
|
||||||
lang = text.split("_")[0]
|
lang = text.split("_")[0]
|
||||||
region = text.split("_")[1]
|
region = text.split("_")[1]
|
||||||
if region == 'EN':
|
if region == "EN":
|
||||||
return 'en'
|
return "en"
|
||||||
else:
|
else:
|
||||||
return lang + "-" + region
|
return lang + "-" + region
|
||||||
|
|
||||||
|
|
||||||
helper = Helpers()
|
helper = Helpers()
|
||||||
|
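Side note on the in_path helper reformatted above: the containment test relies on os.path.commonpath, so a child path only counts as inside the parent when the normalised parent equals the common prefix of both paths. A minimal standalone sketch of the same idea (illustrative only, the is_inside name and example paths are not part of this commit):

    import os

    def is_inside(parent: str, child: str) -> bool:
        # abspath smooths out ".." segments and trailing separators first
        parent = os.path.abspath(parent)
        child = os.path.abspath(child)
        # commonpath([parent]) regularises parent the same way as the two-path
        # call, so equality holds only when child sits under parent
        return os.path.commonpath([parent]) == os.path.commonpath([parent, child])

    print(is_inside("/srv/servers", "/srv/servers/world"))       # True
    print(is_inside("/srv/servers", "/srv/servers/../secrets"))  # False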
@@ -8,15 +8,23 @@ from app.classes.shared.console import console

 logger = logging.getLogger(__name__)

-class import3:
+
+class import3:
     def __init__(self):
         self.controller = Controller()

     def start_import(self):
-        folder = os.path.normpath(input("Please input the path to the migrations folder in your installation of Crafty 3: "))
+        folder = os.path.normpath(
+            input(
+                "Please input the path to the migrations folder "
+                "in your installation of Crafty 3: "
+            )
+        )
         if not os.path.exists(folder):
-            console.info("Crafty cannot find the path you entered. Does Crafty's user have permission to access it?")
+            console.info(
+                "Crafty cannot find the path you entered. "
+                "Does Crafty's user have permission to access it?"
+            )
             console.info("Please run the import3 command again and enter a valid path.")
         else:
             with open(os.path.join(folder, "users.json"), encoding="utf-8") as f:
@@ -31,28 +39,57 @@ class import3:
         # If there is only one user to import json needs to call the data differently
         if isinstance(json_data, list):
             for user in json_data:
-                users_helper.add_rawpass_user(user['username'], user['password'])
+                users_helper.add_rawpass_user(user["username"], user["password"])
                 console.info(f"Imported user {user['username']} from Crafty 3")
                 logger.info(f"Imported user {user['username']} from Crafty 3")
         else:
-            console.info("There is only one user detected. Cannot create duplicate Admin account.")
-            logger.info("There is only one user detected. Cannot create duplicate Admin account.")
+            console.info(
+                "There is only one user detected. "
+                "Cannot create duplicate Admin account."
+            )
+            logger.info(
+                "There is only one user detected. "
+                "Cannot create duplicate Admin account."
+            )

     @staticmethod
     def import_servers(json_data, controller):
         # If there is only one server to import json needs to call the data differently
         if isinstance(json_data, list):
             for server in json_data:
-                new_server_id = controller.import_jar_server(server_name=server['server_name'], server_path=server['server_path'],
-                    server_jar=server['server_jar'], min_mem=(int(server['memory_min'])/1000),
-                    max_mem=(int(server['memory_max'])/1000), port=server['server_port'])
-                console.info(f"Imported server {server['server_name']}[{server['id']}] from Crafty 3 to new server id {new_server_id}")
-                logger.info(f"Imported server {server['server_name']}[{server['id']}] from Crafty 3 to new server id {new_server_id}")
+                new_server_id = controller.import_jar_server(
+                    server_name=server["server_name"],
+                    server_path=server["server_path"],
+                    server_jar=server["server_jar"],
+                    min_mem=(int(server["memory_min"]) / 1000),
+                    max_mem=(int(server["memory_max"]) / 1000),
+                    port=server["server_port"],
+                )
+                console.info(
+                    f"Imported server {server['server_name']}[{server['id']}] "
+                    f"from Crafty 3 to new server id {new_server_id}"
+                )
+                logger.info(
+                    f"Imported server {server['server_name']}[{server['id']}] "
+                    f"from Crafty 3 to new server id {new_server_id}"
+                )
         else:
-            new_server_id = controller.import_jar_server(server_name=json_data['server_name'], server_path=json_data['server_path'],
-                server_jar=json_data['server_jar'], min_mem=(int(json_data['memory_min'])/1000),
-                max_mem=(int(json_data['memory_max'])/1000), port=json_data['server_port'])
-            console.info(f"Imported server {json_data['server_name']}[{json_data['id']}] from Crafty 3 to new server id {new_server_id}")
-            logger.info(f"Imported server {json_data['server_name']}[{json_data['id']}] from Crafty 3 to new server id {new_server_id}")
+            new_server_id = controller.import_jar_server(
+                server_name=json_data["server_name"],
+                server_path=json_data["server_path"],
+                server_jar=json_data["server_jar"],
+                min_mem=(int(json_data["memory_min"]) / 1000),
+                max_mem=(int(json_data["memory_max"]) / 1000),
+                port=json_data["server_port"],
+            )
+            console.info(
+                f"Imported server {json_data['server_name']}[{json_data['id']}] "
+                f"from Crafty 3 to new server id {new_server_id}"
+            )
+            logger.info(
+                f"Imported server {json_data['server_name']}[{json_data['id']}] "
+                f"from Crafty 3 to new server id {new_server_id}"
+            )

+
 import3 = import3()
@@ -1,12 +1,13 @@
 import sys
 import subprocess

-class install:
+
+class install:
     @staticmethod
     def is_venv():
-        return (hasattr(sys, 'real_prefix') or
-                (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
+        return hasattr(sys, "real_prefix") or (
+            hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
+        )

     def do_install(self):

@@ -16,8 +17,11 @@ class install:
         sys.exit(1)

         # do our pip install
-        subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", 'requirements.txt'])
+        subprocess.check_call(
+            [sys.executable, "-m", "pip", "install", "-r", "requirements.txt"]
+        )
         print("Crafty has installed it's dependencies, please restart Crafty")
         sys.exit(0)


 installer = install()
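Side note on the is_venv change above: only the wrapping changes for Black; the detection logic is unchanged. As a rough standalone illustration (legacy virtualenv sets sys.real_prefix, while the stdlib venv module makes sys.base_prefix differ from sys.prefix):

    import sys

    def is_venv() -> bool:
        # legacy virtualenv marker, or stdlib venv prefix mismatch
        return hasattr(sys, "real_prefix") or (
            hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
        )

    print(is_venv())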
@@ -28,6 +28,7 @@ from app.classes.web.websocket_helper import websocket_helper

 try:
     from peewee import DoesNotExist
+
     # TZLocal is set as a hidden import on win pipeline
     from tzlocal import get_localzone
     from apscheduler.schedulers.background import BackgroundScheduler
@@ -37,8 +38,8 @@ except ModuleNotFoundError as err:

 logger = logging.getLogger(__name__)

-class Controller:
+
+class Controller:
     def __init__(self):
         self.servers_list = []
         self.stats = Stats(self)
@@ -57,12 +58,15 @@ class Controller:
         logger.info(f"Checking to see if we already registered {server_id_to_check}")

         for s in self.servers_list:
-            known_server = s.get('server_id')
+            known_server = s.get("server_id")
             if known_server is None:
                 return False

             if known_server == server_id_to_check:
-                logger.info(f'skipping initialization of server {server_id_to_check} because it is already loaded')
+                logger.info(
+                    f"skipping initialization of server {server_id_to_check} "
+                    f"because it is already loaded"
+                )
                 return True

         return False
@ -72,20 +76,30 @@ class Controller:
|
|||||||
servers = self.servers.get_all_defined_servers()
|
servers = self.servers.get_all_defined_servers()
|
||||||
|
|
||||||
for s in servers:
|
for s in servers:
|
||||||
server_id = s.get('server_id')
|
server_id = s.get("server_id")
|
||||||
|
|
||||||
# if we have already initialized this server, let's skip it.
|
# if we have already initialized this server, let's skip it.
|
||||||
if self.check_server_loaded(server_id):
|
if self.check_server_loaded(server_id):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# if this server path no longer exists - let's warn and bomb out
|
# if this server path no longer exists - let's warn and bomb out
|
||||||
if not helper.check_path_exists(helper.get_os_understandable_path(s['path'])):
|
if not helper.check_path_exists(
|
||||||
logger.warning(f"Unable to find server {s['server_name']} at path {s['path']}. Skipping this server")
|
helper.get_os_understandable_path(s["path"])
|
||||||
|
):
|
||||||
|
logger.warning(
|
||||||
|
f"Unable to find server {s['server_name']} at path {s['path']}. "
|
||||||
|
f"Skipping this server"
|
||||||
|
)
|
||||||
|
|
||||||
console.warning(f"Unable to find server {s['server_name']} at path {s['path']}. Skipping this server")
|
console.warning(
|
||||||
|
f"Unable to find server {s['server_name']} at path {s['path']}. "
|
||||||
|
f"Skipping this server"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
settings_file = os.path.join(helper.get_os_understandable_path(s['path']), 'server.properties')
|
settings_file = os.path.join(
|
||||||
|
helper.get_os_understandable_path(s["path"]), "server.properties"
|
||||||
|
)
|
||||||
|
|
||||||
# if the properties file isn't there, let's warn
|
# if the properties file isn't there, let's warn
|
||||||
if not helper.check_file_exists(settings_file):
|
if not helper.check_file_exists(settings_file):
|
||||||
@ -96,27 +110,29 @@ class Controller:
|
|||||||
settings = ServerProps(settings_file)
|
settings = ServerProps(settings_file)
|
||||||
|
|
||||||
temp_server_dict = {
|
temp_server_dict = {
|
||||||
'server_id': s.get('server_id'),
|
"server_id": s.get("server_id"),
|
||||||
'server_data_obj': s,
|
"server_data_obj": s,
|
||||||
'server_obj': Server(self.stats),
|
"server_obj": Server(self.stats),
|
||||||
'server_settings': settings.props
|
"server_settings": settings.props,
|
||||||
}
|
}
|
||||||
|
|
||||||
# setup the server, do the auto start and all that jazz
|
# setup the server, do the auto start and all that jazz
|
||||||
temp_server_dict['server_obj'].do_server_setup(s)
|
temp_server_dict["server_obj"].do_server_setup(s)
|
||||||
|
|
||||||
# add this temp object to the list of init servers
|
# add this temp object to the list of init servers
|
||||||
self.servers_list.append(temp_server_dict)
|
self.servers_list.append(temp_server_dict)
|
||||||
|
|
||||||
if s['auto_start']:
|
if s["auto_start"]:
|
||||||
self.servers.set_waiting_start(s['server_id'], True)
|
self.servers.set_waiting_start(s["server_id"], True)
|
||||||
|
|
||||||
self.refresh_server_settings(s['server_id'])
|
self.refresh_server_settings(s["server_id"])
|
||||||
|
|
||||||
console.info(f"Loaded Server: ID {s['server_id']}" +
|
console.info(
|
||||||
f" | Name: {s['server_name']}" +
|
f"Loaded Server: ID {s['server_id']}"
|
||||||
f" | Autostart: {s['auto_start']}" +
|
+ f" | Name: {s['server_name']}"
|
||||||
f" | Delay: {s['auto_start_delay']} ")
|
+ f" | Autostart: {s['auto_start']}"
|
||||||
|
+ f" | Delay: {s['auto_start_delay']} "
|
||||||
|
)
|
||||||
|
|
||||||
def refresh_server_settings(self, server_id: int):
|
def refresh_server_settings(self, server_id: int):
|
||||||
server_obj = self.get_server_obj(server_id)
|
server_obj = self.get_server_obj(server_id)
|
||||||
@ -132,72 +148,97 @@ class Controller:
|
|||||||
def set_project_root(self, root_dir):
|
def set_project_root(self, root_dir):
|
||||||
self.project_root = root_dir
|
self.project_root = root_dir
|
||||||
|
|
||||||
|
|
||||||
def package_support_logs(self, exec_user):
|
def package_support_logs(self, exec_user):
|
||||||
if exec_user['preparing']:
|
if exec_user["preparing"]:
|
||||||
return
|
return
|
||||||
self.users.set_prepare(exec_user['user_id'])
|
self.users.set_prepare(exec_user["user_id"])
|
||||||
# pausing so on screen notifications can run for user
|
# pausing so on screen notifications can run for user
|
||||||
time.sleep(7)
|
time.sleep(7)
|
||||||
websocket_helper.broadcast_user(exec_user['user_id'], 'notification', 'Preparing your support logs')
|
websocket_helper.broadcast_user(
|
||||||
|
exec_user["user_id"], "notification", "Preparing your support logs"
|
||||||
|
)
|
||||||
tempDir = tempfile.mkdtemp()
|
tempDir = tempfile.mkdtemp()
|
||||||
tempZipStorage = tempfile.mkdtemp()
|
tempZipStorage = tempfile.mkdtemp()
|
||||||
full_temp = os.path.join(tempDir, 'support_logs')
|
full_temp = os.path.join(tempDir, "support_logs")
|
||||||
os.mkdir(full_temp)
|
os.mkdir(full_temp)
|
||||||
tempZipStorage = os.path.join(tempZipStorage, "support_logs")
|
tempZipStorage = os.path.join(tempZipStorage, "support_logs")
|
||||||
crafty_path = os.path.join(full_temp, "crafty")
|
crafty_path = os.path.join(full_temp, "crafty")
|
||||||
os.mkdir(crafty_path)
|
os.mkdir(crafty_path)
|
||||||
server_path = os.path.join(full_temp, "server")
|
server_path = os.path.join(full_temp, "server")
|
||||||
os.mkdir(server_path)
|
os.mkdir(server_path)
|
||||||
if exec_user['superuser']:
|
if exec_user["superuser"]:
|
||||||
auth_servers = self.servers.get_all_defined_servers()
|
auth_servers = self.servers.get_all_defined_servers()
|
||||||
else:
|
else:
|
||||||
user_servers = self.servers.get_authorized_servers(int(exec_user['user_id']))
|
user_servers = self.servers.get_authorized_servers(
|
||||||
|
int(exec_user["user_id"])
|
||||||
|
)
|
||||||
auth_servers = []
|
auth_servers = []
|
||||||
for server in user_servers:
|
for server in user_servers:
|
||||||
if Enum_Permissions_Server.Logs in self.server_perms.get_user_id_permissions_list(exec_user['user_id'], server["server_id"]):
|
if (
|
||||||
|
Enum_Permissions_Server.Logs
|
||||||
|
in self.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server["server_id"]
|
||||||
|
)
|
||||||
|
):
|
||||||
auth_servers.append(server)
|
auth_servers.append(server)
|
||||||
else:
|
else:
|
||||||
logger.info(f"Logs permission not available for server {server['server_name']}. Skipping.")
|
logger.info(
|
||||||
|
f"Logs permission not available for server "
|
||||||
|
f"{server['server_name']}. Skipping."
|
||||||
|
)
|
||||||
# we'll iterate through our list of log paths from auth servers.
|
# we'll iterate through our list of log paths from auth servers.
|
||||||
for server in auth_servers:
|
for server in auth_servers:
|
||||||
final_path = os.path.join(server_path, str(server['server_name']))
|
final_path = os.path.join(server_path, str(server["server_name"]))
|
||||||
try:
|
try:
|
||||||
os.mkdir(final_path)
|
os.mkdir(final_path)
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
final_path += '_'+server['server_uuid']
|
final_path += "_" + server["server_uuid"]
|
||||||
os.mkdir(final_path)
|
os.mkdir(final_path)
|
||||||
try:
|
try:
|
||||||
file_helper.copy_file(server['log_path'], final_path)
|
file_helper.copy_file(server["log_path"], final_path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Failed to copy file with error: {e}")
|
logger.warning(f"Failed to copy file with error: {e}")
|
||||||
# Copy crafty logs to archive dir
|
# Copy crafty logs to archive dir
|
||||||
full_log_name = os.path.join(crafty_path, 'logs')
|
full_log_name = os.path.join(crafty_path, "logs")
|
||||||
file_helper.copy_dir(os.path.join(self.project_root, 'logs'), full_log_name)
|
file_helper.copy_dir(os.path.join(self.project_root, "logs"), full_log_name)
|
||||||
self.support_scheduler.add_job(self.log_status, 'interval', seconds=1, id="logs_"+str(exec_user['user_id']), args = [full_temp,
|
self.support_scheduler.add_job(
|
||||||
tempZipStorage +'.zip', exec_user])
|
self.log_status,
|
||||||
|
"interval",
|
||||||
|
seconds=1,
|
||||||
|
id="logs_" + str(exec_user["user_id"]),
|
||||||
|
args=[full_temp, tempZipStorage + ".zip", exec_user],
|
||||||
|
)
|
||||||
file_helper.make_archive(tempZipStorage, tempDir)
|
file_helper.make_archive(tempZipStorage, tempDir)
|
||||||
|
|
||||||
if len(websocket_helper.clients) > 0:
|
if len(websocket_helper.clients) > 0:
|
||||||
websocket_helper.broadcast_user(exec_user['user_id'], 'support_status_update', helper.calc_percent(full_temp, tempZipStorage +'.zip'))
|
websocket_helper.broadcast_user(
|
||||||
|
exec_user["user_id"],
|
||||||
|
"support_status_update",
|
||||||
|
helper.calc_percent(full_temp, tempZipStorage + ".zip"),
|
||||||
|
)
|
||||||
|
|
||||||
tempZipStorage += '.zip'
|
tempZipStorage += ".zip"
|
||||||
websocket_helper.broadcast_user(exec_user['user_id'], 'send_logs_bootbox', {
|
websocket_helper.broadcast_user(exec_user["user_id"], "send_logs_bootbox", {})
|
||||||
})
|
|
||||||
|
|
||||||
self.users.set_support_path(exec_user['user_id'], tempZipStorage)
|
self.users.set_support_path(exec_user["user_id"], tempZipStorage)
|
||||||
|
|
||||||
self.users.stop_prepare(exec_user['user_id'])
|
self.users.stop_prepare(exec_user["user_id"])
|
||||||
self.support_scheduler.remove_job('logs_'+str(exec_user["user_id"]))
|
self.support_scheduler.remove_job("logs_" + str(exec_user["user_id"]))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_system_user():
|
def add_system_user():
|
||||||
helper_users.add_user("system", helper.random_string_generator(64), "default@example.com", False, False)
|
helper_users.add_user(
|
||||||
|
"system",
|
||||||
|
helper.random_string_generator(64),
|
||||||
|
"default@example.com",
|
||||||
|
False,
|
||||||
|
False,
|
||||||
|
)
|
||||||
|
|
||||||
def get_server_settings(self, server_id):
|
def get_server_settings(self, server_id):
|
||||||
for s in self.servers_list:
|
for s in self.servers_list:
|
||||||
if int(s['server_id']) == int(server_id):
|
if int(s["server_id"]) == int(server_id):
|
||||||
return s['server_settings']
|
return s["server_settings"]
|
||||||
|
|
||||||
logger.warning(f"Unable to find server object for server id {server_id}")
|
logger.warning(f"Unable to find server object for server id {server_id}")
|
||||||
return False
|
return False
|
||||||
@ -205,7 +246,8 @@ class Controller:
|
|||||||
def crash_detection(self, server_obj):
|
def crash_detection(self, server_obj):
|
||||||
svr = self.get_server_obj(server_obj.server_id)
|
svr = self.get_server_obj(server_obj.server_id)
|
||||||
# start or stop crash detection depending upon user preference
|
# start or stop crash detection depending upon user preference
|
||||||
#The below functions check to see if the server is running. They only execute if it's running.
|
# The below functions check to see if the server is running.
|
||||||
|
# They only execute if it's running.
|
||||||
if server_obj.crash_detection == 1:
|
if server_obj.crash_detection == 1:
|
||||||
svr.start_crash_detection()
|
svr.start_crash_detection()
|
||||||
else:
|
else:
|
||||||
@ -216,29 +258,28 @@ class Controller:
|
|||||||
self.log_stats = results
|
self.log_stats = results
|
||||||
|
|
||||||
if len(websocket_helper.clients) > 0:
|
if len(websocket_helper.clients) > 0:
|
||||||
websocket_helper.broadcast_user(exec_user['user_id'], 'support_status_update', results)
|
websocket_helper.broadcast_user(
|
||||||
|
exec_user["user_id"], "support_status_update", results
|
||||||
|
)
|
||||||
|
|
||||||
def send_log_status(self):
|
def send_log_status(self):
|
||||||
try:
|
try:
|
||||||
return self.log_stats
|
return self.log_stats
|
||||||
except:
|
except:
|
||||||
return {
|
return {"percent": 0, "total_files": 0}
|
||||||
'percent': 0,
|
|
||||||
'total_files': 0
|
|
||||||
}
|
|
||||||
|
|
||||||
def get_server_obj(self, server_id: Union[str, int]) -> Union[bool, Server]:
|
def get_server_obj(self, server_id: Union[str, int]) -> Union[bool, Server]:
|
||||||
for s in self.servers_list:
|
for s in self.servers_list:
|
||||||
if str(s['server_id']) == str(server_id):
|
if str(s["server_id"]) == str(server_id):
|
||||||
return s['server_obj']
|
return s["server_obj"]
|
||||||
|
|
||||||
logger.warning(f"Unable to find server object for server id {server_id}")
|
logger.warning(f"Unable to find server object for server id {server_id}")
|
||||||
return False # TODO: Change to None
|
return False # TODO: Change to None
|
||||||
|
|
||||||
def get_server_data(self, server_id: str):
|
def get_server_data(self, server_id: str):
|
||||||
for s in self.servers_list:
|
for s in self.servers_list:
|
||||||
if str(s['server_id']) == str(server_id):
|
if str(s["server_id"]) == str(server_id):
|
||||||
return s['server_data_obj']
|
return s["server_data_obj"]
|
||||||
|
|
||||||
logger.warning(f"Unable to find server object for server id {server_id}")
|
logger.warning(f"Unable to find server object for server id {server_id}")
|
||||||
return False
|
return False
|
||||||
@ -255,14 +296,11 @@ class Controller:
|
|||||||
for s in self.servers_list:
|
for s in self.servers_list:
|
||||||
|
|
||||||
# is the server running?
|
# is the server running?
|
||||||
srv_obj = s['server_obj']
|
srv_obj = s["server_obj"]
|
||||||
running = srv_obj.check_running()
|
running = srv_obj.check_running()
|
||||||
# if so, let's add a dictionary to the list of running servers
|
# if so, let's add a dictionary to the list of running servers
|
||||||
if running:
|
if running:
|
||||||
running_servers.append({
|
running_servers.append({"id": srv_obj.server_id, "name": srv_obj.name})
|
||||||
'id': srv_obj.server_id,
|
|
||||||
'name': srv_obj.name
|
|
||||||
})
|
|
||||||
|
|
||||||
return running_servers
|
return running_servers
|
||||||
|
|
||||||
@ -278,7 +316,7 @@ class Controller:
|
|||||||
logger.info(f"Stopping Server ID {s['id']} - {s['name']}")
|
logger.info(f"Stopping Server ID {s['id']} - {s['name']}")
|
||||||
console.info(f"Stopping Server ID {s['id']} - {s['name']}")
|
console.info(f"Stopping Server ID {s['id']} - {s['name']}")
|
||||||
|
|
||||||
self.stop_server(s['id'])
|
self.stop_server(s["id"])
|
||||||
|
|
||||||
# let's wait 2 seconds to let everything flush out
|
# let's wait 2 seconds to let everything flush out
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
@ -291,15 +329,23 @@ class Controller:
|
|||||||
svr_obj = self.get_server_obj(server_id)
|
svr_obj = self.get_server_obj(server_id)
|
||||||
svr_obj.stop_threaded_server()
|
svr_obj.stop_threaded_server()
|
||||||
|
|
||||||
def create_jar_server(self, server: str, version: str, name: str, min_mem: int, max_mem: int, port: int):
|
def create_jar_server(
|
||||||
|
self,
|
||||||
|
server: str,
|
||||||
|
version: str,
|
||||||
|
name: str,
|
||||||
|
min_mem: int,
|
||||||
|
max_mem: int,
|
||||||
|
port: int,
|
||||||
|
):
|
||||||
server_id = helper.create_uuid()
|
server_id = helper.create_uuid()
|
||||||
server_dir = os.path.join(helper.servers_dir, server_id)
|
server_dir = os.path.join(helper.servers_dir, server_id)
|
||||||
backup_path = os.path.join(helper.backup_path, server_id)
|
backup_path = os.path.join(helper.backup_path, server_id)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_dir = helper.wtol_path(server_dir)
|
server_dir = helper.wtol_path(server_dir)
|
||||||
backup_path = helper.wtol_path(backup_path)
|
backup_path = helper.wtol_path(backup_path)
|
||||||
server_dir.replace(' ', '^ ')
|
server_dir.replace(" ", "^ ")
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
|
|
||||||
server_file = f"{server}-{version}.jar"
|
server_file = f"{server}-{version}.jar"
|
||||||
full_jar_path = os.path.join(server_dir, server_file)
|
full_jar_path = os.path.join(server_dir, server_file)
|
||||||
@ -310,12 +356,14 @@ class Controller:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# do a eula.txt
|
# do a eula.txt
|
||||||
with open(os.path.join(server_dir, "eula.txt"), 'w', encoding='utf-8') as f:
|
with open(os.path.join(server_dir, "eula.txt"), "w", encoding="utf-8") as f:
|
||||||
f.write("eula=false")
|
f.write("eula=false")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
# setup server.properties with the port
|
# setup server.properties with the port
|
||||||
with open(os.path.join(server_dir, "server.properties"), "w", encoding='utf-8') as f:
|
with open(
|
||||||
|
os.path.join(server_dir, "server.properties"), "w", encoding="utf-8"
|
||||||
|
) as f:
|
||||||
f.write(f"server-port={port}")
|
f.write(f"server-port={port}")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
@ -323,16 +371,33 @@ class Controller:
|
|||||||
logger.error(f"Unable to create required server files due to :{e}")
|
logger.error(f"Unable to create required server files due to :{e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#must remain non-fstring due to string addtion
|
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
|
server_command = (
|
||||||
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f'-jar "{full_jar_path}" nogui'
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
|
server_command = (
|
||||||
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f"-jar {full_jar_path} nogui"
|
||||||
|
)
|
||||||
server_log_file = f"{server_dir}/logs/latest.log"
|
server_log_file = f"{server_dir}/logs/latest.log"
|
||||||
server_stop = "stop"
|
server_stop = "stop"
|
||||||
|
|
||||||
new_id = self.register_server(name, server_id, server_dir, backup_path, server_command, server_file, server_log_file, server_stop,
|
new_id = self.register_server(
|
||||||
port, server_type='minecraft-java')
|
name,
|
||||||
|
server_id,
|
||||||
|
server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_file,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
port,
|
||||||
|
server_type="minecraft-java",
|
||||||
|
)
|
||||||
|
|
||||||
# download the jar
|
# download the jar
|
||||||
server_jar_obj.download_jar(server, version, full_jar_path, new_id)
|
server_jar_obj.download_jar(server, version, full_jar_path, new_id)
|
||||||
@ -356,15 +421,23 @@ class Controller:
|
|||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def import_jar_server(self, server_name: str, server_path: str, server_jar: str, min_mem: int, max_mem: int, port: int):
|
def import_jar_server(
|
||||||
|
self,
|
||||||
|
server_name: str,
|
||||||
|
server_path: str,
|
||||||
|
server_jar: str,
|
||||||
|
min_mem: int,
|
||||||
|
max_mem: int,
|
||||||
|
port: int,
|
||||||
|
):
|
||||||
server_id = helper.create_uuid()
|
server_id = helper.create_uuid()
|
||||||
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
||||||
backup_path = os.path.join(helper.backup_path, server_id)
|
backup_path = os.path.join(helper.backup_path, server_id)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
new_server_dir = helper.wtol_path(new_server_dir)
|
new_server_dir = helper.wtol_path(new_server_dir)
|
||||||
backup_path = helper.wtol_path(backup_path)
|
backup_path = helper.wtol_path(backup_path)
|
||||||
new_server_dir.replace(' ', '^ ')
|
new_server_dir.replace(" ", "^ ")
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
|
|
||||||
helper.ensure_dir_exists(new_server_dir)
|
helper.ensure_dir_exists(new_server_dir)
|
||||||
helper.ensure_dir_exists(backup_path)
|
helper.ensure_dir_exists(backup_path)
|
||||||
@ -376,37 +449,67 @@ class Controller:
|
|||||||
|
|
||||||
has_properties = False
|
has_properties = False
|
||||||
for item in os.listdir(new_server_dir):
|
for item in os.listdir(new_server_dir):
|
||||||
if str(item) == 'server.properties':
|
if str(item) == "server.properties":
|
||||||
has_properties = True
|
has_properties = True
|
||||||
if not has_properties:
|
if not has_properties:
|
||||||
logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
|
logger.info(
|
||||||
with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
|
f"No server.properties found on zip file import. "
|
||||||
|
f"Creating one with port selection of {str(port)}"
|
||||||
|
)
|
||||||
|
with open(
|
||||||
|
os.path.join(new_server_dir, "server.properties"), "w", encoding="utf-8"
|
||||||
|
) as f:
|
||||||
f.write(f"server-port={port}")
|
f.write(f"server-port={port}")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
full_jar_path = os.path.join(new_server_dir, server_jar)
|
full_jar_path = os.path.join(new_server_dir, server_jar)
|
||||||
|
|
||||||
#due to adding strings this must not be an fstring
|
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
|
server_command = (
|
||||||
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f'-jar "{full_jar_path}" nogui'
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
|
server_command = (
|
||||||
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f"-jar {full_jar_path} nogui"
|
||||||
|
)
|
||||||
server_log_file = f"{new_server_dir}/logs/latest.log"
|
server_log_file = f"{new_server_dir}/logs/latest.log"
|
||||||
server_stop = "stop"
|
server_stop = "stop"
|
||||||
|
|
||||||
new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_jar,
|
new_id = self.register_server(
|
||||||
server_log_file, server_stop, port, server_type='minecraft-java')
|
server_name,
|
||||||
|
server_id,
|
||||||
|
new_server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_jar,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
port,
|
||||||
|
server_type="minecraft-java",
|
||||||
|
)
|
||||||
return new_id
|
return new_id
|
||||||
|
|
||||||
def import_zip_server(self, server_name: str, zip_path: str, server_jar: str, min_mem: int, max_mem: int, port: int):
|
def import_zip_server(
|
||||||
|
self,
|
||||||
|
server_name: str,
|
||||||
|
zip_path: str,
|
||||||
|
server_jar: str,
|
||||||
|
min_mem: int,
|
||||||
|
max_mem: int,
|
||||||
|
port: int,
|
||||||
|
):
|
||||||
server_id = helper.create_uuid()
|
server_id = helper.create_uuid()
|
||||||
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
||||||
backup_path = os.path.join(helper.backup_path, server_id)
|
backup_path = os.path.join(helper.backup_path, server_id)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
new_server_dir = helper.wtol_path(new_server_dir)
|
new_server_dir = helper.wtol_path(new_server_dir)
|
||||||
backup_path = helper.wtol_path(backup_path)
|
backup_path = helper.wtol_path(backup_path)
|
||||||
new_server_dir.replace(' ', '^ ')
|
new_server_dir.replace(" ", "^ ")
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
|
|
||||||
tempDir = helper.get_os_understandable_path(zip_path)
|
tempDir = helper.get_os_understandable_path(zip_path)
|
||||||
helper.ensure_dir_exists(new_server_dir)
|
helper.ensure_dir_exists(new_server_dir)
|
||||||
@ -414,49 +517,77 @@ class Controller:
|
|||||||
has_properties = False
|
has_properties = False
|
||||||
# extracts archive to temp directory
|
# extracts archive to temp directory
|
||||||
for item in os.listdir(tempDir):
|
for item in os.listdir(tempDir):
|
||||||
if str(item) == 'server.properties':
|
if str(item) == "server.properties":
|
||||||
has_properties = True
|
has_properties = True
|
||||||
try:
|
try:
|
||||||
if not os.path.isdir(os.path.join(tempDir, item)):
|
if not os.path.isdir(os.path.join(tempDir, item)):
|
||||||
file_helper.move_file(os.path.join(tempDir, item), os.path.join(new_server_dir, item))
|
file_helper.move_file(
|
||||||
|
os.path.join(tempDir, item), os.path.join(new_server_dir, item)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
file_helper.move_dir(os.path.join(tempDir, item), os.path.join(new_server_dir, item))
|
file_helper.move_dir(
|
||||||
|
os.path.join(tempDir, item), os.path.join(new_server_dir, item)
|
||||||
|
)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error(f'ERROR IN ZIP IMPORT: {ex}')
|
logger.error(f"ERROR IN ZIP IMPORT: {ex}")
|
||||||
if not has_properties:
|
if not has_properties:
|
||||||
logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
|
logger.info(
|
||||||
with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
|
f"No server.properties found on zip file import. "
|
||||||
|
f"Creating one with port selection of {str(port)}"
|
||||||
|
)
|
||||||
|
with open(
|
||||||
|
os.path.join(new_server_dir, "server.properties"), "w", encoding="utf-8"
|
||||||
|
) as f:
|
||||||
f.write(f"server-port={port}")
|
f.write(f"server-port={port}")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
full_jar_path = os.path.join(new_server_dir, server_jar)
|
full_jar_path = os.path.join(new_server_dir, server_jar)
|
||||||
|
|
||||||
#due to strings being added we need to leave this as not an fstring
|
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
|
server_command = (
|
||||||
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f'-jar "{full_jar_path}" nogui'
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
|
server_command = (
|
||||||
logger.debug('command: ' + server_command)
|
f"java -Xms{helper.float_to_string(min_mem)}M "
|
||||||
|
f"-Xmx{helper.float_to_string(max_mem)}M "
|
||||||
|
f"-jar {full_jar_path} nogui"
|
||||||
|
)
|
||||||
|
logger.debug("command: " + server_command)
|
||||||
server_log_file = f"{new_server_dir}/logs/latest.log"
|
server_log_file = f"{new_server_dir}/logs/latest.log"
|
||||||
server_stop = "stop"
|
server_stop = "stop"
|
||||||
|
|
||||||
new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_jar,
|
new_id = self.register_server(
|
||||||
server_log_file, server_stop, port, server_type='minecraft-java')
|
server_name,
|
||||||
|
server_id,
|
||||||
|
new_server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_jar,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
port,
|
||||||
|
server_type="minecraft-java",
|
||||||
|
)
|
||||||
return new_id
|
return new_id
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# BEDROCK IMPORTS
|
# BEDROCK IMPORTS
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
|
|
||||||
def import_bedrock_server(self, server_name: str, server_path: str, server_exe: str, port: int):
|
def import_bedrock_server(
|
||||||
|
self, server_name: str, server_path: str, server_exe: str, port: int
|
||||||
|
):
|
||||||
server_id = helper.create_uuid()
|
server_id = helper.create_uuid()
|
||||||
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
||||||
backup_path = os.path.join(helper.backup_path, server_id)
|
backup_path = os.path.join(helper.backup_path, server_id)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
new_server_dir = helper.wtol_path(new_server_dir)
|
new_server_dir = helper.wtol_path(new_server_dir)
|
||||||
backup_path = helper.wtol_path(backup_path)
|
backup_path = helper.wtol_path(backup_path)
|
||||||
new_server_dir.replace(' ', '^ ')
|
new_server_dir.replace(" ", "^ ")
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
|
|
||||||
helper.ensure_dir_exists(new_server_dir)
|
helper.ensure_dir_exists(new_server_dir)
|
||||||
helper.ensure_dir_exists(backup_path)
|
helper.ensure_dir_exists(backup_path)
|
||||||
@ -468,41 +599,57 @@ class Controller:
|
|||||||
|
|
||||||
has_properties = False
|
has_properties = False
|
||||||
for item in os.listdir(new_server_dir):
|
for item in os.listdir(new_server_dir):
|
||||||
if str(item) == 'server.properties':
|
if str(item) == "server.properties":
|
||||||
has_properties = True
|
has_properties = True
|
||||||
if not has_properties:
|
if not has_properties:
|
||||||
logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
|
logger.info(
|
||||||
with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
|
f"No server.properties found on zip file import. "
|
||||||
|
f"Creating one with port selection of {str(port)}"
|
||||||
|
)
|
||||||
|
with open(
|
||||||
|
os.path.join(new_server_dir, "server.properties"), "w", encoding="utf-8"
|
||||||
|
) as f:
|
||||||
f.write(f"server-port={port}")
|
f.write(f"server-port={port}")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
full_jar_path = os.path.join(new_server_dir, server_exe)
|
full_jar_path = os.path.join(new_server_dir, server_exe)
|
||||||
|
|
||||||
#due to adding strings this must not be an fstring
|
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_command = f'"{full_jar_path}"'
|
server_command = f'"{full_jar_path}"'
|
||||||
else:
|
else:
|
||||||
server_command = f'./{server_exe}'
|
server_command = f"./{server_exe}"
|
||||||
logger.debug('command: ' + server_command)
|
logger.debug("command: " + server_command)
|
||||||
server_log_file = "N/A"
|
server_log_file = "N/A"
|
||||||
server_stop = "stop"
|
server_stop = "stop"
|
||||||
|
|
||||||
new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_exe,
|
new_id = self.register_server(
|
||||||
server_log_file, server_stop, port, server_type='minecraft-bedrock')
|
server_name,
|
||||||
|
server_id,
|
||||||
|
new_server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_exe,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
port,
|
||||||
|
server_type="minecraft-bedrock",
|
||||||
|
)
|
||||||
if os.name != "nt":
|
if os.name != "nt":
|
||||||
if helper.check_file_exists(full_jar_path):
|
if helper.check_file_exists(full_jar_path):
|
||||||
os.chmod(full_jar_path, 0o2775)
|
os.chmod(full_jar_path, 0o2775)
|
||||||
return new_id
|
return new_id
|
||||||
|
|
||||||
def import_bedrock_zip_server(self, server_name: str, zip_path: str, server_exe: str, port: int):
|
def import_bedrock_zip_server(
|
||||||
|
self, server_name: str, zip_path: str, server_exe: str, port: int
|
||||||
|
):
|
||||||
server_id = helper.create_uuid()
|
server_id = helper.create_uuid()
|
||||||
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
new_server_dir = os.path.join(helper.servers_dir, server_id)
|
||||||
backup_path = os.path.join(helper.backup_path, server_id)
|
backup_path = os.path.join(helper.backup_path, server_id)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
new_server_dir = helper.wtol_path(new_server_dir)
|
new_server_dir = helper.wtol_path(new_server_dir)
|
||||||
backup_path = helper.wtol_path(backup_path)
|
backup_path = helper.wtol_path(backup_path)
|
||||||
new_server_dir.replace(' ', '^ ')
|
new_server_dir.replace(" ", "^ ")
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
|
|
||||||
tempDir = helper.get_os_understandable_path(zip_path)
|
tempDir = helper.get_os_understandable_path(zip_path)
|
||||||
helper.ensure_dir_exists(new_server_dir)
|
helper.ensure_dir_exists(new_server_dir)
|
||||||
@ -510,54 +657,74 @@ class Controller:
|
|||||||
has_properties = False
|
has_properties = False
|
||||||
# extracts archive to temp directory
|
# extracts archive to temp directory
|
||||||
for item in os.listdir(tempDir):
|
for item in os.listdir(tempDir):
|
||||||
if str(item) == 'server.properties':
|
if str(item) == "server.properties":
|
||||||
has_properties = True
|
has_properties = True
|
||||||
try:
|
try:
|
||||||
if not os.path.isdir(os.path.join(tempDir, item)):
|
if not os.path.isdir(os.path.join(tempDir, item)):
|
||||||
file_helper.move_file(os.path.join(tempDir, item), os.path.join(new_server_dir, item))
|
file_helper.move_file(
|
||||||
|
os.path.join(tempDir, item), os.path.join(new_server_dir, item)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
file_helper.move_dir(os.path.join(tempDir, item), os.path.join(new_server_dir, item))
|
file_helper.move_dir(
|
||||||
|
os.path.join(tempDir, item), os.path.join(new_server_dir, item)
|
||||||
|
)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error(f'ERROR IN ZIP IMPORT: {ex}')
|
logger.error(f"ERROR IN ZIP IMPORT: {ex}")
|
||||||
if not has_properties:
|
if not has_properties:
|
||||||
logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
|
logger.info(
|
||||||
with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
|
f"No server.properties found on zip file import. "
|
||||||
|
f"Creating one with port selection of {str(port)}"
|
||||||
|
)
|
||||||
|
with open(
|
||||||
|
os.path.join(new_server_dir, "server.properties"), "w", encoding="utf-8"
|
||||||
|
) as f:
|
||||||
f.write(f"server-port={port}")
|
f.write(f"server-port={port}")
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
full_jar_path = os.path.join(new_server_dir, server_exe)
|
full_jar_path = os.path.join(new_server_dir, server_exe)
|
||||||
|
|
||||||
#due to strings being added we need to leave this as not an fstring
|
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
server_command = f'"{full_jar_path}"'
|
server_command = f'"{full_jar_path}"'
|
||||||
else:
|
else:
|
||||||
server_command = f'./{server_exe}'
|
server_command = f"./{server_exe}"
|
||||||
logger.debug('command: ' + server_command)
|
logger.debug("command: " + server_command)
|
||||||
server_log_file = "N/A"
|
server_log_file = "N/A"
|
||||||
server_stop = "stop"
|
server_stop = "stop"
|
||||||
|
|
||||||
new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_exe,
|
new_id = self.register_server(
|
||||||
server_log_file, server_stop, port, server_type='minecraft-bedrock')
|
server_name,
|
||||||
|
server_id,
|
||||||
|
new_server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_exe,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
port,
|
||||||
|
server_type="minecraft-bedrock",
|
||||||
|
)
|
||||||
if os.name != "nt":
|
if os.name != "nt":
|
||||||
if helper.check_file_exists(full_jar_path):
|
if helper.check_file_exists(full_jar_path):
|
||||||
os.chmod(full_jar_path, 0o2775)
|
os.chmod(full_jar_path, 0o2775)
|
||||||
|
|
||||||
return new_id
|
return new_id
|
||||||
|
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
# BEDROCK IMPORTS END
|
# BEDROCK IMPORTS END
|
||||||
#************************************************************************************************
|
# **********************************************************************************
|
||||||
|
|
||||||
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
|
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
|
||||||
server_data = self.servers.get_server_data_by_id(old_server_id)
|
server_data = self.servers.get_server_data_by_id(old_server_id)
|
||||||
old_bu_path = server_data['backup_path']
|
old_bu_path = server_data["backup_path"]
|
||||||
Server_Perms_Controller.backup_role_swap(old_server_id, new_server_id)
|
Server_Perms_Controller.backup_role_swap(old_server_id, new_server_id)
|
||||||
if not helper.is_os_windows():
|
if not helper.is_os_windows():
|
||||||
backup_path = helper.validate_traversal(helper.backup_path, old_bu_path)
|
backup_path = helper.validate_traversal(helper.backup_path, old_bu_path)
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
backup_path = helper.validate_traversal(helper.wtol_path(helper.backup_path), helper.wtol_path(old_bu_path))
|
backup_path = helper.validate_traversal(
|
||||||
|
helper.wtol_path(helper.backup_path), helper.wtol_path(old_bu_path)
|
||||||
|
)
|
||||||
backup_path = helper.wtol_path(str(backup_path))
|
backup_path = helper.wtol_path(str(backup_path))
|
||||||
backup_path.replace(' ', '^ ')
|
backup_path.replace(" ", "^ ")
|
||||||
backup_path = Path(backup_path)
|
backup_path = Path(backup_path)
|
||||||
backup_path_components = list(backup_path.parts)
|
backup_path_components = list(backup_path.parts)
|
||||||
backup_path_components[-1] = new_uuid
|
backup_path_components[-1] = new_uuid
|
||||||
@ -567,7 +734,9 @@ class Controller:
|
|||||||
os.rmdir(new_bu_path)
|
os.rmdir(new_bu_path)
|
||||||
backup_path.rename(new_bu_path)
|
backup_path.rename(new_bu_path)
|
||||||
|
|
||||||
def register_server(self, name: str,
|
def register_server(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
server_uuid: str,
|
server_uuid: str,
|
||||||
server_dir: str,
|
server_dir: str,
|
||||||
backup_path: str,
|
backup_path: str,
|
||||||
@ -576,18 +745,35 @@ class Controller:
|
|||||||
server_log_file: str,
|
server_log_file: str,
|
||||||
server_stop: str,
|
server_stop: str,
|
||||||
server_port: int,
|
server_port: int,
|
||||||
server_type: str):
|
server_type: str,
|
||||||
|
):
|
||||||
# put data in the db
|
# put data in the db
|
||||||
|
|
||||||
new_id = self.servers.create_server(
|
new_id = self.servers.create_server(
|
||||||
name, server_uuid, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, server_type, server_port)
|
name,
|
||||||
|
server_uuid,
|
||||||
|
server_dir,
|
||||||
|
backup_path,
|
||||||
|
server_command,
|
||||||
|
server_file,
|
||||||
|
server_log_file,
|
||||||
|
server_stop,
|
||||||
|
server_type,
|
||||||
|
server_port,
|
||||||
|
)
|
||||||
|
|
||||||
if not helper.check_file_exists(os.path.join(server_dir, "crafty_managed.txt")):
|
if not helper.check_file_exists(os.path.join(server_dir, "crafty_managed.txt")):
|
||||||
try:
|
try:
|
||||||
# place a file in the dir saying it's owned by crafty
|
# place a file in the dir saying it's owned by crafty
|
||||||
with open(os.path.join(server_dir, "crafty_managed.txt"), 'w', encoding='utf-8') as f:
|
with open(
|
||||||
|
os.path.join(server_dir, "crafty_managed.txt"),
|
||||||
|
"w",
|
||||||
|
encoding="utf-8",
|
||||||
|
) as f:
|
||||||
f.write(
|
f.write(
|
||||||
"The server is managed by Crafty Controller.\n Leave this directory/files alone please")
|
"The server is managed by Crafty Controller.\n "
|
||||||
|
"Leave this directory/files alone please"
|
||||||
|
)
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -604,26 +790,40 @@ class Controller:
|
|||||||
for s in self.servers_list:
|
for s in self.servers_list:
|
||||||
|
|
||||||
# if this is the droid... im mean server we are looking for...
|
# if this is the droid... im mean server we are looking for...
|
||||||
if str(s['server_id']) == str(server_id):
|
if str(s["server_id"]) == str(server_id):
|
||||||
server_data = self.get_server_data(server_id)
|
server_data = self.get_server_data(server_id)
|
||||||
server_name = server_data['server_name']
|
server_name = server_data["server_name"]
|
||||||
|
|
||||||
logger.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")
|
logger.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")
|
||||||
console.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")
|
console.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")
|
||||||
|
|
||||||
srv_obj = s['server_obj']
|
srv_obj = s["server_obj"]
|
||||||
running = srv_obj.check_running()
|
running = srv_obj.check_running()
|
||||||
|
|
||||||
if running:
|
if running:
|
||||||
self.stop_server(server_id)
|
self.stop_server(server_id)
|
||||||
if files:
|
if files:
|
||||||
try:
|
try:
|
||||||
file_helper.del_dirs(helper.get_os_understandable_path(self.servers.get_server_data_by_id(server_id)['path']))
|
file_helper.del_dirs(
|
||||||
|
helper.get_os_understandable_path(
|
||||||
|
self.servers.get_server_data_by_id(server_id)["path"]
|
||||||
|
)
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Unable to delete server files for server with ID: {server_id} with error logged: {e}")
|
logger.error(
|
||||||
if helper.check_path_exists(self.servers.get_server_data_by_id(server_id)['backup_path']):
|
f"Unable to delete server files for server with ID: "
|
||||||
file_helper.del_dirs(helper.get_os_understandable_path(self.servers.get_server_data_by_id(server_id)['backup_path']))
|
f"{server_id} with error logged: {e}"
|
||||||
|
)
|
||||||
|
if helper.check_path_exists(
|
||||||
|
self.servers.get_server_data_by_id(server_id)["backup_path"]
|
||||||
|
):
|
||||||
|
file_helper.del_dirs(
|
||||||
|
helper.get_os_understandable_path(
|
||||||
|
self.servers.get_server_data_by_id(server_id)[
|
||||||
|
"backup_path"
|
||||||
|
]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# Cleanup scheduled tasks
|
# Cleanup scheduled tasks
|
||||||
try:
|
try:
|
||||||
@ -637,6 +837,7 @@ class Controller:
|
|||||||
self.servers_list.pop(counter)
|
self.servers_list.pop(counter)
|
||||||
|
|
||||||
counter += 1
|
counter += 1
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def clear_unexecuted_commands():
|
def clear_unexecuted_commands():
|
||||||
helpers_management.clear_unexecuted_commands()
|
helpers_management.clear_unexecuted_commands()
|
||||||
|
@@ -17,24 +17,29 @@ except ModuleNotFoundError as err:
     helper.auto_installer_fix(err)

 logger = logging.getLogger(__name__)
-peewee_logger = logging.getLogger('peewee')
+peewee_logger = logging.getLogger("peewee")
 peewee_logger.setLevel(logging.INFO)
-database = SqliteDatabase(helper.db_path, pragmas = {
-    'journal_mode': 'wal',
-    'cache_size': -1024 * 10})
+database = SqliteDatabase(
+    helper.db_path, pragmas={"journal_mode": "wal", "cache_size": -1024 * 10}
+)


 class db_builder:

     @staticmethod
     def default_settings():
         logger.info("Fresh Install Detected - Creating Default Settings")
         console.info("Fresh Install Detected - Creating Default Settings")
         default_data = helper.find_default_password()

-        username = default_data.get("username", 'admin')
-        password = default_data.get("password", 'crafty')
+        username = default_data.get("username", "admin")
+        password = default_data.get("password", "crafty")

-        users_helper.add_user(username=username, password=password, email="default@example.com", superuser=True)
+        users_helper.add_user(
+            username=username,
+            password=password,
+            email="default@example.com",
+            superuser=True,
+        )

     @staticmethod
     def is_fresh_install():
@@ -45,11 +50,12 @@ class db_builder:
         except:
             return True

+
 class db_shortcuts:

-    #************************************************************************************************
+    # **********************************************************************************
     # Generic Databse Methods
-    #************************************************************************************************
+    # **********************************************************************************
     @staticmethod
     def return_rows(query):
         rows = []
@@ -69,8 +75,8 @@ class db_shortcuts:
         return data


-#************************************************************************************************
+# **********************************************************************************
 # Static Accessors
-#************************************************************************************************
+# **********************************************************************************
 installer = db_builder()
 db_helper = db_shortcuts()
@@ -15,8 +15,10 @@ try:
    import peewee
    from playhouse.migrate import (
        SqliteMigrator,
        Operation,
        SQL,
        SqliteDatabase,
        make_index_name,
    )

except ModuleNotFoundError as e:
@@ -24,7 +26,7 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)

MIGRATE_TABLE = "migratehistory"
MIGRATE_TEMPLATE = '''# Generated by database migrator
import peewee

@@ -70,6 +72,7 @@ def get_model(method):
        if isinstance(model, str):
            return method(migrator, migrator.table_dict[model], *args, **kwargs)
        return method(migrator, model, *args, **kwargs)

    return wrapper


@@ -133,11 +136,17 @@ class Migrator(object):
        """
        for name, field in fields.items():
            model._meta.add_field(name, field)
            self.operations.append(
                self.migrator.add_column(
                    model._meta.table_name, field.column_name, field
                )
            )
            if field.unique:
                self.operations.append(
                    self.migrator.add_index(
                        model._meta.table_name, (field.column_name,), unique=True
                    )
                )
        return model

    @get_model
@@ -145,19 +154,22 @@ class Migrator(object):
        """
        Removes fields from model.
        """
        fields = [field for field in model._meta.fields.values() if field.name in names]
        for field in fields:
            self.__del_field__(model, field)
            if field.unique:
                # Drop unique index
                index_name = make_index_name(
                    model._meta.table_name, [field.column_name]
                )
                self.operations.append(
                    self.migrator.drop_index(model._meta.table_name, index_name)
                )
            self.operations.append(
                self.migrator.drop_column(
                    model._meta.table_name, field.column_name, cascade=False
                )
            )
        return model

    def __del_field__(self, model: peewee.Model, field: peewee.Field):
@@ -169,12 +181,14 @@ class Migrator(object):
        if isinstance(field, peewee.ForeignKeyField):
            obj_id_name = field.column_name
            if field.column_name == field.name:
                obj_id_name += "_id"
            delattr(model, obj_id_name)
            delattr(field.rel_model, field.backref)

    @get_model
    def rename_column(
        self, model: peewee.Model, old_name: str, new_name: str
    ) -> peewee.Model:
        """
        Renames field in model.
        """
@@ -185,9 +199,10 @@ class Migrator(object):
        field.name = field.column_name = new_name
        model._meta.add_field(new_name, field)
        if isinstance(field, peewee.ForeignKeyField):
            field.column_name = new_name = field.column_name + "_id"
        self.operations.append(
            self.migrator.rename_column(model._meta.table_name, old_name, new_name)
        )
        return model

    @get_model
@@ -203,7 +218,9 @@ class Migrator(object):
        return model

    @get_model
    def add_index(
        self, model: peewee.Model, *columns: str, unique=False
    ) -> peewee.Model:
        """Create indexes."""
        model._meta.indexes.append((columns, unique))
        columns_ = []
@@ -215,11 +232,12 @@ class Migrator(object):
                field.index = not unique

            if isinstance(field, peewee.ForeignKeyField):
                col = col + "_id"

            columns_.append(col)
        self.operations.append(
            self.migrator.add_index(model._meta.table_name, columns_, unique=unique)
        )
        return model

    @get_model
@@ -235,13 +253,15 @@ class Migrator(object):
                field.unique = field.index = False

            if isinstance(field, peewee.ForeignKeyField):
                col = col + "_id"
            columns_.append(col)
        index_name = make_index_name(model._meta.table_name, columns_)
        model._meta.indexes = [
            (cols, _) for (cols, _) in model._meta.indexes if columns != cols
        ]
        self.operations.append(
            self.migrator.drop_index(model._meta.table_name, index_name)
        )
        return model

    @get_model
@@ -250,8 +270,9 @@ class Migrator(object):
        for name in names:
            field = model._meta.fields[name]
            field.null = False
            self.operations.append(
                self.migrator.add_not_null(model._meta.table_name, field.column_name)
            )
        return model

    @get_model
@@ -260,17 +281,21 @@ class Migrator(object):
        for name in names:
            field = model._meta.fields[name]
            field.null = True
            self.operations.append(
                self.migrator.drop_not_null(model._meta.table_name, field.column_name)
            )
        return model

    @get_model
    def add_default(
        self, model: peewee.Model, name: str, default: t.Any
    ) -> peewee.Model:
        """Add default."""
        field = model._meta.fields[name]
        model._meta.defaults[field] = field.default = default
        self.operations.append(
            self.migrator.apply_default(model._meta.table_name, name, field)
        )
        return model


@@ -283,7 +308,7 @@ class MigrationManager(object):
        Initializes the migration manager.
        """
        if not isinstance(database, (peewee.Database, peewee.Proxy)):
            raise RuntimeError("Invalid database: {}".format(database))
        self.database = database

    @cached_property
@@ -292,7 +317,7 @@ class MigrationManager(object):
        Initialize and cache the MigrationHistory model.
        """
        MigrateHistory._meta.database = self.database
        MigrateHistory._meta.table_name = "migratehistory"
        MigrateHistory._meta.schema = None
        MigrateHistory.create_table(True)
        return MigrateHistory
@@ -310,10 +335,13 @@ class MigrationManager(object):
        Scans migrations in the file system.
        """
        if not os.path.exists(helper.migration_dir):
            logger.warning(
                "Migration directory: {} does not exist.".format(helper.migration_dir)
            )
            os.makedirs(helper.migration_dir)
        return sorted(
            f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f)
        )

    @property
    def diff(self) -> t.List[str]:
@@ -333,24 +361,27 @@ class MigrationManager(object):
            self.up_one(name, migrator, True)
        return migrator

    def compile(self, name, migrate="", rollback=""):
        """
        Compiles a migration.
        """
        name = datetime.utcnow().strftime("%Y%m%d%H%M%S") + "_" + name
        filename = name + ".py"
        path = os.path.join(helper.migration_dir, filename)
        with open(path, "w") as f:
            f.write(
                MIGRATE_TEMPLATE.format(
                    migrate=migrate, rollback=rollback, name=filename
                )
            )

        return name

    def create(self, name: str = "auto", auto: bool = False) -> t.Optional[str]:
        """
        Creates a migration.
        """
        migrate = rollback = ""
        if auto:
            raise NotImplementedError

@@ -367,14 +398,14 @@ class MigrationManager(object):
        """
        Runs all unapplied migrations.
        """
        logger.info("Starting migrations")
        console.info("Starting migrations")

        done = []
        diff = self.diff
        if not diff:
            logger.info("There is nothing to migrate")
            console.info("There is nothing to migrate")
            return done

        migrator = self.migrator
@@ -392,16 +423,19 @@ class MigrationManager(object):
        call_params = dict()
        if helper.is_os_windows() and sys.version_info >= (3, 0):
            # if system is windows - force utf-8 encoding
            call_params["encoding"] = "utf-8"
        with open(os.path.join(helper.migration_dir, name + ".py"), **call_params) as f:
            code = f.read()
            scope = {}
            code = compile(code, "<string>", "exec", dont_inherit=True)
            exec(code, scope, None)
        return scope.get("migrate", lambda m, d: None), scope.get(
            "rollback", lambda m, d: None
        )

    def up_one(
        self, name: str, migrator: Migrator, fake: bool = False, rollback: bool = False
    ) -> str:
        """
        Runs a migration with a given name.
        """
@@ -429,8 +463,8 @@ class MigrationManager(object):
        except Exception:
            self.database.rollback()
            operation_name = "Rollback" if rollback else "Migration"
            logger.exception("{} failed: {}".format(operation_name, name))
            raise

    def down(self):
@@ -438,10 +472,10 @@ class MigrationManager(object):
        Rolls back migrations.
        """
        if not self.done:
            raise RuntimeError("No migrations are found.")

        name = self.done[-1]

        migrator = self.migrator
        self.up_one(name, migrator, False, True)
        logger.warning("Rolled back migration: {}".format(name))
@@ -1,22 +1,27 @@
from enum import Enum


class PermissionHelper:
    @staticmethod
    def both_have_perm(a: str, b: str, permission_tested: Enum):
        return permission_helper.combine_perm_bool(
            a[permission_tested.value], b[permission_tested.value]
        )

    @staticmethod
    def combine_perm(a: str, b: str) -> str:
        return "1" if (a == "1" and b == "1") else "0"

    @staticmethod
    def combine_perm_bool(a: str, b: str) -> bool:
        return a == "1" and b == "1"

    @staticmethod
    def combine_masks(permission_mask_a: str, permission_mask_b: str) -> str:
        both_masks = zip(list(permission_mask_a), list(permission_mask_b))
        return "".join(
            map(lambda x: permission_helper.combine_perm(x[0], x[1]), both_masks)
        )


permission_helper = PermissionHelper()
File diff suppressed because it is too large
@@ -23,23 +23,24 @@ try:
except ModuleNotFoundError as err:
    helper.auto_installer_fix(err)

logger = logging.getLogger("apscheduler")
scheduler_intervals = {
    "seconds",
    "minutes",
    "hours",
    "days",
    "weeks",
    "monday",
    "tuesday",
    "wednesday",
    "thursday",
    "friday",
    "saturday",
    "sunday",
}


class TasksManager:
    def __init__(self, controller):
        self.controller = controller
        self.tornado = Webserver(controller, self)
@@ -49,21 +50,30 @@ class TasksManager:

        self.users_controller = Users_Controller()

        self.webserver_thread = threading.Thread(
            target=self.tornado.run_tornado, daemon=True, name="tornado_thread"
        )

        self.main_thread_exiting = False

        self.schedule_thread = threading.Thread(
            target=self.scheduler_thread, daemon=True, name="scheduler"
        )

        self.log_watcher_thread = threading.Thread(
            target=self.log_watcher, daemon=True, name="log_watcher"
        )

        self.command_thread = threading.Thread(
            target=self.command_watcher, daemon=True, name="command_watcher"
        )

        self.realtime_thread = threading.Thread(
            target=self.realtime, daemon=True, name="realtime"
        )

        self.reload_schedule_from_db()

    def get_main_thread_run_status(self):
        return self.main_thread_exiting

@@ -81,16 +91,19 @@ class TasksManager:
                try:
                    svr = self.controller.get_server_obj(c.server_id)
                except:
                    logger.error(
                        "Server value requested does note exist! "
                        "Purging item from waiting commands."
                    )
                    management_helper.mark_command_complete(c.command_id)

                user_id = c.user_id
                command = c.command

                if command == "start_server":
                    svr.run_threaded_server(user_id)

                elif command == "stop_server":
                    svr.stop_threaded_server()

                elif command == "restart_server":
@@ -125,7 +138,9 @@ class TasksManager:
        self.tornado.stop_web_server()
        console.info("Waiting 3 seconds")
        time.sleep(3)
        self.webserver_thread = threading.Thread(
            target=self.tornado.run_tornado, daemon=True, name="tornado_thread"
        )
        self.start_webserver()

    def stop_webserver(self):
@@ -148,21 +163,27 @@ class TasksManager:
    def scheduler_thread(self):
        schedules = management_helper.get_schedules_enabled()
        self.scheduler.add_listener(self.schedule_watcher, mask=EVENT_JOB_EXECUTED)
        # self.scheduler.add_job(
        #     self.scheduler.print_jobs, "interval", seconds=10, id="-1"
        # )

        # load schedules from DB
        for schedule in schedules:
            if schedule.interval != "reaction":
                if schedule.cron_string != "":
                    try:
                        self.scheduler.add_job(
                            management_helper.add_command,
                            CronTrigger.from_crontab(
                                schedule.cron_string, timezone=str(self.tz)
                            ),
                            id=str(schedule.schedule_id),
                            args=[
                                schedule.server_id,
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                schedule.command,
                            ],
                        )
                    except Exception as e:
                        console.error(f"Failed to schedule task with error: {e}.")
@@ -172,39 +193,48 @@ class TasksManager:
                        # remove items from DB if task fails to add to apscheduler
                        management_helper.delete_scheduled_task(schedule.schedule_id)
                else:
                    if schedule.interval_type == "hours":
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            minute=0,
                            hour="*/" + str(schedule.interval),
                            id=str(schedule.schedule_id),
                            args=[
                                schedule.server_id,
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                schedule.command,
                            ],
                        )
                    elif schedule.interval_type == "minutes":
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            minute="*/" + str(schedule.interval),
                            id=str(schedule.schedule_id),
                            args=[
                                schedule.server_id,
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                schedule.command,
                            ],
                        )
                    elif schedule.interval_type == "days":
                        curr_time = schedule.start_time.split(":")
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            day="*/" + str(schedule.interval),
                            hour=curr_time[0],
                            minute=curr_time[1],
                            id=str(schedule.schedule_id),
                            args=[
                                schedule.server_id,
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                schedule.command,
                            ],
                        )
        self.scheduler.start()
        jobs = self.scheduler.get_jobs()
@@ -214,33 +244,39 @@ class TasksManager:

    def schedule_job(self, job_data):
        sch_id = management_helper.create_scheduled_task(
            job_data["server_id"],
            job_data["action"],
            job_data["interval"],
            job_data["interval_type"],
            job_data["start_time"],
            job_data["command"],
            "None",
            job_data["enabled"],
            job_data["one_time"],
            job_data["cron_string"],
            job_data["parent"],
            job_data["delay"],
        )
        # Checks to make sure some doofus didn't actually make the newly
        # created task a child of itself.
        if str(job_data["parent"]) == str(sch_id):
            management_helper.update_scheduled_task(sch_id, {"parent": None})
        # Check to see if it's enabled and is not a chain reaction.
        if job_data["enabled"] and job_data["interval_type"] != "reaction":
            if job_data["cron_string"] != "":
                try:
                    self.scheduler.add_job(
                        management_helper.add_command,
                        CronTrigger.from_crontab(
                            job_data["cron_string"], timezone=str(self.tz)
                        ),
                        id=str(sch_id),
                        args=[
                            job_data["server_id"],
                            self.users_controller.get_id_by_name("system"),
                            "127.0.0.1",
                            job_data["command"],
                        ],
                    )
                except Exception as e:
                    console.error(f"Failed to schedule task with error: {e}.")
@@ -250,39 +286,48 @@ class TasksManager:
                    # remove items from DB if task fails to add to apscheduler
                    management_helper.delete_scheduled_task(sch_id)
            else:
                if job_data["interval_type"] == "hours":
                    self.scheduler.add_job(
                        management_helper.add_command,
                        "cron",
                        minute=0,
                        hour="*/" + str(job_data["interval"]),
                        id=str(sch_id),
                        args=[
                            job_data["server_id"],
                            self.users_controller.get_id_by_name("system"),
                            "127.0.0.1",
                            job_data["command"],
                        ],
                    )
                elif job_data["interval_type"] == "minutes":
                    self.scheduler.add_job(
                        management_helper.add_command,
                        "cron",
                        minute="*/" + str(job_data["interval"]),
                        id=str(sch_id),
                        args=[
                            job_data["server_id"],
                            self.users_controller.get_id_by_name("system"),
                            "127.0.0.1",
                            job_data["command"],
                        ],
                    )
                elif job_data["interval_type"] == "days":
                    curr_time = job_data["start_time"].split(":")
                    self.scheduler.add_job(
                        management_helper.add_command,
                        "cron",
                        day="*/" + str(job_data["interval"]),
                        hour=curr_time[0],
                        minute=curr_time[1],
                        id=str(sch_id),
                        args=[
                            job_data["server_id"],
                            self.users_controller.get_id_by_name("system"),
                            "127.0.0.1",
                            job_data["command"],
                        ],
                    )
        logger.info("Added job. Current enabled schedules: ")
        jobs = self.scheduler.get_jobs()
@@ -292,134 +337,191 @@ class TasksManager:
    def remove_all_server_tasks(self, server_id):
        schedules = management_helper.get_schedules_by_server(server_id)
        for schedule in schedules:
            if schedule.interval != "reaction":
                self.remove_job(schedule.schedule_id)

    def remove_job(self, sch_id):
        job = management_helper.get_scheduled_task_model(sch_id)
        for schedule in management_helper.get_child_schedules(sch_id):
            management_helper.update_scheduled_task(
                schedule.schedule_id, {"parent": None}
            )
        management_helper.delete_scheduled_task(sch_id)
        if job.enabled and job.interval_type != "reaction":
            self.scheduler.remove_job(str(sch_id))
            logger.info(f"Job with ID {sch_id} was deleted.")
        else:
            logger.info(
                f"Job with ID {sch_id} was deleted from DB, but was not enabled."
                f"Not going to try removing something "
                f"that doesn't exist from active schedules."
            )

    def update_job(self, sch_id, job_data):
        management_helper.update_scheduled_task(sch_id, job_data)
        # Checks to make sure some doofus didn't actually make the newly
        # created task a child of itself.
        if str(job_data["parent"]) == str(sch_id):
            management_helper.update_scheduled_task(sch_id, {"parent": None})
        try:
            if job_data["interval"] != "reaction":
                self.scheduler.remove_job(str(sch_id))
        except:
            logger.info(
                "No job found in update job. "
                "Assuming it was previously disabled. Starting new job."
            )

        if job_data["enabled"]:
            if job_data["interval"] != "reaction":
                if job_data["cron_string"] != "":
                    try:
                        self.scheduler.add_job(
                            management_helper.add_command,
                            CronTrigger.from_crontab(
                                job_data["cron_string"], timezone=str(self.tz)
                            ),
                            id=str(sch_id),
                            args=[
                                job_data["server_id"],
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                job_data["command"],
                            ],
                        )
                    except Exception as e:
                        console.error(f"Failed to schedule task with error: {e}.")
                        console.info("Removing failed task from DB.")
                        management_helper.delete_scheduled_task(sch_id)
                else:
                    if job_data["interval_type"] == "hours":
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            minute=0,
                            hour="*/" + str(job_data["interval"]),
                            id=str(sch_id),
                            args=[
                                job_data["server_id"],
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                job_data["command"],
                            ],
                        )
                    elif job_data["interval_type"] == "minutes":
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            minute="*/" + str(job_data["interval"]),
                            id=str(sch_id),
                            args=[
                                job_data["server_id"],
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                job_data["command"],
                            ],
                        )
                    elif job_data["interval_type"] == "days":
                        curr_time = job_data["start_time"].split(":")
                        self.scheduler.add_job(
                            management_helper.add_command,
                            "cron",
                            day="*/" + str(job_data["interval"]),
                            hour=curr_time[0],
                            minute=curr_time[1],
                            id=str(sch_id),
                            args=[
                                job_data["server_id"],
                                self.users_controller.get_id_by_name("system"),
                                "127.0.0.1",
                                job_data["command"],
                            ],
                        )
        else:
            try:
                self.scheduler.get_job(str(sch_id))
                self.scheduler.remove_job(str(sch_id))
            except:
                logger.info(
                    f"APScheduler found no scheduled job on schedule update for "
                    f"schedule with id: {sch_id} Assuming it was already disabled."
                )

    def schedule_watcher(self, event):
        if not event.exception:
            if str(event.job_id).isnumeric():
                task = management_helper.get_scheduled_task_model(int(event.job_id))
                management_helper.add_to_audit_log_raw(
                    "system",
                    users_helper.get_user_id_by_name("system"),
                    task.server_id,
                    f"Task with id {task.schedule_id} completed successfully",
                    "127.0.0.1",
                )
                # check if the task is a single run.
                if task.one_time:
                    self.remove_job(task.schedule_id)
                    logger.info("one time task detected. Deleting...")
                # check for any child tasks for this. It's kind of backward,
                # but this makes DB management a lot easier. One to one
                # instead of one to many.
                for schedule in management_helper.get_child_schedules_by_server(
                    task.schedule_id, task.server_id
                ):
                    # event job ID's are strings so we need to look at
                    # this as the same data type.
                    if str(schedule.parent) == str(event.job_id):
                        if schedule.enabled:
                            delaytime = datetime.datetime.now() + datetime.timedelta(
                                seconds=schedule.delay
                            )
                            self.scheduler.add_job(
                                management_helper.add_command,
                                "date",
                                run_date=delaytime,
                                id=str(schedule.schedule_id),
                                args=[
                                    schedule.server_id,
                                    self.users_controller.get_id_by_name("system"),
                                    "127.0.0.1",
                                    schedule.command,
                                ],
                            )
            else:
                logger.info(
                    "Event job ID is not numerical. Assuming it's stats "
                    "- not stored in DB. Moving on."
                )
        else:
            logger.error(f"Task failed with error: {event.exception}")

    def start_stats_recording(self):
        stats_update_frequency = helper.get_setting("stats_update_frequency")
        logger.info(
            f"Stats collection frequency set to {stats_update_frequency} seconds"
        )
        console.info(
            f"Stats collection frequency set to {stats_update_frequency} seconds"
        )

        # one for now,
        self.controller.stats.record_stats()
        # one for later
        self.scheduler.add_job(
            self.controller.stats.record_stats,
            "interval",
            seconds=stats_update_frequency,
            id="stats",
        )

    def serverjar_cache_refresher(self):
        logger.info("Refreshing serverjars.com cache on start")
        server_jar_obj.refresh_cache()

        logger.info("Scheduling Serverjars.com cache refresh service every 12 hours")
        self.scheduler.add_job(
            server_jar_obj.refresh_cache, "interval", hours=12, id="serverjars"
        )

    def realtime(self):
        loop = asyncio.new_event_loop()
@@ -429,24 +531,38 @@ class TasksManager:

        while True:

            if host_stats.get(
                "cpu_usage"
            ) != management_helper.get_latest_hosts_stats().get(
                "cpu_usage"
            ) or host_stats.get(
                "mem_percent"
            ) != management_helper.get_latest_hosts_stats().get(
                "mem_percent"
            ):
                # Stats are different

                host_stats = management_helper.get_latest_hosts_stats()
                if len(websocket_helper.clients) > 0:
                    # There are clients
                    websocket_helper.broadcast_page(
                        "/panel/dashboard",
                        "update_host_stats",
                        {
                            "cpu_usage": host_stats.get("cpu_usage"),
                            "cpu_cores": host_stats.get("cpu_cores"),
                            "cpu_cur_freq": host_stats.get("cpu_cur_freq"),
                            "cpu_max_freq": host_stats.get("cpu_max_freq"),
                            "mem_percent": host_stats.get("mem_percent"),
                            "mem_usage": host_stats.get("mem_usage"),
                        },
                    )

    def log_watcher(self):
        self.controller.servers.check_for_old_logs()
        self.scheduler.add_job(
            self.controller.servers.check_for_old_logs,
            "interval",
            hours=6,
            id="log-mgmt",
        )
@@ -8,17 +8,18 @@ from app.classes.shared.helpers import helper

logger = logging.getLogger(__name__)


class Translation:
    def __init__(self):
        self.translations_path = os.path.join(helper.root_dir, "app", "translations")
        self.cached_translation = None
        self.cached_translation_lang = None

    def get_language_file(self, language: str):
        return os.path.join(self.translations_path, str(language) + ".json")

    def translate(self, page, word, language):
        fallback_language = "en_EN"

        translated_word = self.translate_inner(page, word, language)
        if translated_word is None:
@@ -31,20 +32,20 @@ class Translation:
        elif isinstance(translated_word, str):
            # Basic strings
            return translated_word
        elif hasattr(translated_word, "__iter__"):
            # Multiline strings
            return "\n".join(translated_word)
        return "Error while getting translation"

    def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
        language_file = self.get_language_file(language)
        try:
            if not self.cached_translation:
                with open(language_file, "r", encoding="utf-8") as f:
                    data = json.load(f)
                self.cached_translation = data
            elif self.cached_translation_lang != language:
                with open(language_file, "r", encoding="utf-8") as f:
                    data = json.load(f)
                self.cached_translation = data
                self.cached_translation_lang = language
@@ -54,21 +55,37 @@ class Translation:
            try:
                translated_page = data[page]
            except KeyError:
                logger.error(
                    f"Translation File Error: page {page} "
                    f"does not exist for lang {language}"
                )
                console.error(
                    f"Translation File Error: page {page} "
                    f"does not exist for lang {language}"
                )
                return None

            try:
                translated_word = translated_page[word]
                return translated_word
            except KeyError:
                logger.error(
                    f"Translation File Error: word {word} does not exist on page "
                    f"{page} for lang {language}"
                )
                console.error(
                    f"Translation File Error: word {word} does not exist on page "
                    f"{page} for lang {language}"
                )
                return None

        except Exception as e:
            logger.critical(
                f"Translation File Error: Unable to read {language_file} due to {e}"
            )
            console.critical(
                f"Translation File Error: Unable to read {language_file} due to {e}"
            )
            return None

@@ -22,8 +22,8 @@ except ModuleNotFoundError as ex:

logger = logging.getLogger(__name__)


class AjaxHandler(BaseHandler):
    def render_page(self, template, page_data):
        self.render(
            template,
@@ -34,22 +34,19 @@ class AjaxHandler(BaseHandler):
    @tornado.web.authenticated
    def get(self, page):
        _, _, exec_user = self.current_user
        error = bleach.clean(self.get_argument("error", "WTF Error!"))

        template = "panel/denied.html"

        page_data = {"user_data": exec_user, "error": error}

        if page == "error":
            template = "public/error.html"
            self.render_page(template, page_data)

        elif page == "server_log":
            server_id = self.get_argument("id", None)
            full_log = self.get_argument("full", False)

            if server_id is None:
                logger.warning("Server ID not found in server_log ajax call")
@@ -64,22 +61,26 @@ class AjaxHandler(BaseHandler):
                self.redirect("/panel/error?error=Server ID Not Found")
                return

            if not server_data["log_path"]:
                logger.warning(
                    f"Log path not found in server_log ajax call ({server_id})"
                )

            if full_log:
                log_lines = helper.get_setting("max_log_lines")
                data = helper.tail_file(
                    helper.get_os_understandable_path(server_data["log_path"]),
                    log_lines,
                )
            else:
                data = ServerOutBuf.lines.get(server_id, [])

            for d in data:
                try:
                    d = re.sub("(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )", "", d)
                    d = re.sub("[A-z]{2}\b\b", "", d)
                    line = helper.log_colors(html.escape(d))
                    self.write(f"{line}<br />")
                    # self.write(d.encode("utf-8"))

                except Exception as e:
@@ -87,27 +88,32 @@ class AjaxHandler(BaseHandler):

        elif page == "announcements":
            data = helper.get_announcements()
            page_data["notify_data"] = data
            self.render_page("ajax/notify.html", page_data)

        elif page == "get_zip_tree":
            path = self.get_argument("path", None)

            self.write(
                helper.get_os_understandable_path(path)
                + "\n"
                + helper.generate_zip_tree(path)
            )
            self.finish()

        elif page == "get_zip_dir":
            path = self.get_argument("path", None)

            self.write(
                helper.get_os_understandable_path(path)
                + "\n"
                + helper.generate_zip_dir(path)
            )
            self.finish()

        elif page == "get_backup_tree":
            server_id = self.get_argument("id", None)
            folder = self.get_argument("path", None)

            output = ""

@@ -119,18 +125,19 @@ class AjaxHandler(BaseHandler):
                    dir_list.append(item)
                else:
                    unsorted_files.append(item)
            file_list = sorted(dir_list, key=str.casefold) + sorted(
                unsorted_files, key=str.casefold
            )
            output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""
            for raw_filename in file_list:
                filename = html.escape(raw_filename)
                rel = os.path.join(folder, raw_filename)
                dpath = os.path.join(folder, filename)
                if str(dpath) in self.controller.management.get_excluded_backup_dirs(
                    server_id
                ):
                    if os.path.isdir(rel):
                        output += f"""<li class="tree-item" data-path="{dpath}">
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
<input type="checkbox" class="checkBoxClass" name="root_path" value="{dpath}" checked>
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
@@ -139,8 +146,7 @@ class AjaxHandler(BaseHandler):
<strong>{filename}</strong>
</span>
</input></div><li>
\n"""

                    else:
                        output += f"""<li
class="tree-nested d-block tree-ctx-item tree-file"
@@ -151,8 +157,7 @@ class AjaxHandler(BaseHandler):

                else:
                    if os.path.isdir(rel):
                        output += \
|
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||||
f"""<li class="tree-item" data-path="{dpath}">
|
|
||||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||||
<input type="checkbox" class="checkBoxClass" name="root_path" value="{dpath}">
|
<input type="checkbox" class="checkBoxClass" name="root_path" value="{dpath}">
|
||||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||||
@ -161,22 +166,21 @@ class AjaxHandler(BaseHandler):
|
|||||||
<strong>{filename}</strong>
|
<strong>{filename}</strong>
|
||||||
</span>
|
</span>
|
||||||
</input></div><li>
|
</input></div><li>
|
||||||
\n"""\
|
\n"""
|
||||||
|
|
||||||
else:
|
else:
|
||||||
output += f"""<li
|
output += f"""<li
|
||||||
class="tree-nested d-block tree-ctx-item tree-file"
|
class="tree-nested d-block tree-ctx-item tree-file"
|
||||||
data-path="{dpath}"
|
data-path="{dpath}"
|
||||||
data-name="{filename}"
|
data-name="{filename}"
|
||||||
onclick=""><input type='checkbox' class="checkBoxClass" name='root_path' value="{dpath}">
|
onclick=""><input type='checkbox' class="checkBoxClass" name='root_path' value="{dpath}">
|
||||||
<span style="margin-right: 6px;"><i class="far fa-file"></i></span></input>{filename}</li>"""
|
<span style="margin-right: 6px;"><i class="far fa-file">
|
||||||
self.write(helper.get_os_understandable_path(folder) + '\n' +
|
</i></span></input>{filename}</li>"""
|
||||||
output)
|
self.write(helper.get_os_understandable_path(folder) + "\n" + output)
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
elif page == "get_backup_dir":
|
elif page == "get_backup_dir":
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
folder = self.get_argument('path', None)
|
folder = self.get_argument("path", None)
|
||||||
output = ""
|
output = ""
|
||||||
|
|
||||||
dir_list = []
|
dir_list = []
|
||||||
@ -187,18 +191,19 @@ class AjaxHandler(BaseHandler):
|
|||||||
dir_list.append(item)
|
dir_list.append(item)
|
||||||
else:
|
else:
|
||||||
unsorted_files.append(item)
|
unsorted_files.append(item)
|
||||||
file_list = sorted(dir_list, key=str.casefold) + sorted(unsorted_files, key=str.casefold)
|
file_list = sorted(dir_list, key=str.casefold) + sorted(
|
||||||
output += \
|
unsorted_files, key=str.casefold
|
||||||
f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
|
)
|
||||||
|
output += f"""<ul class="tree-nested d-block" id="{folder}ul">"""
|
||||||
for raw_filename in file_list:
|
for raw_filename in file_list:
|
||||||
filename = html.escape(raw_filename)
|
filename = html.escape(raw_filename)
|
||||||
rel = os.path.join(folder, raw_filename)
|
rel = os.path.join(folder, raw_filename)
|
||||||
dpath = os.path.join(folder, filename)
|
dpath = os.path.join(folder, filename)
|
||||||
if str(dpath) in self.controller.management.get_excluded_backup_dirs(server_id):
|
if str(dpath) in self.controller.management.get_excluded_backup_dirs(
|
||||||
|
server_id
|
||||||
|
):
|
||||||
if os.path.isdir(rel):
|
if os.path.isdir(rel):
|
||||||
output += \
|
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||||
f"""<li class="tree-item" data-path="{dpath}">
|
|
||||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||||
<input type="checkbox" name="root_path" value="{dpath}">
|
<input type="checkbox" name="root_path" value="{dpath}">
|
||||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||||
@ -206,8 +211,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
<i class="far fa-folder-open"></i>
|
<i class="far fa-folder-open"></i>
|
||||||
<strong>{filename}</strong>
|
<strong>{filename}</strong>
|
||||||
</span>
|
</span>
|
||||||
</input></div><li>"""\
|
</input></div><li>"""
|
||||||
|
|
||||||
else:
|
else:
|
||||||
output += f"""<li
|
output += f"""<li
|
||||||
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
||||||
@ -218,8 +222,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
if os.path.isdir(rel):
|
if os.path.isdir(rel):
|
||||||
output += \
|
output += f"""<li class="tree-item" data-path="{dpath}">
|
||||||
f"""<li class="tree-item" data-path="{dpath}">
|
|
||||||
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
|
||||||
<input type="checkbox" name="root_path" value="{dpath}">
|
<input type="checkbox" name="root_path" value="{dpath}">
|
||||||
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
|
||||||
@ -227,58 +230,64 @@ class AjaxHandler(BaseHandler):
|
|||||||
<i class="far fa-folder-open"></i>
|
<i class="far fa-folder-open"></i>
|
||||||
<strong>{filename}</strong>
|
<strong>{filename}</strong>
|
||||||
</span>
|
</span>
|
||||||
</input></div><li>"""\
|
</input></div><li>"""
|
||||||
|
|
||||||
else:
|
else:
|
||||||
output += f"""<li
|
output += f"""<li
|
||||||
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
class="tree-item tree-nested d-block tree-ctx-item tree-file"
|
||||||
data-path="{dpath}"
|
data-path="{dpath}"
|
||||||
data-name="{filename}"
|
data-name="{filename}"
|
||||||
onclick=""><input type='checkbox' name='root_path' value='{dpath}'>
|
onclick=""><input type='checkbox' name='root_path' value='{dpath}'>
|
||||||
<span style="margin-right: 6px;"><i class="far fa-file"></i></span></input>{filename}</li>"""
|
<span style="margin-right: 6px;"><i class="far fa-file">
|
||||||
|
</i></span></input>{filename}</li>"""
|
||||||
|
|
||||||
self.write(helper.get_os_understandable_path(folder) + '\n' +
|
self.write(helper.get_os_understandable_path(folder) + "\n" + output)
|
||||||
output)
|
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
elif page == "get_dir":
|
elif page == "get_dir":
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument("path", None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_tree'):
|
if not self.check_server_id(server_id, "get_tree"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
if helper.validate_traversal(
|
||||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||||
helper.generate_dir(path))
|
):
|
||||||
|
self.write(
|
||||||
|
helper.get_os_understandable_path(path)
|
||||||
|
+ "\n"
|
||||||
|
+ helper.generate_dir(path)
|
||||||
|
)
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def post(self, page):
|
def post(self, page):
|
||||||
api_key, _, exec_user = self.current_user
|
api_key, _, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user["superuser"]
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
superuser = superuser and api_key.superuser
|
superuser = superuser and api_key.superuser
|
||||||
|
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
permissions = {
|
permissions = {
|
||||||
'Commands': Enum_Permissions_Server.Commands,
|
"Commands": Enum_Permissions_Server.Commands,
|
||||||
'Terminal': Enum_Permissions_Server.Terminal,
|
"Terminal": Enum_Permissions_Server.Terminal,
|
||||||
'Logs': Enum_Permissions_Server.Logs,
|
"Logs": Enum_Permissions_Server.Logs,
|
||||||
'Schedule': Enum_Permissions_Server.Schedule,
|
"Schedule": Enum_Permissions_Server.Schedule,
|
||||||
'Backup': Enum_Permissions_Server.Backup,
|
"Backup": Enum_Permissions_Server.Backup,
|
||||||
'Files': Enum_Permissions_Server.Files,
|
"Files": Enum_Permissions_Server.Files,
|
||||||
'Config': Enum_Permissions_Server.Config,
|
"Config": Enum_Permissions_Server.Config,
|
||||||
'Players': Enum_Permissions_Server.Players,
|
"Players": Enum_Permissions_Server.Players,
|
||||||
}
|
}
|
||||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
|
|
||||||
if page == "send_command":
|
if page == "send_command":
|
||||||
command = self.get_body_argument('command', default=None, strip=True)
|
command = self.get_body_argument("command", default=None, strip=True)
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
if server_id is None:
|
if server_id is None:
|
||||||
logger.warning("Server ID not found in send_command ajax call")
|
logger.warning("Server ID not found in send_command ajax call")
|
||||||
@ -286,54 +295,74 @@ class AjaxHandler(BaseHandler):
|
|||||||
|
|
||||||
srv_obj = self.controller.get_server_obj(server_id)
|
srv_obj = self.controller.get_server_obj(server_id)
|
||||||
|
|
||||||
if command == srv_obj.settings['stop_command']:
|
if command == srv_obj.settings["stop_command"]:
|
||||||
logger.info("Stop command detected as terminal input - intercepting." +
|
logger.info(
|
||||||
f"Starting Crafty's stop process for server with id: {server_id}")
|
"Stop command detected as terminal input - intercepting."
|
||||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'stop_server')
|
+ f"Starting Crafty's stop process for server with id: {server_id}"
|
||||||
|
)
|
||||||
|
self.controller.management.send_command(
|
||||||
|
exec_user["user_id"], server_id, self.get_remote_ip(), "stop_server"
|
||||||
|
)
|
||||||
command = None
|
command = None
|
||||||
elif command == 'restart':
|
elif command == "restart":
|
||||||
logger.info("Restart command detected as terminal input - intercepting." +
|
logger.info(
|
||||||
f"Starting Crafty's stop process for server with id: {server_id}")
|
"Restart command detected as terminal input - intercepting."
|
||||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'restart_server')
|
+ f"Starting Crafty's stop process for server with id: {server_id}"
|
||||||
|
)
|
||||||
|
self.controller.management.send_command(
|
||||||
|
exec_user["user_id"],
|
||||||
|
server_id,
|
||||||
|
self.get_remote_ip(),
|
||||||
|
"restart_server",
|
||||||
|
)
|
||||||
command = None
|
command = None
|
||||||
if command:
|
if command:
|
||||||
if srv_obj.check_running():
|
if srv_obj.check_running():
|
||||||
srv_obj.send_command(command)
|
srv_obj.send_command(command)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(
|
||||||
f"Sent command to {self.controller.servers.get_server_friendly_name(server_id)} terminal: {command}",
|
exec_user["user_id"],
|
||||||
|
f"Sent command to "
|
||||||
|
f"{self.controller.servers.get_server_friendly_name(server_id)} "
|
||||||
|
f"terminal: {command}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
|
||||||
elif page == "send_order":
|
elif page == "send_order":
|
||||||
self.controller.users.update_server_order(exec_user['user_id'], bleach.clean(self.get_argument('order')))
|
self.controller.users.update_server_order(
|
||||||
|
exec_user["user_id"], bleach.clean(self.get_argument("order"))
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
elif page == "backup_now":
|
elif page == "backup_now":
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
if server_id is None:
|
if server_id is None:
|
||||||
logger.error("Server ID is none. Canceling backup!")
|
logger.error("Server ID is none. Canceling backup!")
|
||||||
return
|
return
|
||||||
|
|
||||||
server = self.controller.get_server_obj(server_id)
|
server = self.controller.get_server_obj(server_id)
|
||||||
self.controller.management.add_to_audit_log_raw(
|
self.controller.management.add_to_audit_log_raw(
|
||||||
self.controller.users.get_user_by_id(exec_user['user_id'])['username'], exec_user['user_id'], server_id,
|
self.controller.users.get_user_by_id(exec_user["user_id"])["username"],
|
||||||
|
exec_user["user_id"],
|
||||||
|
server_id,
|
||||||
f"Backup now executed for server {server_id} ",
|
f"Backup now executed for server {server_id} ",
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
|
||||||
server.backup_server()
|
server.backup_server()
|
||||||
|
|
||||||
elif page == "clear_comms":
|
elif page == "clear_comms":
|
||||||
if exec_user['superuser']:
|
if exec_user["superuser"]:
|
||||||
self.controller.clear_unexecuted_commands()
|
self.controller.clear_unexecuted_commands()
|
||||||
return
|
return
|
||||||
|
|
||||||
elif page == "kill":
|
elif page == "kill":
|
||||||
if not permissions['Commands'] in user_perms:
|
if not permissions["Commands"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Commands")
|
self.redirect("/panel/error?error=Unauthorized access to Commands")
|
||||||
return
|
return
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
svr = self.controller.get_server_obj(server_id)
|
svr = self.controller.get_server_obj(server_id)
|
||||||
try:
|
try:
|
||||||
svr.kill()
|
svr.kill()
|
||||||
@ -341,175 +370,216 @@ class AjaxHandler(BaseHandler):
|
|||||||
svr.cleanup_server_object()
|
svr.cleanup_server_object()
|
||||||
svr.record_server_stats()
|
svr.record_server_stats()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Could not find PID for requested termsig. Full error: {e}")
|
logger.error(
|
||||||
|
f"Could not find PID for requested termsig. Full error: {e}"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
elif page == "eula":
|
elif page == "eula":
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
svr = self.controller.get_server_obj(server_id)
|
svr = self.controller.get_server_obj(server_id)
|
||||||
svr.agree_eula(exec_user['user_id'])
|
svr.agree_eula(exec_user["user_id"])
|
||||||
|
|
||||||
elif page == "restore_backup":
|
elif page == "restore_backup":
|
||||||
if not permissions['Backup'] in user_perms:
|
if not permissions["Backup"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
||||||
return
|
return
|
||||||
server_id = bleach.clean(self.get_argument('id', None))
|
server_id = bleach.clean(self.get_argument("id", None))
|
||||||
zip_name = bleach.clean(self.get_argument('zip_file', None))
|
zip_name = bleach.clean(self.get_argument("zip_file", None))
|
||||||
svr_obj = self.controller.servers.get_server_obj(server_id)
|
svr_obj = self.controller.servers.get_server_obj(server_id)
|
||||||
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
server_data = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
if server_data['type'] == 'minecraft-java':
|
if server_data["type"] == "minecraft-java":
|
||||||
backup_path = svr_obj.backup_path
|
backup_path = svr_obj.backup_path
|
||||||
if helper.validate_traversal(backup_path, zip_name):
|
if helper.validate_traversal(backup_path, zip_name):
|
||||||
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
||||||
new_server = self.controller.import_zip_server(svr_obj.server_name,
|
new_server = self.controller.import_zip_server(
|
||||||
|
svr_obj.server_name,
|
||||||
tempDir,
|
tempDir,
|
||||||
server_data['executable'],
|
server_data["executable"],
|
||||||
'1', '2',
|
"1",
|
||||||
server_data['server_port'])
|
"2",
|
||||||
|
server_data["server_port"],
|
||||||
|
)
|
||||||
new_server_id = new_server
|
new_server_id = new_server
|
||||||
new_server = self.controller.get_server_data(new_server)
|
new_server = self.controller.get_server_data(new_server)
|
||||||
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
self.controller.rename_backup_dir(
|
||||||
|
server_id, new_server_id, new_server["server_uuid"]
|
||||||
|
)
|
||||||
self.controller.remove_server(server_id, True)
|
self.controller.remove_server(server_id, True)
|
||||||
self.redirect('/panel/dashboard')
|
self.redirect("/panel/dashboard")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
backup_path = svr_obj.backup_path
|
backup_path = svr_obj.backup_path
|
||||||
if helper.validate_traversal(backup_path, zip_name):
|
if helper.validate_traversal(backup_path, zip_name):
|
||||||
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
||||||
new_server = self.controller.import_bedrock_zip_server(svr_obj.server_name,
|
new_server = self.controller.import_bedrock_zip_server(
|
||||||
|
svr_obj.server_name,
|
||||||
tempDir,
|
tempDir,
|
||||||
server_data['executable'],
|
server_data["executable"],
|
||||||
server_data['server_port'])
|
server_data["server_port"],
|
||||||
|
)
|
||||||
new_server_id = new_server
|
new_server_id = new_server
|
||||||
new_server = self.controller.get_server_data(new_server)
|
new_server = self.controller.get_server_data(new_server)
|
||||||
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
self.controller.rename_backup_dir(
|
||||||
|
server_id, new_server_id, new_server["server_uuid"]
|
||||||
|
)
|
||||||
self.controller.remove_server(server_id, True)
|
self.controller.remove_server(server_id, True)
|
||||||
self.redirect('/panel/dashboard')
|
self.redirect("/panel/dashboard")
|
||||||
|
|
||||||
elif page == "unzip_server":
|
elif page == "unzip_server":
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument("path", None)
|
||||||
if helper.check_file_exists(path):
|
if helper.check_file_exists(path):
|
||||||
helper.unzipServer(path, exec_user['user_id'])
|
helper.unzipServer(path, exec_user["user_id"])
|
||||||
else:
|
else:
|
||||||
user_id = exec_user['user_id']
|
user_id = exec_user["user_id"]
|
||||||
if user_id:
|
if user_id:
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
user_lang = self.controller.users.get_user_lang_by_id(user_id)
|
user_lang = self.controller.users.get_user_lang_by_id(user_id)
|
||||||
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
websocket_helper.broadcast_user(
|
||||||
'error': translation.translate('error', 'no-file', user_lang)
|
user_id,
|
||||||
})
|
"send_start_error",
|
||||||
|
{"error": translation.translate("error", "no-file", user_lang)},
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
elif page == "backup_select":
|
elif page == "backup_select":
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument("path", None)
|
||||||
helper.backup_select(path, exec_user['user_id'])
|
helper.backup_select(path, exec_user["user_id"])
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def delete(self, page):
|
def delete(self, page):
|
||||||
api_key, _, exec_user = self.current_user
|
api_key, _, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user["superuser"]
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
superuser = superuser and api_key.superuser
|
superuser = superuser and api_key.superuser
|
||||||
|
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
permissions = {
|
permissions = {
|
||||||
'Commands': Enum_Permissions_Server.Commands,
|
"Commands": Enum_Permissions_Server.Commands,
|
||||||
'Terminal': Enum_Permissions_Server.Terminal,
|
"Terminal": Enum_Permissions_Server.Terminal,
|
||||||
'Logs': Enum_Permissions_Server.Logs,
|
"Logs": Enum_Permissions_Server.Logs,
|
||||||
'Schedule': Enum_Permissions_Server.Schedule,
|
"Schedule": Enum_Permissions_Server.Schedule,
|
||||||
'Backup': Enum_Permissions_Server.Backup,
|
"Backup": Enum_Permissions_Server.Backup,
|
||||||
'Files': Enum_Permissions_Server.Files,
|
"Files": Enum_Permissions_Server.Files,
|
||||||
'Config': Enum_Permissions_Server.Config,
|
"Config": Enum_Permissions_Server.Config,
|
||||||
'Players': Enum_Permissions_Server.Players,
|
"Players": Enum_Permissions_Server.Players,
|
||||||
}
|
}
|
||||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
if page == "del_task":
|
if page == "del_task":
|
||||||
if not permissions['Schedule'] in user_perms:
|
if not permissions["Schedule"] in user_perms:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Tasks")
|
self.redirect("/panel/error?error=Unauthorized access to Tasks")
|
||||||
else:
|
else:
|
||||||
sch_id = self.get_argument('schedule_id', '-404')
|
sch_id = self.get_argument("schedule_id", "-404")
|
||||||
self.tasks_manager.remove_job(sch_id)
|
self.tasks_manager.remove_job(sch_id)
|
||||||
|
|
||||||
if page == "del_backup":
|
if page == "del_backup":
|
||||||
if not permissions['Backup'] in user_perms:
|
if not permissions["Backup"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
self.redirect("/panel/error?error=Unauthorized access to Backups")
|
||||||
return
|
return
|
||||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
file_path = helper.get_os_understandable_path(
|
||||||
server_id = self.get_argument('id', None)
|
self.get_body_argument("file_path", default=None, strip=True)
|
||||||
|
)
|
||||||
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
console.warning(f"Delete {file_path} for server {server_id}")
|
console.warning(f"Delete {file_path} for server {server_id}")
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'del_backup'):
|
if not self.check_server_id(server_id, "del_backup"):
|
||||||
return
|
return
|
||||||
else: server_id = bleach.clean(server_id)
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
if not (
|
||||||
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
helper.in_path(
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
helper.get_os_understandable_path(server_info["path"]), file_path
|
||||||
|
)
|
||||||
|
or helper.in_path(
|
||||||
|
helper.get_os_understandable_path(server_info["backup_path"]),
|
||||||
|
file_path,
|
||||||
|
)
|
||||||
|
) or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
logger.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
||||||
console.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
console.warning(f"Invalid path in del_backup ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Delete the file
|
# Delete the file
|
||||||
if helper.validate_traversal(helper.get_os_understandable_path(server_info['backup_path']), file_path):
|
if helper.validate_traversal(
|
||||||
|
helper.get_os_understandable_path(server_info["backup_path"]), file_path
|
||||||
|
):
|
||||||
os.remove(file_path)
|
os.remove(file_path)
|
||||||
|
|
||||||
elif page == "delete_server":
|
elif page == "delete_server":
|
||||||
if not permissions['Config'] in user_perms:
|
if not permissions["Config"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||||
return
|
return
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
logger.info(f"Removing server from panel for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
logger.info(
|
||||||
|
f"Removing server from panel for server: "
|
||||||
|
f"{self.controller.servers.get_server_friendly_name(server_id)}"
|
||||||
|
)
|
||||||
|
|
||||||
server_data = self.controller.get_server_data(server_id)
|
server_data = self.controller.get_server_data(server_id)
|
||||||
server_name = server_data['server_name']
|
server_name = server_data["server_name"]
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(
|
||||||
|
exec_user["user_id"],
|
||||||
f"Deleted server {server_id} named {server_name}",
|
f"Deleted server {server_id} named {server_name}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
|
||||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||||
self.controller.remove_server(server_id, False)
|
self.controller.remove_server(server_id, False)
|
||||||
|
|
||||||
elif page == "delete_server_files":
|
elif page == "delete_server_files":
|
||||||
if not permissions['Config'] in user_perms:
|
if not permissions["Config"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||||
return
|
return
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
logger.info(f"Removing server and all associated files for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
logger.info(
|
||||||
|
f"Removing server and all associated files for server: "
|
||||||
|
f"{self.controller.servers.get_server_friendly_name(server_id)}"
|
||||||
|
)
|
||||||
|
|
||||||
server_data = self.controller.get_server_data(server_id)
|
server_data = self.controller.get_server_data(server_id)
|
||||||
server_name = server_data['server_name']
|
server_name = server_data["server_name"]
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(
|
||||||
|
exec_user["user_id"],
|
||||||
f"Deleted server {server_id} named {server_name}",
|
f"Deleted server {server_id} named {server_name}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
|
||||||
self.tasks_manager.remove_all_server_tasks(server_id)
|
self.tasks_manager.remove_all_server_tasks(server_id)
|
||||||
self.controller.remove_server(server_id, True)
|
self.controller.remove_server(server_id, True)
|
||||||
|
|
||||||
def check_server_id(self, server_id, page_name):
|
def check_server_id(self, server_id, page_name):
|
||||||
if server_id is None:
|
if server_id is None:
|
||||||
logger.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
logger.warning(
|
||||||
console.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
f"Server ID not defined in {page_name} ajax call ({server_id})"
|
||||||
|
)
|
||||||
|
console.warning(
|
||||||
|
f"Server ID not defined in {page_name} ajax call ({server_id})"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
# does this server id exist?
|
# does this server id exist?
|
||||||
if not self.controller.servers.server_id_exists(server_id):
|
if not self.controller.servers.server_id_exists(server_id):
|
||||||
logger.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
logger.warning(
|
||||||
console.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
f"Server ID not found in {page_name} ajax call ({server_id})"
|
||||||
|
)
|
||||||
|
console.warning(
|
||||||
|
f"Server ID not found in {page_name} ajax call ({server_id})"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
return True
|
return True
|
||||||
|
@ -4,33 +4,46 @@ import re
|
|||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
bearer_pattern = re.compile(r'^Bearer', flags=re.IGNORECASE)
|
bearer_pattern = re.compile(r"^Bearer", flags=re.IGNORECASE)
|
||||||
|
|
||||||
|
|
||||||
class ApiHandler(BaseHandler):
|
class ApiHandler(BaseHandler):
|
||||||
|
|
||||||
def return_response(self, status: int, data: dict):
|
def return_response(self, status: int, data: dict):
|
||||||
# Define a standardized response
|
# Define a standardized response
|
||||||
self.set_status(status)
|
self.set_status(status)
|
||||||
self.write(data)
|
self.write(data)
|
||||||
|
|
||||||
def access_denied(self, user, reason=''):
|
def access_denied(self, user, reason=""):
|
||||||
if reason:
|
if reason:
|
||||||
reason = ' because ' + reason
|
reason = " because " + reason
|
||||||
logger.info("User %s from IP %s was denied access to the API route " + self.request.path + reason, user, self.get_remote_ip())
|
logger.info(
|
||||||
self.finish(self.return_response(403, {
|
"User %s from IP %s was denied access to the API route "
|
||||||
'error':'ACCESS_DENIED',
|
+ self.request.path
|
||||||
'info':'You were denied access to the requested resource'
|
+ reason,
|
||||||
}))
|
user,
|
||||||
|
self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
self.finish(
|
||||||
|
self.return_response(
|
||||||
|
403,
|
||||||
|
{
|
||||||
|
"error": "ACCESS_DENIED",
|
||||||
|
"info": "You were denied access to the requested resource",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def authenticate_user(self) -> bool:
|
def authenticate_user(self) -> bool:
|
||||||
try:
|
try:
|
||||||
logger.debug("Searching for specified token")
|
logger.debug("Searching for specified token")
|
||||||
|
|
||||||
api_token = self.get_argument('token', '')
|
api_token = self.get_argument("token", "")
|
||||||
if api_token is None and self.request.headers.get('Authorization'):
|
if api_token is None and self.request.headers.get("Authorization"):
|
||||||
api_token = bearer_pattern.sub('', self.request.headers.get('Authorization'))
|
api_token = bearer_pattern.sub(
|
||||||
|
"", self.request.headers.get("Authorization")
|
||||||
|
)
|
||||||
elif api_token is None:
|
elif api_token is None:
|
||||||
api_token = self.get_cookie('token')
|
api_token = self.get_cookie("token")
|
||||||
user_data = self.controller.users.get_user_by_api_token(api_token)
|
user_data = self.controller.users.get_user_by_api_token(api_token)
|
||||||
|
|
||||||
logger.debug("Checking results")
|
logger.debug("Checking results")
|
||||||
@ -46,10 +59,15 @@ class ApiHandler(BaseHandler):
|
|||||||
return False
|
return False
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning("An error occured while authenticating an API user: %s", e)
|
logger.warning("An error occured while authenticating an API user: %s", e)
|
||||||
self.finish(self.return_response(403, {
|
self.finish(
|
||||||
'error':'ACCESS_DENIED',
|
self.return_response(
|
||||||
'info':'An error occurred while authenticating the user'
|
403,
|
||||||
}))
|
{
|
||||||
|
"error": "ACCESS_DENIED",
|
||||||
|
"info": "An error occured while authenticating the user",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,9 +1,5 @@
|
|||||||
import logging
|
import logging
|
||||||
from typing import (
|
from typing import Union, List, Optional, Tuple, Dict, Any
|
||||||
Union,
|
|
||||||
List,
|
|
||||||
Optional, Tuple, Dict, Any
|
|
||||||
)
|
|
||||||
|
|
||||||
from app.classes.models.users import ApiKeys
|
from app.classes.models.users import ApiKeys
|
||||||
from app.classes.shared.authentication import authentication
|
from app.classes.shared.authentication import authentication
|
||||||
@ -19,25 +15,33 @@ except ModuleNotFoundError as e:
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class BaseHandler(tornado.web.RequestHandler):
|
class BaseHandler(tornado.web.RequestHandler):
|
||||||
|
|
||||||
nobleach = {bool, type(None)}
|
nobleach = {bool, type(None)}
|
||||||
redactables = ("pass", "api")
|
redactables = ("pass", "api")
|
||||||
|
|
||||||
# noinspection PyAttributeOutsideInit
|
# noinspection PyAttributeOutsideInit
|
||||||
def initialize(self, controller: Controller = None, tasks_manager=None, translator=None):
|
def initialize(
|
||||||
|
self, controller: Controller = None, tasks_manager=None, translator=None
|
||||||
|
):
|
||||||
self.controller = controller
|
self.controller = controller
|
||||||
self.tasks_manager = tasks_manager
|
self.tasks_manager = tasks_manager
|
||||||
self.translator = translator
|
self.translator = translator
|
||||||
|
|
||||||
def get_remote_ip(self):
|
def get_remote_ip(self):
|
||||||
remote_ip = self.request.headers.get("X-Real-IP") or \
|
remote_ip = (
|
||||||
self.request.headers.get("X-Forwarded-For") or \
|
self.request.headers.get("X-Real-IP")
|
||||||
self.request.remote_ip
|
or self.request.headers.get("X-Forwarded-For")
|
||||||
|
or self.request.remote_ip
|
||||||
|
)
|
||||||
return remote_ip
|
return remote_ip
|
||||||
|
|
||||||
current_user: Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]
|
current_user: Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]
|
||||||
def get_current_user(self) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
|
|
||||||
|
def get_current_user(
|
||||||
|
self,
|
||||||
|
) -> Optional[Tuple[Optional[ApiKeys], Dict[str, Any], Dict[str, Any]]]:
|
||||||
return authentication.check(self.get_cookie("token"))
|
return authentication.check(self.get_cookie("token"))
|
||||||
|
|
||||||
def autobleach(self, name, text):
|
def autobleach(self, name, text):
|
||||||
@ -56,7 +60,9 @@ class BaseHandler(tornado.web.RequestHandler):
|
|||||||
def get_argument(
|
def get_argument(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
default: Union[None, str, tornado.web._ArgDefaultMarker] = tornado.web._ARG_DEFAULT,
|
default: Union[
|
||||||
|
None, str, tornado.web._ArgDefaultMarker
|
||||||
|
] = tornado.web._ARG_DEFAULT,
|
||||||
strip: bool = True,
|
strip: bool = True,
|
||||||
) -> Optional[str]:
|
) -> Optional[str]:
|
||||||
arg = self._get_argument(name, default, self.request.arguments, strip)
|
arg = self._get_argument(name, default, self.request.arguments, strip)
|
||||||
|
@ -4,6 +4,7 @@ from app.classes.web.base_handler import BaseHandler
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DefaultHandler(BaseHandler):
|
class DefaultHandler(BaseHandler):
|
||||||
|
|
||||||
# Override prepare() instead of get() to cover all possible HTTP methods.
|
# Override prepare() instead of get() to cover all possible HTTP methods.
|
||||||
|
@ -17,8 +17,8 @@ except ModuleNotFoundError as e:
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class FileHandler(BaseHandler):
|
|
||||||
|
|
||||||
|
class FileHandler(BaseHandler):
|
||||||
def render_page(self, template, page_data):
|
def render_page(self, template, page_data):
|
||||||
self.render(
|
self.render(
|
||||||
template,
|
template,
|
||||||
@ -29,292 +29,367 @@ class FileHandler(BaseHandler):
|
|||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def get(self, page):
|
def get(self, page):
|
||||||
api_key, _, exec_user = self.current_user
|
api_key, _, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user["superuser"]
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
superuser = superuser and api_key.superuser
|
superuser = superuser and api_key.superuser
|
||||||
|
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
permissions = {
|
permissions = {
|
||||||
'Commands': Enum_Permissions_Server.Commands,
|
"Commands": Enum_Permissions_Server.Commands,
|
||||||
'Terminal': Enum_Permissions_Server.Terminal,
|
"Terminal": Enum_Permissions_Server.Terminal,
|
||||||
'Logs': Enum_Permissions_Server.Logs,
|
"Logs": Enum_Permissions_Server.Logs,
|
||||||
'Schedule': Enum_Permissions_Server.Schedule,
|
"Schedule": Enum_Permissions_Server.Schedule,
|
||||||
'Backup': Enum_Permissions_Server.Backup,
|
"Backup": Enum_Permissions_Server.Backup,
|
||||||
'Files': Enum_Permissions_Server.Files,
|
"Files": Enum_Permissions_Server.Files,
|
||||||
'Config': Enum_Permissions_Server.Config,
|
"Config": Enum_Permissions_Server.Config,
|
||||||
'Players': Enum_Permissions_Server.Players,
|
"Players": Enum_Permissions_Server.Players,
|
||||||
}
|
}
|
||||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
|
|
||||||
if page == "get_file":
|
if page == "get_file":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
file_path = helper.get_os_understandable_path(self.get_argument('file_path', None))
|
file_path = helper.get_os_understandable_path(
|
||||||
|
self.get_argument("file_path", None)
|
||||||
|
)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_file'):
|
if not self.check_server_id(server_id, "get_file"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
if not helper.in_path(
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
helper.get_os_understandable_path(
|
||||||
logger.warning(f"Invalid path in get_file file file ajax call ({file_path})")
|
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||||
console.warning(f"Invalid path in get_file file file ajax call ({file_path})")
|
),
|
||||||
|
file_path,
|
||||||
|
) or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid path in get_file file file ajax call ({file_path})"
|
||||||
|
)
|
||||||
|
console.warning(
|
||||||
|
f"Invalid path in get_file file file ajax call ({file_path})"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
error = None
|
error = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with open(file_path, encoding='utf-8') as file:
|
with open(file_path, encoding="utf-8") as file:
|
||||||
file_contents = file.read()
|
file_contents = file.read()
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
file_contents = ''
|
file_contents = ""
|
||||||
error = 'UnicodeDecodeError'
|
error = "UnicodeDecodeError"
|
||||||
|
|
||||||
self.write({
|
self.write({"content": file_contents, "error": error})
|
||||||
'content': file_contents,
|
|
||||||
'error': error
|
|
||||||
})
|
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
elif page == "get_tree":
|
elif page == "get_tree":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument("path", None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_tree'):
|
if not self.check_server_id(server_id, "get_tree"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
if helper.validate_traversal(
|
||||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||||
helper.generate_tree(path))
|
):
|
||||||
|
self.write(
|
||||||
|
helper.get_os_understandable_path(path)
|
||||||
|
+ "\n"
|
||||||
|
+ helper.generate_tree(path)
|
||||||
|
)
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
elif page == "get_dir":
|
elif page == "get_dir":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument("path", None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_tree'):
|
if not self.check_server_id(server_id, "get_tree"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
if helper.validate_traversal(
|
||||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
self.controller.servers.get_server_data_by_id(server_id)["path"], path
|
||||||
helper.generate_dir(path))
|
):
|
||||||
|
self.write(
|
||||||
|
helper.get_os_understandable_path(path)
|
||||||
|
+ "\n"
|
||||||
|
+ helper.generate_dir(path)
|
||||||
|
)
|
||||||
self.finish()
|
self.finish()
|
||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def post(self, page):
|
def post(self, page):
|
||||||
api_key, _, exec_user = self.current_user
|
api_key, _, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user["superuser"]
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
superuser = superuser and api_key.superuser
|
superuser = superuser and api_key.superuser
|
||||||
|
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument("id", None)
|
||||||
|
|
||||||
permissions = {
|
permissions = {
|
||||||
'Commands': Enum_Permissions_Server.Commands,
|
"Commands": Enum_Permissions_Server.Commands,
|
||||||
'Terminal': Enum_Permissions_Server.Terminal,
|
"Terminal": Enum_Permissions_Server.Terminal,
|
||||||
'Logs': Enum_Permissions_Server.Logs,
|
"Logs": Enum_Permissions_Server.Logs,
|
||||||
'Schedule': Enum_Permissions_Server.Schedule,
|
"Schedule": Enum_Permissions_Server.Schedule,
|
||||||
'Backup': Enum_Permissions_Server.Backup,
|
"Backup": Enum_Permissions_Server.Backup,
|
||||||
'Files': Enum_Permissions_Server.Files,
|
"Files": Enum_Permissions_Server.Files,
|
||||||
'Config': Enum_Permissions_Server.Config,
|
"Config": Enum_Permissions_Server.Config,
|
||||||
'Players': Enum_Permissions_Server.Players,
|
"Players": Enum_Permissions_Server.Players,
|
||||||
}
|
}
|
||||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
user_perms = self.controller.server_perms.get_user_id_permissions_list(
|
||||||
|
exec_user["user_id"], server_id
|
||||||
|
)
|
||||||
|
|
||||||
if page == "create_file":
|
if page == "create_file":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
file_parent = helper.get_os_understandable_path(self.get_body_argument('file_parent', default=None, strip=True))
|
file_parent = helper.get_os_understandable_path(
|
||||||
file_name = self.get_body_argument('file_name', default=None, strip=True)
|
self.get_body_argument("file_parent", default=None, strip=True)
|
||||||
|
)
|
||||||
|
file_name = self.get_body_argument("file_name", default=None, strip=True)
|
||||||
file_path = os.path.join(file_parent, file_name)
|
file_path = os.path.join(file_parent, file_name)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'create_file'):
|
if not self.check_server_id(server_id, "create_file"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path) \
|
if not helper.in_path(
|
||||||
or helper.check_file_exists(os.path.abspath(file_path)):
|
helper.get_os_understandable_path(
|
||||||
logger.warning(f"Invalid path in create_file file ajax call ({file_path})")
|
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||||
console.warning(f"Invalid path in create_file file ajax call ({file_path})")
|
),
|
||||||
|
file_path,
|
||||||
|
) or helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid path in create_file file ajax call ({file_path})"
|
||||||
|
)
|
||||||
|
console.warning(
|
||||||
|
f"Invalid path in create_file file ajax call ({file_path})"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
# Create the file by opening it
|
# Create the file by opening it
|
||||||
with open(file_path, 'w', encoding='utf-8') as file_object:
|
with open(file_path, "w", encoding="utf-8") as file_object:
|
||||||
file_object.close()
|
file_object.close()
|
||||||
|
|
||||||
elif page == "create_dir":
|
elif page == "create_dir":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
dir_parent = helper.get_os_understandable_path(self.get_body_argument('dir_parent', default=None, strip=True))
|
dir_parent = helper.get_os_understandable_path(
|
||||||
dir_name = self.get_body_argument('dir_name', default=None, strip=True)
|
self.get_body_argument("dir_parent", default=None, strip=True)
|
||||||
|
)
|
||||||
|
dir_name = self.get_body_argument("dir_name", default=None, strip=True)
|
||||||
dir_path = os.path.join(dir_parent, dir_name)
|
dir_path = os.path.join(dir_parent, dir_name)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'create_dir'):
|
if not self.check_server_id(server_id, "create_dir"):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), dir_path) \
|
if not helper.in_path(
|
||||||
or helper.check_path_exists(os.path.abspath(dir_path)):
|
helper.get_os_understandable_path(
|
||||||
logger.warning(f"Invalid path in create_dir file ajax call ({dir_path})")
|
self.controller.servers.get_server_data_by_id(server_id)["path"]
|
||||||
console.warning(f"Invalid path in create_dir file ajax call ({dir_path})")
|
),
|
||||||
|
dir_path,
|
||||||
|
) or helper.check_path_exists(os.path.abspath(dir_path)):
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid path in create_dir file ajax call ({dir_path})"
|
||||||
|
)
|
||||||
|
console.warning(
|
||||||
|
f"Invalid path in create_dir file ajax call ({dir_path})"
|
||||||
|
)
|
||||||
return
|
return
|
||||||
# Create the directory
|
# Create the directory
|
||||||
os.mkdir(dir_path)
|
os.mkdir(dir_path)
|
||||||
|
|
||||||
elif page == "unzip_file":
|
elif page == "unzip_file":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions["Files"] in user_perms:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
self.redirect("/panel/error?error=Unauthorized access to Files")
|
self.redirect("/panel/error?error=Unauthorized access to Files")
|
||||||
return
|
return
|
||||||
path = helper.get_os_understandable_path(self.get_argument('path', None))
|
path = helper.get_os_understandable_path(self.get_argument("path", None))
|
||||||
helper.unzipFile(path)
|
helper.unzipFile(path)
|
||||||
self.redirect(f"/panel/server_detail?id={server_id}&subpage=files")
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=files")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
    @tornado.web.authenticated
    def delete(self, page):
        api_key, _, exec_user = self.current_user
        superuser = exec_user["superuser"]
        if api_key is not None:
            superuser = superuser and api_key.superuser

        server_id = self.get_argument("id", None)

        permissions = {
            "Commands": Enum_Permissions_Server.Commands,
            "Terminal": Enum_Permissions_Server.Terminal,
            "Logs": Enum_Permissions_Server.Logs,
            "Schedule": Enum_Permissions_Server.Schedule,
            "Backup": Enum_Permissions_Server.Backup,
            "Files": Enum_Permissions_Server.Files,
            "Config": Enum_Permissions_Server.Config,
            "Players": Enum_Permissions_Server.Players,
        }
        user_perms = self.controller.server_perms.get_user_id_permissions_list(
            exec_user["user_id"], server_id
        )
        if page == "del_file":
            if not permissions["Files"] in user_perms:
                if not superuser:
                    self.redirect("/panel/error?error=Unauthorized access to Files")
                    return
            file_path = helper.get_os_understandable_path(
                self.get_body_argument("file_path", default=None, strip=True)
            )

            console.warning(f"Delete {file_path} for server {server_id}")

            if not self.check_server_id(server_id, "del_file"):
                return
            else:
                server_id = bleach.clean(server_id)

            server_info = self.controller.servers.get_server_data_by_id(server_id)
            if not (
                helper.in_path(
                    helper.get_os_understandable_path(server_info["path"]), file_path
                )
                or helper.in_path(
                    helper.get_os_understandable_path(server_info["backup_path"]),
                    file_path,
                )
            ) or not helper.check_file_exists(os.path.abspath(file_path)):
                logger.warning(f"Invalid path in del_file file ajax call ({file_path})")
                console.warning(
                    f"Invalid path in del_file file ajax call ({file_path})"
                )
                return

            # Delete the file
            file_helper.del_file(file_path)

        elif page == "del_dir":
            if not permissions["Files"] in user_perms:
                if not superuser:
                    self.redirect("/panel/error?error=Unauthorized access to Files")
                    return
            dir_path = helper.get_os_understandable_path(
                self.get_body_argument("dir_path", default=None, strip=True)
            )

            console.warning(f"Delete {dir_path} for server {server_id}")

            if not self.check_server_id(server_id, "del_dir"):
                return
            else:
                server_id = bleach.clean(server_id)

            server_info = self.controller.servers.get_server_data_by_id(server_id)
            if not helper.in_path(
                helper.get_os_understandable_path(server_info["path"]), dir_path
            ) or not helper.check_path_exists(os.path.abspath(dir_path)):
                logger.warning(f"Invalid path in del_file file ajax call ({dir_path})")
                console.warning(f"Invalid path in del_file file ajax call ({dir_path})")
                return

            # Delete the directory
            # os.rmdir(dir_path) # Would only remove empty directories
            if helper.validate_traversal(
                helper.get_os_understandable_path(server_info["path"]), dir_path
            ):
                # Removes also when there are contents
                file_helper.del_dirs(dir_path)

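Both delete branches refuse to touch anything that is not provably inside the server (or backup) directory before calling file_helper. A minimal sketch of how such a containment check can be written with os.path alone is shown below; the function name and behaviour are illustrative assumptions, not the project's actual helper.in_path / validate_traversal implementation.

import os


def in_path(parent: str, child: str) -> bool:
    # Illustrative sketch only: resolve symlinks on both sides, then make
    # sure the child really sits below the parent before allowing a
    # delete/rename on it.
    parent = os.path.realpath(parent)
    child = os.path.realpath(child)
    try:
        return os.path.commonpath([parent, child]) == parent
    except ValueError:
        # Different drives / mixed path types can never be nested
        return False
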
    @tornado.web.authenticated
    def put(self, page):
        api_key, _, exec_user = self.current_user
        superuser = exec_user["superuser"]
        if api_key is not None:
            superuser = superuser and api_key.superuser

        server_id = self.get_argument("id", None)
        permissions = {
            "Commands": Enum_Permissions_Server.Commands,
            "Terminal": Enum_Permissions_Server.Terminal,
            "Logs": Enum_Permissions_Server.Logs,
            "Schedule": Enum_Permissions_Server.Schedule,
            "Backup": Enum_Permissions_Server.Backup,
            "Files": Enum_Permissions_Server.Files,
            "Config": Enum_Permissions_Server.Config,
            "Players": Enum_Permissions_Server.Players,
        }
        user_perms = self.controller.server_perms.get_user_id_permissions_list(
            exec_user["user_id"], server_id
        )
        if page == "save_file":
            if not permissions["Files"] in user_perms:
                if not superuser:
                    self.redirect("/panel/error?error=Unauthorized access to Files")
                    return
            file_contents = self.get_body_argument(
                "file_contents", default=None, strip=True
            )
            file_path = helper.get_os_understandable_path(
                self.get_body_argument("file_path", default=None, strip=True)
            )

            if not self.check_server_id(server_id, "save_file"):
                return
            else:
                server_id = bleach.clean(server_id)

            if not helper.in_path(
                helper.get_os_understandable_path(
                    self.controller.servers.get_server_data_by_id(server_id)["path"]
                ),
                file_path,
            ) or not helper.check_file_exists(os.path.abspath(file_path)):
                logger.warning(
                    f"Invalid path in save_file file ajax call ({file_path})"
                )
                console.warning(
                    f"Invalid path in save_file file ajax call ({file_path})"
                )
                return

            # Open the file in write mode and store the content in file_object
            with open(file_path, "w", encoding="utf-8") as file_object:
                file_object.write(file_contents)

        elif page == "rename_file":
            if not permissions["Files"] in user_perms:
                if not superuser:
                    self.redirect("/panel/error?error=Unauthorized access to Files")
                    return
            item_path = helper.get_os_understandable_path(
                self.get_body_argument("item_path", default=None, strip=True)
            )
            new_item_name = self.get_body_argument(
                "new_item_name", default=None, strip=True
            )

            if not self.check_server_id(server_id, "rename_file"):
                return
            else:
                server_id = bleach.clean(server_id)

@@ -324,53 +399,73 @@ class FileHandler(BaseHandler):
                console.warning("Invalid path(s) in rename_file file ajax call")
                return

            if not helper.in_path(
                helper.get_os_understandable_path(
                    self.controller.servers.get_server_data_by_id(server_id)["path"]
                ),
                item_path,
            ) or not helper.check_path_exists(os.path.abspath(item_path)):
                logger.warning(
                    f"Invalid old name path in rename_file file ajax call ({server_id})"
                )
                console.warning(
                    f"Invalid old name path in rename_file file ajax call ({server_id})"
                )
                return

            new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)

            if not helper.in_path(
                helper.get_os_understandable_path(
                    self.controller.servers.get_server_data_by_id(server_id)["path"]
                ),
                new_item_path,
            ) or helper.check_path_exists(os.path.abspath(new_item_path)):
                logger.warning(
                    f"Invalid new name path in rename_file file ajax call ({server_id})"
                )
                console.warning(
                    f"Invalid new name path in rename_file file ajax call ({server_id})"
                )
                return

            # RENAME
            os.rename(item_path, new_item_path)

    @tornado.web.authenticated
    def patch(self, page):
        api_key, _, exec_user = self.current_user
        superuser = exec_user["superuser"]
        if api_key is not None:
            superuser = superuser and api_key.superuser

        server_id = self.get_argument("id", None)
        permissions = {
            "Commands": Enum_Permissions_Server.Commands,
            "Terminal": Enum_Permissions_Server.Terminal,
            "Logs": Enum_Permissions_Server.Logs,
            "Schedule": Enum_Permissions_Server.Schedule,
            "Backup": Enum_Permissions_Server.Backup,
            "Files": Enum_Permissions_Server.Files,
            "Config": Enum_Permissions_Server.Config,
            "Players": Enum_Permissions_Server.Players,
        }
        user_perms = self.controller.server_perms.get_user_id_permissions_list(
            exec_user["user_id"], server_id
        )
        if page == "rename_file":
            if not permissions["Files"] in user_perms:
                if not superuser:
                    self.redirect("/panel/error?error=Unauthorized access to Files")
                    return
            item_path = helper.get_os_understandable_path(
                self.get_body_argument("item_path", default=None, strip=True)
            )
            new_item_name = self.get_body_argument(
                "new_item_name", default=None, strip=True
            )

            if not self.check_server_id(server_id, "rename_file"):
                return
            else:
                server_id = bleach.clean(server_id)

@@ -380,19 +475,34 @@ class FileHandler(BaseHandler):
                console.warning("Invalid path(s) in rename_file file ajax call")
                return

            if not helper.in_path(
                helper.get_os_understandable_path(
                    self.controller.servers.get_server_data_by_id(server_id)["path"]
                ),
                item_path,
            ) or not helper.check_path_exists(os.path.abspath(item_path)):
                logger.warning(
                    f"Invalid old name path in rename_file file ajax call ({server_id})"
                )
                console.warning(
                    f"Invalid old name path in rename_file file ajax call ({server_id})"
                )
                return

            new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)

            if not helper.in_path(
                helper.get_os_understandable_path(
                    self.controller.servers.get_server_data_by_id(server_id)["path"]
                ),
                new_item_path,
            ) or helper.check_path_exists(os.path.abspath(new_item_path)):
                logger.warning(
                    f"Invalid new name path in rename_file file ajax call ({server_id})"
                )
                console.warning(
                    f"Invalid new name path in rename_file file ajax call ({server_id})"
                )
                return

            # RENAME
@@ -400,15 +510,23 @@ class FileHandler(BaseHandler):

    def check_server_id(self, server_id, page_name):
        if server_id is None:
            logger.warning(
                f"Server ID not defined in {page_name} file ajax call ({server_id})"
            )
            console.warning(
                f"Server ID not defined in {page_name} file ajax call ({server_id})"
            )
            return
        else:
            server_id = bleach.clean(server_id)

            # does this server id exist?
            if not self.controller.servers.server_id_exists(server_id):
                logger.warning(
                    f"Server ID not found in {page_name} file ajax call ({server_id})"
                )
                console.warning(
                    f"Server ID not found in {page_name} file ajax call ({server_id})"
                )
                return
        return True

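Most of the churn in this handler is mechanical: Black normalises string quotes to double quotes and rewraps any line that would exceed 88 columns into the hanging-indent form seen above. A small self-contained illustration of the before/after shape (this snippet is not taken from the repository):

import logging

logger = logging.getLogger(__name__)
page_name, server_id = "del_file", None  # stand-in values for the example

# Before Black: single quotes, one long line (it overflows 88 columns once
# indented inside the handler class).
logger.warning(f'Server ID not found in {page_name} file ajax call ({server_id})')

# After `black .`: double quotes everywhere, and the over-long call is
# rewrapped with a hanging indent; when several arguments are split, Black
# also adds a trailing comma, as seen throughout this diff.
logger.warning(
    f"Server ID not found in {page_name} file ajax call ({server_id})"
)
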
@@ -11,16 +11,17 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)


class HTTPHandler(BaseHandler):
    def get(self):
        url = str(self.request.host)
        port = 443
        url_list = url.split(":")
        if url_list[0] != "":
            url = "https://" + url_list[0]
        else:
            url = "https://" + url
        db_port = helper.get_setting("https_port")
        try:
            resp = requests.get(url + ":" + str(port))
            resp.raise_for_status()
@@ -35,10 +36,10 @@ class HTTPHandlerPage(BaseHandler):
        port = 443
        url_list = url.split(":")
        if url_list[0] != "":
            url = "https://" + url_list[0]
        else:
            url = "https://" + url
        db_port = helper.get_setting("https_port")
        try:
            resp = requests.get(url + ":" + str(port))
            resp.raise_for_status()

@@ -5,13 +5,15 @@ from app.classes.shared.helpers import helper
from app.classes.web.base_handler import BaseHandler

logger = logging.getLogger(__name__)


class HTTPHandlerPage(BaseHandler):
    def get(self):
        url = self.request.full_url
        port = 443
        if url[len(url) - 1] == "/":
            url = url.strip(url[len(url) - 1])
        url_list = url.split("/")
        if url_list[0] != "":
            primary_url = url_list[0] + ":" + str(port) + "/"
            backup_url = url_list[0] + ":" + str(helper.get_setting("https_port")) + "/"
@@ -20,7 +22,7 @@ class HTTPHandlerPage(BaseHandler):
                backup_url += url_list[i + 1]
        else:
            primary_url = url + str(port)
            backup_url = url + str(helper.get_setting("https_port"))

        try:
            resp = requests.get(primary_url)
@@ -28,4 +30,4 @@ class HTTPHandlerPage(BaseHandler):
            url = primary_url
        except Exception:
            url = backup_url
        self.redirect("https://" + url + ":" + str(port))

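Both HTTP handlers follow the same probe-then-redirect idea: build an https URL from the incoming request, try the standard TLS port first, and fall back to the configured https_port if that probe fails. A compact sketch of the pattern; the function name and the 8443 fallback are assumptions for illustration, not the project's code:

import requests


def pick_https_url(host: str, default_port: int = 443, fallback_port: int = 8443) -> str:
    # Illustrative sketch: prefer the standard TLS port, fall back to the
    # configured one if the probe fails for any reason.
    primary = f"https://{host}:{default_port}"
    backup = f"https://{host}:{fallback_port}"
    try:
        requests.get(primary, timeout=5).raise_for_status()
        return primary
    except requests.RequestException:
        return backup
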
File diff suppressed because it is too large
@@ -14,18 +14,20 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)


class PublicHandler(BaseHandler):
    def set_current_user(self, user_id: str = None):

        expire_days = helper.get_setting("cookie_expire")

        # if helper comes back with false
        if not expire_days:
            expire_days = "5"

        if user_id is not None:
            self.set_cookie(
                "token", authentication.generate(user_id), expires_days=int(expire_days)
            )
        else:
            self.clear_cookie("token")
            # self.clear_cookie("user")

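set_current_user stores a generated token in a cookie whose lifetime comes from the cookie_expire setting, defaulting to five days when the setting is missing. A rough sketch of the same idea using Tornado's built-in signed cookies; authentication.generate and the setting lookup are Crafty internals, so the mixin below is an assumption-labelled stand-in rather than the real API:

import tornado.web


class SessionMixin(tornado.web.RequestHandler):
    # Sketch only: Crafty signs its own token via authentication.generate();
    # Tornado's secure cookies give a similar "signed value + expiry" shape
    # (requires cookie_secret in the Application settings).
    def login(self, user_id: str, expire_days: int = 5) -> None:
        self.set_secure_cookie("token", str(user_id), expires_days=expire_days)

    def logout(self) -> None:
        self.clear_cookie("token")
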
@@ -33,17 +35,18 @@ class PublicHandler(BaseHandler):

    def get(self, page=None):

        error = bleach.clean(self.get_argument("error", "Invalid Login!"))
        error_msg = bleach.clean(self.get_argument("error_msg", ""))

        page_data = {
            "version": helper.get_version_string(),
            "error": error,
            "lang": helper.get_setting("language"),
            "lang_page": helper.getLangPage(helper.get_setting("language")),
            "query": "",
        }
        if self.request.query:
            page_data["query"] = self.request.query

        # sensible defaults
        template = "public/404.html"
@@ -61,50 +64,52 @@ class PublicHandler(BaseHandler):
            self.clear_cookie("token")
            # self.clear_cookie("user")
            # self.clear_cookie("user_data")
            self.redirect("/public/login")
            return

        # if we have no page, let's go to login
        else:
            if self.request.query:
                self.redirect("/public/login?" + self.request.query)
            else:
                self.redirect("/public/login")
            return

        self.render(
            template,
            data=page_data,
            translate=self.translator.translate,
            error_msg=error_msg,
        )

    def post(self, page=None):

        error = bleach.clean(self.get_argument("error", "Invalid Login!"))
        error_msg = bleach.clean(self.get_argument("error_msg", ""))

        page_data = {
            "version": helper.get_version_string(),
            "error": error,
            "lang": helper.get_setting("language"),
            "lang_page": helper.getLangPage(helper.get_setting("language")),
            "query": "",
        }
        if self.request.query:
            page_data["query"] = self.request.query

        if page == "login":

            next_page = "/public/login"
            if self.request.query:
                next_page = "/public/login?" + self.request.query

            entered_username = bleach.clean(self.get_argument("username"))
            entered_password = bleach.clean(self.get_argument("password"))

            # pylint: disable=no-member
            user_data = Users.get_or_none(
                fn.Lower(Users.username) == entered_username.lower()
            )

            # if we don't have a user
            if not user_data:

@@ -113,21 +118,28 @@ class PublicHandler(BaseHandler):
                # self.clear_cookie("user_data")
                self.clear_cookie("token")
                if self.request.query:
                    self.redirect(
                        f"/public/login?error_msg={error_msg}&{self.request.query}"
                    )
                else:
                    self.redirect(f"/public/login?error_msg={error_msg}")
                return

            # if they are disabled
            if not user_data.enabled:
                error_msg = (
                    "User account disabled. Please contact "
                    "your system administrator for more info."
                )
                # self.clear_cookie("user")
                # self.clear_cookie("user_data")
                self.clear_cookie("token")
                if self.request.query:
                    self.redirect(
                        f"/public/login?error_msg={error_msg}&{self.request.query}"
                    )
                else:
                    self.redirect(f"/public/login?error_msg={error_msg}")
                return

            login_result = helper.verify_pass(entered_password, user_data.password)

@@ -135,20 +147,27 @@ class PublicHandler(BaseHandler):
            # Valid Login
            if login_result:
                self.set_current_user(user_data.user_id)
                logger.info(
                    f"User: {user_data} Logged in from IP: {self.get_remote_ip()}"
                )

                # record this login
                q = (
                    Users.select()
                    .where(Users.username == entered_username.lower())
                    .get()
                )
                q.last_ip = self.get_remote_ip()
                q.last_login = helper.get_time_as_string()
                q.save()

                # log this login
                self.controller.management.add_to_audit_log(
                    user_data.user_id, "Logged in", 0, self.get_remote_ip()
                )

                if self.request.query_arguments.get("next"):
                    next_page = self.request.query_arguments.get("next")[0].decode()
                else:
                    next_page = "/panel/dashboard"

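The login branch verifies the submitted password with helper.verify_pass, then records last_ip/last_login and writes an audit entry before redirecting. The verification step itself is typically a hash comparison rather than a string compare; a hedged example using passlib (an assumption for illustration, the real helper may use a different scheme):

from passlib.hash import bcrypt_sha256


def verify_pass(entered_password: str, stored_hash: str) -> bool:
    # Sketch: compare the submitted password against the stored hash;
    # never compare plaintext strings directly.
    try:
        return bcrypt_sha256.verify(entered_password, stored_hash)
    except ValueError:
        # Malformed hash in the database
        return False
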
@@ -159,13 +178,17 @@ class PublicHandler(BaseHandler):
                self.clear_cookie("token")
                error_msg = "Inncorrect username or password. Please try again."
                # log this failed login attempt
                self.controller.management.add_to_audit_log(
                    user_data.user_id, "Tried to log in", 0, self.get_remote_ip()
                )
                if self.request.query:
                    self.redirect(
                        f"/public/login?error_msg={error_msg}&{self.request.query}"
                    )
                else:
                    self.redirect(f"/public/login?error_msg={error_msg}")
        else:
            if self.request.query:
                self.redirect("/public/login?" + self.request.query)
            else:
                self.redirect("/public/login")

@@ -20,13 +20,13 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)


class ServerHandler(BaseHandler):
    @tornado.web.authenticated
    def get(self, page):
        # pylint: disable=unused-variable
        api_key, token_data, exec_user = self.current_user
        superuser = exec_user["superuser"]
        if api_key is not None:
            superuser = superuser and api_key.superuser

@@ -34,50 +34,65 @@ class ServerHandler(BaseHandler):
        if superuser:
            defined_servers = self.controller.list_defined_servers()
            exec_user_role.add("Super User")
            exec_user_crafty_permissions = (
                self.controller.crafty_perms.list_defined_crafty_permissions()
            )
            list_roles = []
            for role in self.controller.roles.get_all_roles():
                list_roles.append(self.controller.roles.get_role(role.role_id))
        else:
            exec_user_crafty_permissions = (
                self.controller.crafty_perms.get_crafty_permissions_list(
                    exec_user["user_id"]
                )
            )
            defined_servers = self.controller.servers.get_authorized_servers(
                exec_user["user_id"]
            )
            list_roles = []
            for r in exec_user["roles"]:
                role = self.controller.roles.get_role(r)
                exec_user_role.add(role["role_name"])
                list_roles.append(self.controller.roles.get_role(role["role_id"]))

        template = "public/404.html"

        page_data = {
            "version_data": helper.get_version_string(),
            "user_data": exec_user,
            "user_role": exec_user_role,
            "roles": list_roles,
            "user_crafty_permissions": exec_user_crafty_permissions,
            "crafty_permissions": {
                "Server_Creation": Enum_Permissions_Crafty.Server_Creation,
                "User_Config": Enum_Permissions_Crafty.User_Config,
                "Roles_Config": Enum_Permissions_Crafty.Roles_Config,
            },
            "server_stats": {
                "total": len(self.controller.list_defined_servers()),
                "running": len(self.controller.list_running_servers()),
                "stopped": (
                    len(self.controller.list_defined_servers())
                    - len(self.controller.list_running_servers())
                ),
            },
            "hosts_data": self.controller.management.get_latest_hosts_stats(),
            "menu_servers": defined_servers,
            "show_contribute": helper.get_setting("show_contribute_link", True),
            "lang": self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
            "lang_page": helper.getLangPage(
                self.controller.users.get_user_lang_by_id(exec_user["user_id"])
            ),
            "api_key": {
                "name": api_key.name,
                "created": api_key.created,
                "server_permissions": api_key.server_permissions,
                "crafty_permissions": api_key.crafty_permissions,
                "superuser": api_key.superuser,
            }
            if api_key is not None
            else None,
            "superuser": superuser,
        }

        if helper.get_setting("allow_nsfw_profile_pictures"):

@@ -85,10 +100,16 @@ class ServerHandler(BaseHandler):
        else:
            rating = "g"

        if exec_user["email"] != "default@example.com" or "":
            g = libgravatar.Gravatar(libgravatar.sanitize_email(exec_user["email"]))
            url = g.get_image(
                size=80,
                default="404",
                force_default=False,
                rating=rating,
                filetype_extension=False,
                use_ssl=True,
            )  # + "?d=404"
            if requests.head(url).status_code != 404:
                profile_url = url
            else:

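The Gravatar lookup above asks for default="404" so that a missing avatar produces an HTTP 404 on the follow-up HEAD probe instead of a generated placeholder image; only a non-404 response is used as the profile picture. A trimmed, self-contained illustration of that probe (same libgravatar calls, simplified control flow; the helper name is made up for the example):

import libgravatar
import requests


def gravatar_or_default(email: str, fallback: str) -> str:
    # Sketch: ask Gravatar for a 404 instead of a placeholder, then only
    # trust the URL if a HEAD request says the image actually exists.
    g = libgravatar.Gravatar(libgravatar.sanitize_email(email))
    url = g.get_image(size=80, default="404", use_ssl=True)
    return url if requests.head(url).status_code != 404 else fallback
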
@@ -96,22 +117,34 @@ class ServerHandler(BaseHandler):
        else:
            profile_url = "/static/assets/images/faces-clipart/pic-3.png"

        page_data["user_image"] = profile_url
        if superuser:
            page_data["roles"] = list_roles

        if page == "step1":
            if not superuser and not self.controller.crafty_perms.can_create_server(
                exec_user["user_id"]
            ):
                self.redirect(
                    "/panel/error?error=Unauthorized access: "
                    "not a server creator or server limit reached"
                )
                return

            page_data["server_types"] = server_jar_obj.get_serverjar_data()
            page_data["js_server_types"] = json.dumps(
                server_jar_obj.get_serverjar_data()
            )
            template = "server/wizard.html"

        if page == "bedrock_step1":
            if not superuser and not self.controller.crafty_perms.can_create_server(
                exec_user["user_id"]
            ):
                self.redirect(
                    "/panel/error?error=Unauthorized access: "
                    "not a server creator or server limit reached"
                )
                return

            template = "server/bedrock_wizard.html"

@@ -126,17 +159,19 @@ class ServerHandler(BaseHandler):
    def post(self, page):
        # pylint: disable=unused-variable
        api_key, token_data, exec_user = self.current_user
        superuser = exec_user["superuser"]
        if api_key is not None:
            superuser = superuser and api_key.superuser

        template = "public/404.html"
        page_data = {
            "version_data": "version_data_here",  # TODO
            "user_data": exec_user,
            "show_contribute": helper.get_setting("show_contribute_link", True),
            "lang": self.controller.users.get_user_lang_by_id(exec_user["user_id"]),
            "lang_page": helper.getLangPage(
                self.controller.users.get_user_lang_by_id(exec_user["user_id"])
            ),
        }

        if page == "command":

@@ -145,38 +180,50 @@ class ServerHandler(BaseHandler):

        if server_id is not None:
            if command == "clone_server":

                def is_name_used(name):
                    for server in self.controller.servers.get_all_defined_servers():
                        if server["server_name"] == name:
                            return True
                    return

                server_data = self.controller.servers.get_server_data_by_id(
                    server_id
                )
                server_uuid = server_data.get("server_uuid")
                new_server_name = server_data.get("server_name") + " (Copy)"

                name_counter = 1
                while is_name_used(new_server_name):
                    name_counter += 1
                    new_server_name = (
                        server_data.get("server_name") + f" (Copy {name_counter})"
                    )

                new_server_uuid = helper.create_uuid()
                while os.path.exists(
                    os.path.join(helper.servers_dir, new_server_uuid)
                ):
                    new_server_uuid = helper.create_uuid()
                new_server_path = os.path.join(helper.servers_dir, new_server_uuid)

                # copy the old server
                file_helper.copy_dir(server_data.get("path"), new_server_path)

                # TODO get old server DB data to individual variables
                stop_command = server_data.get("stop_command")
                new_server_command = str(
                    server_data.get("execution_command")
                ).replace(server_uuid, new_server_uuid)
                new_executable = server_data.get("executable")
                new_server_log_file = str(
                    helper.get_os_understandable_path(server_data.get("log_path"))
                ).replace(server_uuid, new_server_uuid)
                server_port = server_data.get("server_port")
                server_type = server_data.get("type")

                self.controller.servers.create_server(
                    new_server_name,
                    new_server_uuid,
                    new_server_path,
                    "",

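clone_server picks the new display name by appending " (Copy)" and counting up until the name is unused, and runs a similar collision loop for the new UUID's directory. The naming part reduces to a small helper; a standalone sketch of that loop (illustrative only, the real handler keeps it inline):

def unique_copy_name(base_name: str, taken: set) -> str:
    # Sketch of the "(Copy)", "(Copy 2)", ... naming loop used when cloning.
    candidate = base_name + " (Copy)"
    counter = 1
    while candidate in taken:
        counter += 1
        candidate = f"{base_name} (Copy {counter})"
    return candidate


# e.g. unique_copy_name("survival", {"survival", "survival (Copy)"})
# returns "survival (Copy 2)"
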
@@ -185,13 +232,16 @@ class ServerHandler(BaseHandler):
                    new_server_log_file,
                    stop_command,
                    server_type,
                    server_port,
                )

                self.controller.init_all_servers()

                return

            self.controller.management.send_command(
                exec_user["user_id"], server_id, self.get_remote_ip(), command
            )

        if page == "step1":

@@ -199,53 +249,74 @@ class ServerHandler(BaseHandler):
                user_roles = self.controller.roles.get_all_roles()
            else:
                user_roles = self.controller.roles.get_all_roles()
            server = bleach.clean(self.get_argument("server", ""))
            server_name = bleach.clean(self.get_argument("server_name", ""))
            min_mem = bleach.clean(self.get_argument("min_memory", ""))
            max_mem = bleach.clean(self.get_argument("max_memory", ""))
            port = bleach.clean(self.get_argument("port", ""))
            import_type = bleach.clean(self.get_argument("create_type", ""))
            import_server_path = bleach.clean(self.get_argument("server_path", ""))
            import_server_jar = bleach.clean(self.get_argument("server_jar", ""))
            server_parts = server.split("|")
            captured_roles = []
            for role in user_roles:
                if bleach.clean(self.get_argument(str(role), "")) == "on":
                    captured_roles.append(role)

            if not server_name:
                self.redirect("/panel/error?error=Server name cannot be empty!")
                return

            if import_type == "import_jar":
                good_path = self.controller.verify_jar_server(
                    import_server_path, import_server_jar
                )

                if not good_path:
                    self.redirect(
                        "/panel/error?error=Server path or Server Jar not found!"
                    )
                    return

                new_server_id = self.controller.import_jar_server(
                    server_name,
                    import_server_path,
                    import_server_jar,
                    min_mem,
                    max_mem,
                    port,
                )
                self.controller.management.add_to_audit_log(
                    exec_user["user_id"],
                    f'imported a jar server named "{server_name}"',
                    new_server_id,
                    self.get_remote_ip(),
                )
            elif import_type == "import_zip":
                # here import_server_path means the zip path
                zip_path = bleach.clean(self.get_argument("root_path"))
                good_path = helper.check_path_exists(zip_path)
                if not good_path:
                    self.redirect("/panel/error?error=Temp path not found!")
                    return

                new_server_id = self.controller.import_zip_server(
                    server_name, zip_path, import_server_jar, min_mem, max_mem, port
                )
                if new_server_id == "false":
                    self.redirect(
                        f"/panel/error?error=Zip file not accessible! "
                        f"You can fix this permissions issue with "
                        f"sudo chown -R crafty:crafty {import_server_path} "
                        f"And sudo chmod 2775 -R {import_server_path}"
                    )
                    return
                self.controller.management.add_to_audit_log(
                    exec_user["user_id"],
                    f'imported a zip server named "{server_name}"',
                    new_server_id,
                    self.get_remote_ip(),
                )
                # deletes temp dir
                file_helper.del_dirs(zip_path)
            else:

@@ -253,28 +324,47 @@ class ServerHandler(BaseHandler):
                self.redirect("/panel/error?error=Invalid server data")
                return
            server_type, server_version = server_parts
            # TODO: add server type check here and call the correct server
            # add functions if not a jar
            role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
            new_server_id = self.controller.create_jar_server(
                server_type, server_version, server_name, min_mem, max_mem, port
            )
            self.controller.management.add_to_audit_log(
                exec_user["user_id"],
                f"created a {server_version} {str(server_type).capitalize()}"
                f' server named "{server_name}"',
                # Example: Admin created a 1.16.5 Bukkit server named "survival"
                new_server_id,
                self.get_remote_ip(),
            )

            # These lines create a new Role for the Server with full permissions
            # and add the user to it if he's not a superuser
            if len(captured_roles) == 0:
                if not superuser:
                    new_server_uuid = self.controller.servers.get_server_data_by_id(
                        new_server_id
                    ).get("server_uuid")
                    role_id = self.controller.roles.add_role(
                        f"Creator of Server with uuid={new_server_uuid}"
                    )
                    self.controller.server_perms.add_role_server(
                        new_server_id, role_id, "11111111"
                    )
                    self.controller.users.add_role_to_user(
                        exec_user["user_id"], role_id
                    )
                    self.controller.crafty_perms.add_server_creation(
                        exec_user["user_id"]
                    )

            else:
                for role in captured_roles:
                    role_id = role
                    self.controller.server_perms.add_role_server(
                        new_server_id, role_id, "11111111"
                    )

            self.controller.stats.record_stats()
            self.redirect("/panel/dashboard")

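When no roles were ticked, the creator gets a fresh role attached to the new server with the mask "11111111" — one character per server permission, so everything is granted. A hedged illustration of reading such a mask; the enum below mirrors the ordering of the permissions dict used by these handlers and is an assumption, not the project's actual permissions module:

from enum import Enum


class Permissions(Enum):
    # Assumed ordering, taken from the permissions dict in the handlers above.
    COMMANDS = 0
    TERMINAL = 1
    LOGS = 2
    SCHEDULE = 3
    BACKUP = 4
    FILES = 5
    CONFIG = 6
    PLAYERS = 7


def granted(mask: str) -> list:
    # "11111111" -> every permission; "10000100" -> [COMMANDS, FILES]
    return [p for p in Permissions if mask[p.value] == "1"]
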
@@ -284,51 +374,67 @@ class ServerHandler(BaseHandler):
                user_roles = self.controller.roles.get_all_roles()
            else:
                user_roles = self.controller.roles.get_all_roles()
            server = bleach.clean(self.get_argument("server", ""))
            server_name = bleach.clean(self.get_argument("server_name", ""))
            port = bleach.clean(self.get_argument("port", ""))
            import_type = bleach.clean(self.get_argument("create_type", ""))
            import_server_path = bleach.clean(self.get_argument("server_path", ""))
            import_server_exe = bleach.clean(self.get_argument("server_jar", ""))
            server_parts = server.split("|")
            captured_roles = []
            for role in user_roles:
                if bleach.clean(self.get_argument(str(role), "")) == "on":
                    captured_roles.append(role)

            if not server_name:
                self.redirect("/panel/error?error=Server name cannot be empty!")
                return

            if import_type == "import_jar":
                good_path = self.controller.verify_jar_server(
                    import_server_path, import_server_exe
                )

                if not good_path:
                    self.redirect(
                        "/panel/error?error=Server path or Server Jar not found!"
                    )
                    return

                new_server_id = self.controller.import_bedrock_server(
                    server_name, import_server_path, import_server_exe, port
                )
                self.controller.management.add_to_audit_log(
                    exec_user["user_id"],
                    f'imported a jar server named "{server_name}"',
                    new_server_id,
                    self.get_remote_ip(),
                )
            elif import_type == "import_zip":
                # here import_server_path means the zip path
                zip_path = bleach.clean(self.get_argument("root_path"))
                good_path = helper.check_path_exists(zip_path)
                if not good_path:
                    self.redirect("/panel/error?error=Temp path not found!")
                    return

                new_server_id = self.controller.import_bedrock_zip_server(
                    server_name, zip_path, import_server_exe, port
                )
                if new_server_id == "false":
                    self.redirect(
                        f"/panel/error?error=Zip file not accessible! "
                        f"You can fix this permissions issue with"
                        f"sudo chown -R crafty:crafty {import_server_path} "
                        f"And sudo chmod 2775 -R {import_server_path}"
                    )
                    return
                self.controller.management.add_to_audit_log(
                    exec_user["user_id"],
                    f'imported a zip server named "{server_name}"',
                    new_server_id,
                    self.get_remote_ip(),
                )
                # deletes temp dir
                file_helper.del_dirs(zip_path)
            else:

@ -336,28 +442,47 @@ class ServerHandler(BaseHandler):
|
|||||||
self.redirect("/panel/error?error=Invalid server data")
|
self.redirect("/panel/error?error=Invalid server data")
|
||||||
return
|
return
|
||||||
server_type, server_version = server_parts
|
server_type, server_version = server_parts
|
||||||
# TODO: add server type check here and call the correct server add functions if not a jar
|
# TODO: add server type check here and call the correct server
|
||||||
|
# add functions if not a jar
|
||||||
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
||||||
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)
|
new_server_id = self.controller.create_jar_server(
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
server_type, server_version, server_name, min_mem, max_mem, port
|
||||||
f"created a {server_version} {str(server_type).capitalize()} server named \"{server_name}\"",
|
)
|
||||||
|
self.controller.management.add_to_audit_log(
|
||||||
|
exec_user["user_id"],
|
||||||
|
f"created a {server_version} {str(server_type).capitalize()} "
|
||||||
|
f'server named "{server_name}"',
|
||||||
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||||
new_server_id,
|
new_server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip(),
|
||||||
|
)
|
||||||
|
|
||||||
# These lines create a new Role for the Server with full permissions and add the user to it if he's not a superuser
|
# These lines create a new Role for the Server with full permissions
|
||||||
|
# and add the user to it if he's not a superuser
|
||||||
if len(captured_roles) == 0:
|
if len(captured_roles) == 0:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
new_server_uuid = self.controller.servers.get_server_data_by_id(new_server_id).get("server_uuid")
|
new_server_uuid = self.controller.servers.get_server_data_by_id(
|
||||||
role_id = self.controller.roles.add_role(f"Creator of Server with uuid={new_server_uuid}")
|
new_server_id
|
||||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
).get("server_uuid")
|
||||||
self.controller.users.add_role_to_user(exec_user["user_id"], role_id)
|
role_id = self.controller.roles.add_role(
|
||||||
self.controller.crafty_perms.add_server_creation(exec_user["user_id"])
|
f"Creator of Server with uuid={new_server_uuid}"
|
||||||
|
)
|
||||||
|
self.controller.server_perms.add_role_server(
|
||||||
|
new_server_id, role_id, "11111111"
|
||||||
|
)
|
||||||
|
self.controller.users.add_role_to_user(
|
||||||
|
exec_user["user_id"], role_id
|
||||||
|
)
|
||||||
|
self.controller.crafty_perms.add_server_creation(
|
||||||
|
exec_user["user_id"]
|
||||||
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
for role in captured_roles:
|
for role in captured_roles:
|
||||||
role_id = role
|
role_id = role
|
||||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
self.controller.server_perms.add_role_server(
|
||||||
|
new_server_id, role_id, "11111111"
|
||||||
|
)
|
||||||
|
|
||||||
self.controller.stats.record_stats()
|
self.controller.stats.record_stats()
|
||||||
self.redirect("/panel/dashboard")
|
self.redirect("/panel/dashboard")
|
||||||
@@ -369,4 +494,4 @@ class ServerHandler(BaseHandler):
translate=self.translator.translate,
)
except RuntimeError:
-self.redirect('/panel/dashboard')
+self.redirect("/panel/dashboard")
@@ -1,17 +1,24 @@
-from typing import ( Optional )
+from typing import Optional

try:
import tornado.web

except ModuleNotFoundError as e:
from app.classes.shared.helpers import helper

helper.auto_installer_fix(e)


class CustomStaticHandler(tornado.web.StaticFileHandler):
def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
try:
return super().validate_absolute_path(root, absolute_path)
except tornado.web.HTTPError as error:
-if 'HTTP 404: Not Found' in str(error):
+if "HTTP 404: Not Found" in str(error):
self.set_status(404)
-self.finish({'error':'NOT_FOUND', 'info':'The requested resource was not found on the server'})
+self.finish(
+{
+"error": "NOT_FOUND",
+"info": "The requested resource was not found on the server",
+}
+)
@@ -5,39 +5,45 @@ from app.classes.web.base_handler import BaseHandler

logger = logging.getLogger(__name__)


class StatusHandler(BaseHandler):
def get(self):
page_data = {}
-page_data['lang'] = helper.get_setting('language')
+page_data["lang"] = helper.get_setting("language")
-page_data['lang_page'] = helper.getLangPage(helper.get_setting('language'))
+page_data["lang_page"] = helper.getLangPage(helper.get_setting("language"))
-page_data['servers'] = self.controller.servers.get_all_servers_stats()
+page_data["servers"] = self.controller.servers.get_all_servers_stats()
running = 0
-for srv in page_data['servers']:
+for srv in page_data["servers"]:
-if srv['stats']['running']:
+if srv["stats"]["running"]:
running += 1
-server_data = srv.get('server_data', False)
+server_data = srv.get("server_data", False)
-server_id = server_data.get('server_id', False)
+server_id = server_data.get("server_id", False)
-srv['raw_ping_result'] = self.controller.servers.get_server_stats_by_id(server_id)
-if 'icon' not in srv['raw_ping_result']:
-srv['raw_ping_result']['icon'] = False
+srv["raw_ping_result"] = self.controller.servers.get_server_stats_by_id(
+server_id
+)
+if "icon" not in srv["raw_ping_result"]:
+srv["raw_ping_result"]["icon"] = False

-page_data['running'] = running
+page_data["running"] = running

-template = 'public/status.html'
+template = "public/status.html"

self.render(
template,
data=page_data,
translate=self.translator.translate,
)

def post(self):
page_data = {}
-page_data['servers'] = self.controller.servers.get_all_servers_stats()
+page_data["servers"] = self.controller.servers.get_all_servers_stats()
-for srv in page_data['servers']:
+for srv in page_data["servers"]:
-server_data = srv.get('server_data', False)
+server_data = srv.get("server_data", False)
-server_id = server_data.get('server_id', False)
+server_id = server_data.get("server_id", False)
-srv['raw_ping_result'] = self.controller.servers.get_server_stats_by_id(server_id)
-template = 'public/status.html'
+srv["raw_ping_result"] = self.controller.servers.get_server_stats_by_id(
+server_id
+)
+template = "public/status.html"

self.render(
template,
@@ -34,8 +34,8 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)

+
class Webserver:
def __init__(self, controller, tasks_manager):
self.ioloop = None
self.HTTP_Server = None
@@ -48,12 +48,12 @@ class Webserver:
def log_function(handler):

info = {
-'Status_Code': handler.get_status(),
+"Status_Code": handler.get_status(),
-'Method': handler.request.method,
+"Method": handler.request.method,
-'URL': handler.request.uri,
+"URL": handler.request.uri,
-'Remote_IP': handler.request.remote_ip,
+"Remote_IP": handler.request.remote_ip,
# pylint: disable=consider-using-f-string
-'Elapsed_Time': '%.2fms' % (handler.request.request_time() * 1000)
+"Elapsed_Time": "%.2fms" % (handler.request.request_time() * 1000),
}

tornado.log.access_log.info(json.dumps(info, indent=4))
@@ -61,23 +61,31 @@ class Webserver:
@staticmethod
def _asyncio_patch():
"""
-As of Python 3.8 (on Windows), the asyncio default event handler has changed to "proactor",
+As of Python 3.8 (on Windows),
+the asyncio default event handler has changed to "proactor",
where tornado expects the "selector" handler.

-This function checks if the platform is windows and changes the event handler to suit.
+This function checks if the platform is windows and
+changes the event handler to suit.

-(Taken from https://github.com/mkdocs/mkdocs/commit/cf2b136d4257787c0de51eba2d9e30ded5245b31)
+(Taken from
+https://github.com/mkdocs/mkdocs/commit/cf2b136d4257787c0de51eba2d9e30ded5245b31)
"""
logger.debug("Checking if asyncio patch is required")
if sys.platform.startswith("win") and sys.version_info >= (3, 8):
# pylint: disable=reimported,import-outside-toplevel,redefined-outer-name
import asyncio

try:
from asyncio import WindowsSelectorEventLoopPolicy
except ImportError:
-logger.debug("asyncio patch isn't required") # Can't assign a policy which doesn't exist.
+logger.debug(
+"asyncio patch isn't required"
+) # Can't assign a policy which doesn't exist.
else:
-if not isinstance(asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
+if not isinstance(
+asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy
+):
asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
logger.debug("Applied asyncio patch")
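The docstring in this hunk explains why Tornado needs the selector event loop on Windows with Python 3.8+. As a standalone illustration of that same check-and-patch pattern (this is a minimal sketch, not Crafty's Webserver code; the function name is chosen here for the example):

import asyncio
import sys


def patch_windows_event_loop():
    """Switch Python 3.8+ on Windows back to the selector event loop policy,
    since the default changed to proactor while Tornado expects selector."""
    if sys.platform.startswith("win") and sys.version_info >= (3, 8):
        # WindowsSelectorEventLoopPolicy only exists on Windows builds of asyncio,
        # so this attribute is only touched inside the platform check.
        policy = asyncio.get_event_loop_policy()
        if not isinstance(policy, asyncio.WindowsSelectorEventLoopPolicy):
            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())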
@@ -86,11 +94,11 @@ class Webserver:
# let's verify we have an SSL cert
helper.create_self_signed_cert()

-http_port = helper.get_setting('http_port')
+http_port = helper.get_setting("http_port")
-https_port = helper.get_setting('https_port')
+https_port = helper.get_setting("https_port")

-debug_errors = helper.get_setting('show_errors')
+debug_errors = helper.get_setting("show_errors")
-cookie_secret = helper.get_setting('cookie_secret')
+cookie_secret = helper.get_setting("cookie_secret")

if cookie_secret is False:
cookie_secret = helper.random_string_generator(32)
@@ -102,38 +110,46 @@ class Webserver:
https_port = 8443

cert_objects = {
-'certfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.cert.pem'),
-'keyfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.key.pem'),
+"certfile": os.path.join(
+helper.config_dir, "web", "certs", "commander.cert.pem"
+),
+"keyfile": os.path.join(
+helper.config_dir, "web", "certs", "commander.key.pem"
+),
}

logger.info(f"Starting Web Server on ports http:{http_port} https:{https_port}")

asyncio.set_event_loop(asyncio.new_event_loop())

-tornado.template.Loader('.')
+tornado.template.Loader(".")

# TODO: Remove because we don't and won't use
-tornado.locale.set_default_locale('en_EN')
+tornado.locale.set_default_locale("en_EN")

-handler_args = {"controller": self.controller, "tasks_manager": self.tasks_manager, "translator": translation}
+handler_args = {
+"controller": self.controller,
+"tasks_manager": self.tasks_manager,
+"translator": translation,
+}
handlers = [
-(r'/', DefaultHandler, handler_args),
+(r"/", DefaultHandler, handler_args),
-(r'/public/(.*)', PublicHandler, handler_args),
+(r"/public/(.*)", PublicHandler, handler_args),
-(r'/panel/(.*)', PanelHandler, handler_args),
+(r"/panel/(.*)", PanelHandler, handler_args),
-(r'/server/(.*)', ServerHandler, handler_args),
+(r"/server/(.*)", ServerHandler, handler_args),
-(r'/ajax/(.*)', AjaxHandler, handler_args),
+(r"/ajax/(.*)", AjaxHandler, handler_args),
-(r'/files/(.*)', FileHandler, handler_args),
+(r"/files/(.*)", FileHandler, handler_args),
-(r'/api/stats/servers', ServersStats, handler_args),
+(r"/api/stats/servers", ServersStats, handler_args),
-(r'/api/stats/node', NodeStats, handler_args),
+(r"/api/stats/node", NodeStats, handler_args),
-(r'/ws', SocketHandler, handler_args),
+(r"/ws", SocketHandler, handler_args),
-(r'/upload', UploadHandler, handler_args),
+(r"/upload", UploadHandler, handler_args),
-(r'/status', StatusHandler, handler_args)
+(r"/status", StatusHandler, handler_args),
]

app = tornado.web.Application(
handlers,
-template_path=os.path.join(helper.webroot, 'templates'),
+template_path=os.path.join(helper.webroot, "templates"),
-static_path=os.path.join(helper.webroot, 'static'),
+static_path=os.path.join(helper.webroot, "static"),
debug=debug_errors,
cookie_secret=cookie_secret,
xsrf_cookies=True,
|
|||||||
static_handler_class=CustomStaticHandler,
|
static_handler_class=CustomStaticHandler,
|
||||||
serve_traceback=debug_errors,
|
serve_traceback=debug_errors,
|
||||||
)
|
)
|
||||||
HTTPhanders = [(r'/', HTTPHandler, handler_args),
|
HTTPhanders = [
|
||||||
(r'/public/(.*)', HTTPHandlerPage, handler_args),
|
(r"/", HTTPHandler, handler_args),
|
||||||
(r'/panel/(.*)', HTTPHandlerPage, handler_args),
|
(r"/public/(.*)", HTTPHandlerPage, handler_args),
|
||||||
(r'/server/(.*)', HTTPHandlerPage, handler_args),
|
(r"/panel/(.*)", HTTPHandlerPage, handler_args),
|
||||||
(r'/ajax/(.*)', HTTPHandlerPage, handler_args),
|
(r"/server/(.*)", HTTPHandlerPage, handler_args),
|
||||||
(r'/api/stats/servers', HTTPHandlerPage, handler_args),
|
(r"/ajax/(.*)", HTTPHandlerPage, handler_args),
|
||||||
(r'/api/stats/node', HTTPHandlerPage, handler_args),
|
(r"/api/stats/servers", HTTPHandlerPage, handler_args),
|
||||||
(r'/ws', HTTPHandlerPage, handler_args),
|
(r"/api/stats/node", HTTPHandlerPage, handler_args),
|
||||||
(r'/upload', HTTPHandlerPage, handler_args)]
|
(r"/ws", HTTPHandlerPage, handler_args),
|
||||||
|
(r"/upload", HTTPHandlerPage, handler_args),
|
||||||
|
]
|
||||||
HTTPapp = tornado.web.Application(
|
HTTPapp = tornado.web.Application(
|
||||||
HTTPhanders,
|
HTTPhanders,
|
||||||
template_path=os.path.join(helper.webroot, 'templates'),
|
template_path=os.path.join(helper.webroot, "templates"),
|
||||||
static_path=os.path.join(helper.webroot, 'static'),
|
static_path=os.path.join(helper.webroot, "static"),
|
||||||
debug=debug_errors,
|
debug=debug_errors,
|
||||||
cookie_secret=cookie_secret,
|
cookie_secret=cookie_secret,
|
||||||
xsrf_cookies=True,
|
xsrf_cookies=True,
|
||||||
@@ -173,8 +191,14 @@ class Webserver:
self.HTTPS_Server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
self.HTTPS_Server.listen(https_port)

-logger.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
-console.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
+logger.info(
+f"https://{helper.get_local_ip()}:{https_port} "
+f"is up and ready for connections."
+)
+console.info(
+f"https://{helper.get_local_ip()}:{https_port} "
+f"is up and ready for connections."
+)

console.info("Server Init Complete: Listening For Connections:")
@@ -22,11 +22,14 @@ logger = logging.getLogger(__name__)
# Class & Function Defination
MAX_STREAMED_SIZE = 1024 * 1024 * 1024

+
@tornado.web.stream_request_body
class UploadHandler(BaseHandler):

# noinspection PyAttributeOutsideInit
-def initialize(self, controller: Controller=None, tasks_manager=None, translator=None):
+def initialize(
+self, controller: Controller = None, tasks_manager=None, translator=None
+):
self.controller = controller
self.tasks_manager = tasks_manager
self.translator = translator
@@ -35,45 +38,78 @@ class UploadHandler(BaseHandler):
self.do_upload = True
# pylint: disable=unused-variable
api_key, token_data, exec_user = self.current_user
-server_id = self.get_argument('server_id', None)
+server_id = self.get_argument("server_id", None)
-superuser = exec_user['superuser']
+superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
-user_id = exec_user['user_id']
+user_id = exec_user["user_id"]

if superuser:
-exec_user_server_permissions = self.controller.server_perms.list_defined_permissions()
+exec_user_server_permissions = (
+self.controller.server_perms.list_defined_permissions()
+)
elif api_key is not None:
-exec_user_server_permissions = self.controller.server_perms.get_api_key_permissions_list(api_key, server_id)
+exec_user_server_permissions = (
+self.controller.server_perms.get_api_key_permissions_list(
+api_key, server_id
+)
+)
else:
-exec_user_server_permissions = self.controller.server_perms.get_user_id_permissions_list(
-exec_user["user_id"], server_id)
+exec_user_server_permissions = (
+self.controller.server_perms.get_user_id_permissions_list(
+exec_user["user_id"], server_id
+)
+)

-server_id = self.request.headers.get('X-ServerId', None)
+server_id = self.request.headers.get("X-ServerId", None)

if user_id is None:
-logger.warning('User ID not found in upload handler call')
+logger.warning("User ID not found in upload handler call")
-console.warning('User ID not found in upload handler call')
+console.warning("User ID not found in upload handler call")
self.do_upload = False

if server_id is None:
-logger.warning('Server ID not found in upload handler call')
+logger.warning("Server ID not found in upload handler call")
-console.warning('Server ID not found in upload handler call')
+console.warning("Server ID not found in upload handler call")
self.do_upload = False

if Enum_Permissions_Server.Files not in exec_user_server_permissions:
-logger.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
-console.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
+logger.warning(
+f"User {user_id} tried to upload a file to "
+f"{server_id} without permissions!"
+)
+console.warning(
+f"User {user_id} tried to upload a file to "
+f"{server_id} without permissions!"
+)
self.do_upload = False

-path = self.request.headers.get('X-Path', None)
+path = self.request.headers.get("X-Path", None)
-filename = self.request.headers.get('X-FileName', None)
+filename = self.request.headers.get("X-FileName", None)
full_path = os.path.join(path, filename)

-if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path):
-print(user_id, server_id, helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), full_path)
-logger.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!')
-console.warning(f'User {user_id} tried to upload a file to {server_id} but the path is not inside of the server!')
+if not helper.in_path(
+helper.get_os_understandable_path(
+self.controller.servers.get_server_data_by_id(server_id)["path"]
+),
+full_path,
+):
+print(
+user_id,
+server_id,
+helper.get_os_understandable_path(
+self.controller.servers.get_server_data_by_id(server_id)["path"]
+),
+full_path,
+)
+logger.warning(
+f"User {user_id} tried to upload a file to {server_id} "
+f"but the path is not inside of the server!"
+)
+console.warning(
+f"User {user_id} tried to upload a file to {server_id} "
+f"but the path is not inside of the server!"
+)
self.do_upload = False

if self.do_upload:
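The helper.in_path(...) guard in the hunk above is what keeps uploads from escaping the target server's directory. in_path is Crafty's own helper; as an illustration only (a hypothetical stand-alone version under that assumption, not the project's implementation), a containment check of that kind could look like:

import os


def in_path(parent: str, child: str) -> bool:
    """Return True only if child resolves to a location inside parent."""
    # Resolve symlinks and ".." segments before comparing the two paths.
    parent = os.path.realpath(parent)
    child = os.path.realpath(child)
    return os.path.commonpath([parent, child]) == parent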
@@ -87,19 +123,19 @@ class UploadHandler(BaseHandler):

def post(self):
logger.info("Upload completed")
-files_left = int(self.request.headers.get('X-Files-Left', None))
+files_left = int(self.request.headers.get("X-Files-Left", None))

if self.do_upload:
time.sleep(5)
if files_left == 0:
-websocket_helper.broadcast('close_upload_box', 'success')
+websocket_helper.broadcast("close_upload_box", "success")
-self.finish('success') # Nope, I'm sending "success"
+self.finish("success") # Nope, I'm sending "success"
self.f.close()
else:
time.sleep(5)
if files_left == 0:
-websocket_helper.broadcast('close_upload_box', 'error')
+websocket_helper.broadcast("close_upload_box", "error")
-self.finish('error')
+self.finish("error")

def data_received(self, chunk):
if self.do_upload:
@@ -15,6 +15,7 @@ except ModuleNotFoundError as e:

logger = logging.getLogger(__name__)

+
class SocketHandler(tornado.websocket.WebSocketHandler):
page = None
page_query_params = None
@@ -30,56 +31,71 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
self.io_loop = tornado.ioloop.IOLoop.current()

def get_remote_ip(self):
-remote_ip = self.request.headers.get("X-Real-IP") or \
-self.request.headers.get("X-Forwarded-For") or \
-self.request.remote_ip
+remote_ip = (
+self.request.headers.get("X-Real-IP")
+or self.request.headers.get("X-Forwarded-For")
+or self.request.remote_ip
+)
return remote_ip

def get_user_id(self):
-_, _, user = authentication.check(self.get_cookie('token'))
+_, _, user = authentication.check(self.get_cookie("token"))
-return user['user_id']
+return user["user_id"]

def check_auth(self):
-return authentication.check_bool(self.get_cookie('token'))
+return authentication.check_bool(self.get_cookie("token"))

# pylint: disable=arguments-differ
def open(self):
-logger.debug('Checking WebSocket authentication')
+logger.debug("Checking WebSocket authentication")
if self.check_auth():
self.handle()
else:
-websocket_helper.send_message(self, 'notification', 'Not authenticated for WebSocket connection')
+websocket_helper.send_message(
+self, "notification", "Not authenticated for WebSocket connection"
+)
self.close()
-self.controller.management.add_to_audit_log_raw('unknown',
-0, 0,
-'Someone tried to connect via WebSocket without proper authentication',
-self.get_remote_ip())
-websocket_helper.broadcast('notification', 'Someone tried to connect via WebSocket without proper authentication')
-logger.warning('Someone tried to connect via WebSocket without proper authentication')
+self.controller.management.add_to_audit_log_raw(
+"unknown",
+0,
+0,
+"Someone tried to connect via WebSocket without proper authentication",
+self.get_remote_ip(),
+)
+websocket_helper.broadcast(
+"notification",
+"Someone tried to connect via WebSocket without proper authentication",
+)
+logger.warning(
+"Someone tried to connect via WebSocket without proper authentication"
+)

def handle(self):
-self.page = self.get_query_argument('page')
+self.page = self.get_query_argument("page")
-self.page_query_params = dict(parse_qsl(helper.remove_prefix(
-self.get_query_argument('page_query_params'),
-'?'
-)))
+self.page_query_params = dict(
+parse_qsl(
+helper.remove_prefix(self.get_query_argument("page_query_params"), "?")
+)
+)
websocket_helper.add_client(self)
-logger.debug('Opened WebSocket connection')
+logger.debug("Opened WebSocket connection")

# pylint: disable=arguments-renamed
@staticmethod
def on_message(raw_message):

-logger.debug(f'Got message from WebSocket connection {raw_message}')
+logger.debug(f"Got message from WebSocket connection {raw_message}")
message = json.loads(raw_message)
logger.debug(f"Event Type: {message['event']}, Data: {message['data']}")

def on_close(self):
websocket_helper.remove_client(self)
-logger.debug('Closed WebSocket connection')
+logger.debug("Closed WebSocket connection")

async def write_message_int(self, message):
self.write_message(message)

def write_message_helper(self, message):
-asyncio.run_coroutine_threadsafe(self.write_message_int(message), self.io_loop.asyncio_loop)
+asyncio.run_coroutine_threadsafe(
+self.write_message_int(message), self.io_loop.asyncio_loop
+)
@@ -5,6 +5,7 @@ from app.classes.shared.console import console

logger = logging.getLogger(__name__)

+
class WebSocketHelper:
def __init__(self):
self.clients = set()
@@ -18,16 +19,22 @@ class WebSocketHelper:
# pylint: disable=no-self-use
def send_message(self, client, event_type: str, data):
if client.check_auth():
-message = str(json.dumps({'event': event_type, 'data': data}))
+message = str(json.dumps({"event": event_type, "data": data}))
client.write_message_helper(message)

def broadcast(self, event_type: str, data):
-logger.debug(f"Sending to {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
+logger.debug(
+f"Sending to {len(self.clients)} clients: "
+f"{json.dumps({'event': event_type, 'data': data})}"
+)
for client in self.clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
-logger.exception(f'Error caught while sending WebSocket message to {client.get_remote_ip()} {e}')
+logger.exception(
+f"Error caught while sending WebSocket message to "
+f"{client.get_remote_ip()} {e}"
+)

def broadcast_page(self, page: str, event_type: str, data):
def filter_fn(client):
@@ -51,7 +58,9 @@ class WebSocketHelper:

self.broadcast_with_fn(filter_fn, event_type, data)

-def broadcast_user_page_params(self, page: str, params: dict, user_id: str, event_type: str, data):
+def broadcast_user_page_params(
+self, page: str, params: dict, user_id: str, event_type: str, data
+):
def filter_fn(client):
if client.get_user_id() != user_id:
return False
@@ -77,18 +86,25 @@ class WebSocketHelper:

def broadcast_with_fn(self, filter_fn, event_type: str, data):
clients = list(filter(filter_fn, self.clients))
-logger.debug(f"Sending to {len(clients)} out of {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
+logger.debug(
+f"Sending to {len(clients)} out of {len(self.clients)} "
+f"clients: {json.dumps({'event': event_type, 'data': data})}"
+)

for client in clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
-logger.exception(f'Error catched while sending WebSocket message to {client.get_remote_ip()} {e}')
+logger.exception(
+f"Error catched while sending WebSocket message to "
+f"{client.get_remote_ip()} {e}"
+)

def disconnect_all(self):
-console.info('Disconnecting WebSocket clients')
+console.info("Disconnecting WebSocket clients")
for client in self.clients:
client.close()
-console.info('Disconnected WebSocket clients')
+console.info("Disconnected WebSocket clients")


websocket_helper = WebSocketHelper()
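The WebSocketHelper broadcast variants in these hunks all share one shape: build a filter over the connected clients, then send the JSON event to every match, swallowing per-client failures. A toy sketch of that pattern, using a placeholder client type rather than Crafty's real SocketHandler (names here are invented for illustration):

import json


class DemoClient:
    """Placeholder client: the real clients are Tornado WebSocket handlers."""

    def __init__(self, name):
        self.name = name

    def write_message(self, message):
        print(f"{self.name} <- {message}")


class DemoBroadcaster:
    def __init__(self):
        self.clients = set()

    def broadcast_with_fn(self, filter_fn, event_type, data):
        message = json.dumps({"event": event_type, "data": data})
        for client in filter(filter_fn, self.clients):
            try:
                client.write_message(message)
            except Exception as exc:  # one bad client should not stop the loop
                print(f"send failed for {client.name}: {exc}")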
@@ -4,6 +4,7 @@ import datetime

+
def migrate(migrator, database, **kwargs):
db = database

class Users(peewee.Model):
user_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
@@ -32,12 +33,12 @@ def migrate(migrator, database, **kwargs):
database = db

class User_Roles(peewee.Model):
-user_id = peewee.ForeignKeyField(Users, backref='user_role')
+user_id = peewee.ForeignKeyField(Users, backref="user_role")
-role_id = peewee.ForeignKeyField(Roles, backref='user_role')
+role_id = peewee.ForeignKeyField(Roles, backref="user_role")

class Meta:
-table_name = 'user_roles'
+table_name = "user_roles"
-primary_key = peewee.CompositeKey('user_id', 'role_id')
+primary_key = peewee.CompositeKey("user_id", "role_id")
database = db

class Audit_Log(peewee.Model):
@@ -45,10 +46,10 @@ def migrate(migrator, database, **kwargs):
created = peewee.DateTimeField(default=datetime.datetime.now)
user_name = peewee.CharField(default="")
user_id = peewee.IntegerField(default=0, index=True)
-source_ip = peewee.CharField(default='127.0.0.1')
+source_ip = peewee.CharField(default="127.0.0.1")
# When auditing global events, use server ID 0
server_id = peewee.IntegerField(default=None, index=True)
-log_msg = peewee.TextField(default='')
+log_msg = peewee.TextField(default="")

class Meta:
database = db
@@ -93,27 +94,27 @@ def migrate(migrator, database, **kwargs):
database = db

class User_Servers(peewee.Model):
-user_id = peewee.ForeignKeyField(Users, backref='user_server')
+user_id = peewee.ForeignKeyField(Users, backref="user_server")
-server_id = peewee.ForeignKeyField(Servers, backref='user_server')
+server_id = peewee.ForeignKeyField(Servers, backref="user_server")

class Meta:
-table_name = 'user_servers'
+table_name = "user_servers"
-primary_key = peewee.CompositeKey('user_id', 'server_id')
+primary_key = peewee.CompositeKey("user_id", "server_id")
database = db

class Role_Servers(peewee.Model):
-role_id = peewee.ForeignKeyField(Roles, backref='role_server')
+role_id = peewee.ForeignKeyField(Roles, backref="role_server")
-server_id = peewee.ForeignKeyField(Servers, backref='role_server')
+server_id = peewee.ForeignKeyField(Servers, backref="role_server")

class Meta:
-table_name = 'role_servers'
+table_name = "role_servers"
-primary_key = peewee.CompositeKey('role_id', 'server_id')
+primary_key = peewee.CompositeKey("role_id", "server_id")
database = db

class Server_Stats(peewee.Model):
stats_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
-server_id = peewee.ForeignKeyField(Servers, backref='server', index=True)
+server_id = peewee.ForeignKeyField(Servers, backref="server", index=True)
started = peewee.CharField(default="")
running = peewee.BooleanField(default=False)
cpu = peewee.FloatField(default=0)
@@ -137,10 +138,10 @@ def migrate(migrator, database, **kwargs):
class Commands(peewee.Model):
command_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
-server_id = peewee.ForeignKeyField(Servers, backref='server', index=True)
+server_id = peewee.ForeignKeyField(Servers, backref="server", index=True)
-user = peewee.ForeignKeyField(Users, backref='user', index=True)
+user = peewee.ForeignKeyField(Users, backref="user", index=True)
-source_ip = peewee.CharField(default='127.0.0.1')
+source_ip = peewee.CharField(default="127.0.0.1")
-command = peewee.CharField(default='')
+command = peewee.CharField(default="")
executed = peewee.BooleanField(default=False)

class Meta:
@@ -161,7 +162,7 @@ def migrate(migrator, database, **kwargs):

class Schedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
-server_id = peewee.ForeignKeyField(Servers, backref='schedule_server')
+server_id = peewee.ForeignKeyField(Servers, backref="schedule_server")
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
@@ -171,17 +172,17 @@ def migrate(migrator, database, **kwargs):
comment = peewee.CharField()

class Meta:
-table_name = 'schedules'
+table_name = "schedules"
database = db

class Backups(peewee.Model):
directories = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
-server_id = peewee.ForeignKeyField(Servers, backref='backups_server')
+server_id = peewee.ForeignKeyField(Servers, backref="backups_server")
-schedule_id = peewee.ForeignKeyField(Schedules, backref='backups_schedule')
+schedule_id = peewee.ForeignKeyField(Schedules, backref="backups_schedule")

class Meta:
-table_name = 'backups'
+table_name = "backups"
database = db

migrator.create_table(Backups)
@@ -200,16 +201,18 @@ def migrate(migrator, database, **kwargs):


def rollback(migrator, database, **kwargs):
-migrator.drop_table('users')
+migrator.drop_table("users")
-migrator.drop_table('roles')
+migrator.drop_table("roles")
-migrator.drop_table('user_roles')
+migrator.drop_table("user_roles")
-migrator.drop_table('audit_log') # ? Not 100% sure of the table name, please specify in the schema
-migrator.drop_table('host_stats')
-migrator.drop_table('servers')
-migrator.drop_table('user_servers')
-migrator.drop_table('role_servers')
-migrator.drop_table('server_stats')
-migrator.drop_table('commands')
-migrator.drop_table('webhooks')
-migrator.drop_table('schedules')
-migrator.drop_table('backups')
+migrator.drop_table(
+"audit_log"
+) # ? Not 100% sure of the table name, please specify in the schema
+migrator.drop_table("host_stats")
+migrator.drop_table("servers")
+migrator.drop_table("user_servers")
+migrator.drop_table("role_servers")
+migrator.drop_table("server_stats")
+migrator.drop_table("commands")
+migrator.drop_table("webhooks")
+migrator.drop_table("schedules")
+migrator.drop_table("backups")
@@ -1,17 +1,26 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('user_servers', permissions=peewee.CharField(default='00000000')) # First argument can be model class OR table name
-migrator.add_columns('role_servers', permissions=peewee.CharField(default='00000000')) # First argument can be model class OR table name
+migrator.add_columns(
+"user_servers", permissions=peewee.CharField(default="00000000")
+) # First argument can be model class OR table name
+migrator.add_columns(
+"role_servers", permissions=peewee.CharField(default="00000000")
+) # First argument can be model class OR table name
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('user_servers', ['permissions']) # First argument can be model class OR table name
-migrator.drop_columns('role_servers', ['permissions']) # First argument can be model class OR table name
+migrator.drop_columns(
+"user_servers", ["permissions"]
+) # First argument can be model class OR table name
+migrator.drop_columns(
+"role_servers", ["permissions"]
+) # First argument can be model class OR table name
"""
Write your rollback migrations here.
"""
@@ -2,25 +2,27 @@
from peewee import *
from app.classes.models.users import Users


def migrate(migrator, database, **kwargs):
db = database

class User_Crafty(Model):
-user_id = ForeignKeyField(Users, backref='users_crafty')
+user_id = ForeignKeyField(Users, backref="users_crafty")
permissions = CharField(default="00000000")
limit_server_creation = IntegerField(default=-1)

class Meta:
-table_name = 'user_crafty'
+table_name = "user_crafty"
database = db

migrator.create_table(User_Crafty)
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_table('user_crafty') # Can be model class OR table name
+migrator.drop_table("user_crafty") # Can be model class OR table name

"""
Write your rollback migrations here.
@@ -5,24 +5,25 @@ from app.classes.models.servers import Servers


def migrate(migrator, database, **kwargs):
-migrator.drop_table('user_servers') # Can be model class OR table name
+migrator.drop_table("user_servers") # Can be model class OR table name
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
db = database

class User_Servers(Model):
-user_id = ForeignKeyField(Users, backref='user_server')
+user_id = ForeignKeyField(Users, backref="user_server")
-server_id = ForeignKeyField(Servers, backref='user_server')
+server_id = ForeignKeyField(Servers, backref="user_server")
permissions = CharField(default="00000000")

class Meta:
-table_name = 'user_servers'
+table_name = "user_servers"
-primary_key = CompositeKey('user_id', 'server_id')
+primary_key = CompositeKey("user_id", "server_id")
database = db

migrator.create_table(User_Servers)
"""
Write your rollback migrations here.
@@ -1,24 +1,28 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('user_crafty', limit_user_creation=peewee.IntegerField(default=0))
-migrator.add_columns('user_crafty', limit_role_creation=peewee.IntegerField(default=0))
-migrator.add_columns('user_crafty', created_server=peewee.IntegerField(default=0))
-migrator.add_columns('user_crafty', created_user=peewee.IntegerField(default=0))
-migrator.add_columns('user_crafty', created_role=peewee.IntegerField(default=0))
+migrator.add_columns(
+"user_crafty", limit_user_creation=peewee.IntegerField(default=0)
+)
+migrator.add_columns(
+"user_crafty", limit_role_creation=peewee.IntegerField(default=0)
+)
+migrator.add_columns("user_crafty", created_server=peewee.IntegerField(default=0))
+migrator.add_columns("user_crafty", created_user=peewee.IntegerField(default=0))
+migrator.add_columns("user_crafty", created_role=peewee.IntegerField(default=0))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('user_crafty', ['limit_user_creation'])
+migrator.drop_columns("user_crafty", ["limit_user_creation"])
-migrator.drop_columns('user_crafty', ['limit_role_creation'])
+migrator.drop_columns("user_crafty", ["limit_role_creation"])
-migrator.drop_columns('user_crafty', ['created_server'])
+migrator.drop_columns("user_crafty", ["created_server"])
-migrator.drop_columns('user_crafty', ['created_user'])
+migrator.drop_columns("user_crafty", ["created_user"])
-migrator.drop_columns('user_crafty', ['created_role'])
+migrator.drop_columns("user_crafty", ["created_role"])
"""
Write your rollback migrations here.
"""
@@ -2,16 +2,19 @@
import peewee
from app.classes.models.management import Schedules


def migrate(migrator, database, **kwargs):
-migrator.drop_columns('backups', ['schedule_id'])
+migrator.drop_columns("backups", ["schedule_id"])
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.add_columns('backups', schedule_id=peewee.ForeignKeyField(Schedules, backref='backups_schedule'))
+migrator.add_columns(
+"backups",
+schedule_id=peewee.ForeignKeyField(Schedules, backref="backups_schedule"),
+)
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('server_stats', crashed=peewee.BooleanField(default=False))
+migrator.add_columns("server_stats", crashed=peewee.BooleanField(default=False))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('server_stats', ['crashed'])
+migrator.drop_columns("server_stats", ["crashed"])
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('schedules', cron_string=peewee.CharField(default=""))
+migrator.add_columns("schedules", cron_string=peewee.CharField(default=""))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('schedules', ['cron_string'])
+migrator.drop_columns("schedules", ["cron_string"])
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('server_stats', first_run=peewee.BooleanField(default=True))
+migrator.add_columns("server_stats", first_run=peewee.BooleanField(default=True))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('server_stats', ['first_run'])
+migrator.drop_columns("server_stats", ["first_run"])
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('schedules', one_time=peewee.BooleanField(default=False))
+migrator.add_columns("schedules", one_time=peewee.BooleanField(default=False))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('schedules', ['one_time'])
+migrator.drop_columns("schedules", ["one_time"])
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
# Generated by database migrator
import peewee


def migrate(migrator, database, **kwargs):
-migrator.add_columns('servers', type=peewee.CharField(default="minecraft-java"))
+migrator.add_columns("servers", type=peewee.CharField(default="minecraft-java"))
"""
Write your migrations here.
"""


def rollback(migrator, database, **kwargs):
-migrator.drop_columns('servers', ['type'])
+migrator.drop_columns("servers", ["type"])
"""
Write your rollback migrations here.
"""
@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', email=peewee.CharField(default="default@example.com"))
+    migrator.add_columns("users", email=peewee.CharField(default="default@example.com"))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['email'])
+    migrator.drop_columns("users", ["email"])
     """
     Write your rollback migrations here.
     """

@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', support_logs=peewee.CharField(default=""))
+    migrator.add_columns("users", support_logs=peewee.CharField(default=""))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['support_logs'])
+    migrator.drop_columns("users", ["support_logs"])
     """
     Write your rollback migrations here.
     """

@@ -1,16 +1,18 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('server_stats', waiting_start=peewee.BooleanField(default=False))
+    migrator.add_columns(
+        "server_stats", waiting_start=peewee.BooleanField(default=False)
+    )
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('server_stats', ['waiting_start'])
+    migrator.drop_columns("server_stats", ["waiting_start"])
     """
     Write your rollback migrations here.
     """

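This is the first hunk where the call no longer fits within Black's default 88-character line limit, so it is wrapped across three lines instead of just re-quoted. A minimal sketch with a stand-in migrator (FakeMigrator is hypothetical, not part of the migration framework) showing that the wrapped form is the same call:

# FakeMigrator only exists to show that the one-line and the Black-wrapped
# forms of the call are identical; the real migrator object is supplied by
# the migration framework at runtime.
class FakeMigrator:
    def add_columns(self, table, **columns):
        return table, sorted(columns)


migrator = FakeMigrator()

single_line = migrator.add_columns("server_stats", waiting_start=False)
wrapped = migrator.add_columns(
    "server_stats", waiting_start=False
)
assert single_line == wrapped
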
@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', lang=peewee.CharField(default='en_EN'))
+    migrator.add_columns("users", lang=peewee.CharField(default="en_EN"))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['lang'])
+    migrator.drop_columns("users", ["lang"])
     """
     Write your rollback migrations here.
     """

@@ -3,10 +3,14 @@ import datetime


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', valid_tokens_from=peewee.DateTimeField(default=datetime.datetime.now))
-    migrator.drop_columns('users', ['api_token'])
+    migrator.add_columns(
+        "users", valid_tokens_from=peewee.DateTimeField(default=datetime.datetime.now)
+    )
+    migrator.drop_columns("users", ["api_token"])


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['valid_tokens_from'])
-    migrator.add_columns('users', api_token=peewee.CharField(default="", unique=True, index=True))
+    migrator.drop_columns("users", ["valid_tokens_from"])
+    migrator.add_columns(
+        "users", api_token=peewee.CharField(default="", unique=True, index=True)
+    )

@@ -6,18 +6,18 @@ from app.classes.models.users import Users
 def migrate(migrator, db):
     class ApiKeys(peewee.Model):
         token_id = peewee.AutoField()
-        name = peewee.CharField(default='', unique=True, index=True)
+        name = peewee.CharField(default="", unique=True, index=True)
         created = peewee.DateTimeField(default=datetime.datetime.now)
-        user = peewee.ForeignKeyField(Users, backref='api_token', index=True)
-        server_permissions = peewee.CharField(default='00000000')
-        crafty_permissions = peewee.CharField(default='000')
+        user = peewee.ForeignKeyField(Users, backref="api_token", index=True)
+        server_permissions = peewee.CharField(default="00000000")
+        crafty_permissions = peewee.CharField(default="000")
         superuser = peewee.BooleanField(default=False)

         class Meta:
-            table_name = 'api_keys'
+            table_name = "api_keys"

     migrator.create_table(ApiKeys)


 def rollback(migrator, db):
-    migrator.drop_table('api_keys')
+    migrator.drop_table("api_keys")

@@ -1,18 +1,18 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('schedules', parent=peewee.IntegerField(null=True))
-    migrator.add_columns('schedules', delay=peewee.IntegerField(default=0))
+    migrator.add_columns("schedules", parent=peewee.IntegerField(null=True))
+    migrator.add_columns("schedules", delay=peewee.IntegerField(default=0))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('schedules', ['parent'])
-    migrator.drop_columns('schedules', ['delay'])
+    migrator.drop_columns("schedules", ["parent"])
+    migrator.drop_columns("schedules", ["delay"])
     """
     Write your rollback migrations here.
     """

@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', server_order=peewee.CharField(default=''))
+    migrator.add_columns("users", server_order=peewee.CharField(default=""))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['server_order'])
+    migrator.drop_columns("users", ["server_order"])
     """
     Write your rollback migrations here.
     """

@@ -1,8 +1,10 @@
 # Generated by database migrator
 import peewee

+
 def migrate(migrator, db):
-    migrator.rename_column('backups', 'directories', 'excluded_dirs')
+    migrator.rename_column("backups", "directories", "excluded_dirs")

+
 def rollback(migrator, db):
-    migrator.rename_column('backups', 'excluded_dirs', 'directories')
+    migrator.rename_column("backups", "excluded_dirs", "directories")

@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('backups', compress=peewee.BooleanField(default=False))
+    migrator.add_columns("backups", compress=peewee.BooleanField(default=False))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('backups', ['compress'])
+    migrator.drop_columns("backups", ["compress"])
     """
     Write your rollback migrations here.
     """

@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('server_stats', downloading=peewee.BooleanField(default=False))
+    migrator.add_columns("server_stats", downloading=peewee.BooleanField(default=False))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('server_stats', ['downloading'])
+    migrator.drop_columns("server_stats", ["downloading"])
     """
     Write your rollback migrations here.
     """

@@ -1,16 +1,16 @@
 # Generated by database migrator
 import peewee


 def migrate(migrator, database, **kwargs):
-    migrator.add_columns('users', preparing=peewee.BooleanField(default=False))
+    migrator.add_columns("users", preparing=peewee.BooleanField(default=False))
     """
     Write your migrations here.
     """


 def rollback(migrator, database, **kwargs):
-    migrator.drop_columns('users', ['preparing'])
+    migrator.drop_columns("users", ["preparing"])
     """
     Write your rollback migrations here.
     """

@@ -1,3 +1,4 @@
+---
 version: '3'

 services:

@@ -1,3 +1,4 @@
+---
 version: '3'

 services:

main.py (68 lines changed)
@@ -7,8 +7,12 @@ import logging.config
 import signal
 from app.classes.shared.console import console
 from app.classes.shared.helpers import helper

 if helper.checkRoot():
-    console.critical("Root detected. Root/Admin access denied. Run Crafty again with non-elevated permissions.")
+    console.critical(
+        "Root detected. Root/Admin access denied. "
+        "Run Crafty again with non-elevated permissions."
+    )
     time.sleep(5)
     console.critical("Crafty shutting down. Root/Admin access denied.")
     sys.exit(0)

@@ -39,18 +43,14 @@ def do_intro():


 def setup_logging(debug=True):
-    logging_config_file = os.path.join(os.path.curdir,
-                                       'app',
-                                       'config',
-                                       'logging.json'
-                                       )
+    logging_config_file = os.path.join(os.path.curdir, "app", "config", "logging.json")

     if os.path.exists(logging_config_file):
         # open our logging config file
-        with open(logging_config_file, 'rt', encoding='utf-8') as f:
+        with open(logging_config_file, "rt", encoding="utf-8") as f:
             logging_config = json.load(f)
             if debug:
-                logging_config['loggers']['']['level'] = 'DEBUG'
+                logging_config["loggers"][""]["level"] = "DEBUG"

             logging.config.dictConfig(logging_config)

@@ -61,22 +61,22 @@ def setup_logging(debug=True):


 # Our Main Starter
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser("Crafty Controller - A Server Management System")

-    parser.add_argument('-i', '--ignore',
-                        action='store_true',
-                        help="Ignore session.lock files"
-                        )
+    parser.add_argument(
+        "-i", "--ignore", action="store_true", help="Ignore session.lock files"
+    )

-    parser.add_argument('-v', '--verbose',
-                        action='store_true',
-                        help="Sets logging level to debug."
-                        )
+    parser.add_argument(
+        "-v", "--verbose", action="store_true", help="Sets logging level to debug."
+    )

-    parser.add_argument('-d', '--daemon',
-                        action='store_true',
-                        help="Runs Crafty in daemon mode (no prompt)"
-                        )
+    parser.add_argument(
+        "-d",
+        "--daemon",
+        action="store_true",
+        help="Runs Crafty in daemon mode (no prompt)",
+    )

     args = parser.parse_args()

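Black keeps an add_argument call on a single wrapped line when everything fits within its 88-character limit (the -i and -v flags) and explodes it one argument per line when it does not (the -d flag). A small self-contained sketch of the same pattern, runnable on its own:

import argparse

# Mirrors the flags above; only the layout differs between the two call
# styles, the resulting parser behaves the same either way.
parser = argparse.ArgumentParser("Crafty Controller - A Server Management System")
parser.add_argument(
    "-i", "--ignore", action="store_true", help="Ignore session.lock files"
)
parser.add_argument(
    "-d",
    "--daemon",
    action="store_true",
    help="Runs Crafty in daemon mode (no prompt)",
)

args = parser.parse_args(["--daemon"])
assert args.daemon and not args.ignore
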
@@ -95,7 +95,6 @@ if __name__ == '__main__':
     # our session file, helps prevent multiple controller agents on the same machine.
     helper.create_session_file(ignore=args.ignore)

-
     migration_manager = MigrationManager(database)
     migration_manager.up()  # Automatically runs migrations

@@ -104,8 +103,12 @@ if __name__ == '__main__':

     if fresh_install:
         console.debug("Fresh install detected")
-        console.warning("We have detected a fresh install. Please be sure to forward Crafty's port, " +
-                        f"{helper.get_setting('https_port')}, through your router/firewall if you would like to be able to access Crafty remotely.")
+        console.warning(
+            f"We have detected a fresh install. Please be sure to forward "
+            f"Crafty's port, {helper.get_setting('https_port')}, "
+            f"through your router/firewall if you would like to be able "
+            f"to access Crafty remotely."
+        )
         installer.default_settings()
     else:
         console.debug("Existing install detected")

@@ -116,7 +119,7 @@ if __name__ == '__main__':
     tasks_manager.start_webserver()

     # slowing down reporting just for a 1/2 second so messages look cleaner
-    time.sleep(.5)
+    time.sleep(0.5)

     # init servers
     logger.info("Initializing all servers defined")

@@ -127,18 +130,23 @@ if __name__ == '__main__':
     # start stats logging
     tasks_manager.start_stats_recording()

-    # once the controller is up and stats are logging, we can kick off the scheduler officially
+    # once the controller is up and stats are logging, we can kick off
+    # the scheduler officially
     tasks_manager.start_scheduler()

-    # refresh our cache and schedule for every 12 hoursour cache refresh for serverjars.com
+    # refresh our cache and schedule for every 12 hoursour cache refresh
+    # for serverjars.com
     tasks_manager.serverjar_cache_refresher()

     logger.info("Checking Internet. This may take a minute.")
     console.info("Checking Internet. This may take a minute.")

     if not helper.check_internet():
-        console.warning("We have detected the machine running Crafty has no connection to the internet. " +
-                        "Client connections to the server may be limited.")
+        console.warning(
+            "We have detected the machine running Crafty has no "
+            "connection to the internet. Client connections to "
+            "the server may be limited."
+        )

     if not controller.check_system_user():
         controller.add_system_user()

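The long warning messages are now split across adjacent string literals instead of being joined with "+". Adjacent literals are concatenated at compile time, so the emitted text is unchanged; a quick standalone check using the internet-warning message from the hunk above:

# "+" concatenation at runtime versus adjacent-literal concatenation at
# compile time: both build exactly the same message string.
old_style = (
    "We have detected the machine running Crafty has no connection to the internet. "
    + "Client connections to the server may be limited."
)
new_style = (
    "We have detected the machine running Crafty has no "
    "connection to the internet. Client connections to "
    "the server may be limited."
)
assert old_style == new_style
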
@@ -152,8 +160,12 @@ if __name__ == '__main__':

     def sigterm_handler(*sig):
         print()  # for newline
-        logger.info(f"Recieved {signal.Signals(sig[0]).name} [{sig[0]}], stopping Crafty...")
-        console.info(f"Recieved {signal.Signals(sig[0]).name} [{sig[0]}], stopping Crafty...")
+        logger.info(
+            f"Recieved {signal.Signals(sig[0]).name} [{sig[0]}], stopping Crafty..."
+        )
+        console.info(
+            f"Recieved {signal.Signals(sig[0]).name} [{sig[0]}], stopping Crafty..."
+        )
         tasks_manager._main_graceful_exit()
         Crafty.universal_exit()
