Mirror of https://gitlab.com/crafty-controller/crafty-4.git
Create pylintrc, code review pipeline & correct codebase errors
Fix uploads; only send server stats to the user page when the user has access to servers
This commit is contained in:
parent 882126d1f3
commit e0ce1d118c
CI pipeline configuration:

@@ -1,4 +1,5 @@
 stages:
+  - test
   - prod-deployment
   - dev-deployment
 
@@ -6,6 +7,36 @@ variables:
   DOCKER_HOST: tcp://docker:2376
   DOCKER_TLS_CERTDIR: "/certs"
 
+pylint:
+  stage: test
+  image: python:3.7-slim
+  services:
+    - name: docker:dind
+  tags:
+    - 'docker_testers'
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
+      when: never
+  before_script:
+    - mkdir -p public/badges public/lint
+    - echo undefined > public/badges/$CI_JOB_NAME.score
+    - pip install pylint-gitlab
+  script:
+    - pylint --exit-zero --output-format=text $(find -type f -name "*.py" ! -path "**/.venv/**" ! -path "**/app/migrations/**") | tee /tmp/pylint.txt
+    - sed -n 's/^Your code has been rated at \([-0-9.]*\)\/.*/\1/p' /tmp/pylint.txt > public/badges/$CI_JOB_NAME.score
+    - pylint --exit-zero --output-format=pylint_gitlab.GitlabCodeClimateReporter $(find -type f -name "*.py" ! -path "**/.venv/**" ! -path "**/app/migrations/**") > codeclimate.json
+  after_script:
+    - anybadge --overwrite --label $CI_JOB_NAME --value=$(cat public/badges/$CI_JOB_NAME.score) --file=public/badges/$CI_JOB_NAME.svg 4=red 6=orange 8=yellow 10=green
+    - |
+      echo "Your score is: $(cat public/badges/$CI_JOB_NAME.score)"
+  artifacts:
+    paths:
+      - public
+    reports:
+      codequality: codeclimate.json
+    when: always
+
 docker-build-dev:
   image: docker:latest
   services:
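The sed expression in the badge step pulls the numeric score out of pylint's closing summary line ("Your code has been rated at 9.12/10 ..."). A minimal Python sketch of the same extraction, assuming that standard summary-line format:

import re

# Example pylint summary line; the CI job reads this from /tmp/pylint.txt.
summary = "Your code has been rated at 9.12/10 (previous run: 8.97/10, +0.15)"

match = re.search(r"rated at ([-0-9.]+)/", summary)
score = match.group(1) if match else "undefined"  # mirrors the 'echo undefined' fallback
print(score)  # -> 9.12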
.pylintrc (new file, 603 lines):

@@ -0,0 +1,603 @@
[MASTER]

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=

# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=

# Specify a score threshold to be exceeded before program exits with error.
fail-under=10.0

# Files or directories to be skipped. They should be base names, not paths.
ignore=

# Add files or directories matching the regex patterns to the ignore-list. The
# regex matches against paths and can be in Posix or Windows format.
ignore-paths=app/migrations

# Files or directories matching the regex patterns are skipped. The regex
# matches against base names, not paths.
ignore-patterns=

# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=

# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=0

# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100

# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=

# Pickle collected data for later comparisons.
persistent=yes

# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
py-version=3.9

# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes

# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no


[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=abstract-method,
        attribute-defined-outside-init,
        bad-inline-option,
        bare-except,
        broad-except,
        cell-var-from-loop,
        consider-iterating-dictionary,
        consider-using-with,
        deprecated-pragma,
        duplicate-code,
        file-ignored,
        fixme,
        import-error,
        inconsistent-return-statements,
        invalid-name,
        locally-disabled,
        logging-format-interpolation,
        logging-fstring-interpolation,
        logging-not-lazy,
        missing-docstring,
        no-else-break,
        no-else-continue,
        no-else-return,
        no-self-use,
        no-value-for-parameter,
        not-an-iterable,
        protected-access,
        simplifiable-condition,
        simplifiable-if-statement,
        suppressed-message,
        too-few-public-methods,
        too-many-arguments,
        too-many-branches,
        too-many-instance-attributes,
        too-many-locals,
        too-many-nested-blocks,
        too-many-public-methods,
        too-many-return-statements,
        too-many-statements,
        use-symbolic-message-instead,
        useless-suppression,
        raw-checker-failed

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member


[REPORTS]

# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)

# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=

# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text

# Tells whether to display a full report or only the messages.
reports=no

# Activate the evaluation score.
score=yes


[REFACTORING]

# Maximum number of nested blocks for function / method body
max-nested-blocks=5

# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error


[BASIC]

# Naming style matching correct argument names.
argument-naming-style=snake_case

# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=

# Naming style matching correct attribute names.
attr-naming-style=snake_case

# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=

# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
          bar,
          baz,
          toto,
          tutu,
          tata

# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=

# Naming style matching correct class attribute names.
class-attribute-naming-style=any

# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=

# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE

# Regular expression matching correct class constant names. Overrides class-
# const-naming-style.
#class-const-rgx=

# Naming style matching correct class names.
class-naming-style=PascalCase

# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=

# Naming style matching correct constant names.
const-naming-style=UPPER_CASE

# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=

# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1

# Naming style matching correct function names.
function-naming-style=snake_case

# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=

# Good variable names which should always be accepted, separated by a comma.
good-names=i,
           j,
           k,
           ex,
           Run,
           _

# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=

# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no

# Naming style matching correct inline iteration names.
inlinevar-naming-style=any

# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=

# Naming style matching correct method names.
method-naming-style=snake_case

# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=

# Naming style matching correct module names.
module-naming-style=snake_case

# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=

# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_

# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty

# Naming style matching correct variable names.
variable-naming-style=snake_case

# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=


[FORMAT]

# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=

# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$

# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '

# Maximum number of characters on a single line.
max-line-length=150

# Maximum number of lines in a module.
max-module-lines=2000

# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no

# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no


[LOGGING]

# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old

# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
      XXX,
      TODO

# Regular expression of note tags to take in consideration.
#notes-rgx=


[SIMILARITIES]

# Comments are removed from the similarity computation
ignore-comments=yes

# Docstrings are removed from the similarity computation
ignore-docstrings=yes

# Imports are removed from the similarity computation
ignore-imports=no

# Signatures are removed from the similarity computation
ignore-signatures=no

# Minimum lines number of a similarity.
min-similarity-lines=4


[SPELLING]

# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4

# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the 'python-enchant' package.
spelling-dict=

# List of comma separated words that should be considered directives if they
# appear and the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:

# List of comma separated words that should not be checked.
spelling-ignore-words=

# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=

# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no


[STRING]

# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no

# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no


[TYPECHECK]

# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=os.*

# Tells whether missing members accessed in mixin class should be ignored. A
# class is considered mixin if its name matches the mixin-class-rgx option.
ignore-mixin-members=yes

# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes

# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes

# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local

# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=

# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes

# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1

# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1

# Regex pattern to define which classes are considered mixins ignore-mixin-
# members is set to 'yes'
mixin-class-rgx=.*[Mm]ixin

# List of decorators that change the signature of a decorated function.
signature-mutators=


[VARIABLES]

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=

# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes

# List of names allowed to shadow builtins
allowed-redefined-builtins=

# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
          _cb

# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_

# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_

# Tells whether we should check for unused import in __init__ files.
init-import=no

# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io


[CLASSES]

# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
                      __new__,
                      setUp,
                      __post_init__

# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
                  _fields,
                  _replace,
                  _source,
                  _make

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls


[DESIGN]

# List of regular expressions of class ancestor names to ignore when counting
# public methods (see R0903)
exclude-too-few-public-methods=

# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=

# Maximum number of arguments for function / method.
max-args=8

# Maximum number of attributes for a class (see R0902).
max-attributes=7

# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5

# Maximum number of branch for function / method body.
max-branches=12

# Maximum number of locals for function / method body.
max-locals=15

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of public methods for a class (see R0904).
max-public-methods=20

# Maximum number of return / yield for function / method body.
max-returns=6

# Maximum number of statements in function / method body.
max-statements=50

# Minimum number of public methods for a class (see R0903).
min-public-methods=2


[IMPORTS]

# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=

# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no

# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=

# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=

# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=

# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=

# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=enchant

# Couples of modules and preferred modules, separated by a comma.
preferred-modules=


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
                       Exception
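Most of the noisy checks are turned off globally above, so any remaining suppressions can be scoped to a single line with an inline pragma. A small illustrative sketch (the handler function is hypothetical; only the pragma syntax comes from pylint):

def handler(event, context):  # pylint: disable=unused-argument
    # Both parameters are intentionally ignored in this stub; the inline
    # pragma keeps pylint quiet for this line only.
    return {"ok": True}

print(handler({"type": "ping"}, None))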
Crafty permissions controller (Crafty_Perms_Controller):

@@ -1,23 +1,6 @@
-import os
-import time
 import logging
-import sys
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
 from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
-
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats
 from app.classes.models.users import ApiKeys
 
 logger = logging.getLogger(__name__)
@@ -43,13 +26,13 @@ class Crafty_Perms_Controller:
         return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Server_Creation)
 
     @staticmethod
-    def can_add_user(user_id):
+    def can_add_user(): # Add back argument 'user_id' when you work on this
         #TODO: Complete if we need a User Addition limit
         #return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.User_Config)
         return True
 
     @staticmethod
-    def can_add_role(user_id):
+    def can_add_role(): # Add back argument 'user_id' when you work on this
         #TODO: Complete if we need a Role Addition limit
         #return crafty_permissions.can_add_in_crafty(user_id, Enum_Permissions_Crafty.Roles_Config)
         return True
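Dropping the unused user_id parameter (and noting it in a trailing comment) is how the commit quiets pylint's unused-argument warning on these placeholder methods. A minimal sketch of the same pattern on a hypothetical stub:

class QuotaChecker:
    @staticmethod
    def can_add_widget():  # Add back argument 'user_id' when quotas are implemented
        # TODO: enforce a real per-user limit here.
        return True

# Callers simply stop passing the argument until the check is implemented.
assert QuotaChecker.can_add_widget() is True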
Management controller (Management_Controller):

@@ -1,25 +1,8 @@
-import os
-import time
 import logging
-import sys
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
 from app.classes.models.management import management_helper
 from app.classes.models.servers import servers_helper
-
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats
 
 logger = logging.getLogger(__name__)
 
 class Management_Controller:
@@ -43,8 +26,7 @@ class Management_Controller:
         server_name = servers_helper.get_server_friendly_name(server_id)
 
         # Example: Admin issued command start_server for server Survival
-        management_helper.add_to_audit_log(user_id, "issued command {} for server {}".format(command, server_name),
-                                           server_id, remote_ip)
+        management_helper.add_to_audit_log(user_id, f"issued command {command} for server {server_name}", server_id, remote_ip)
         management_helper.add_command(server_id, user_id, remote_ip, command)
 
     @staticmethod
@@ -71,7 +53,16 @@ class Management_Controller:
     #************************************************************************************************
     @staticmethod
     def create_scheduled_task(server_id, action, interval, interval_type, start_time, command, comment=None, enabled=True):
-        return management_helper.create_scheduled_task(server_id, action, interval, interval_type, start_time, command, comment, enabled)
+        return management_helper.create_scheduled_task(
+            server_id,
+            action,
+            interval,
+            interval_type,
+            start_time,
+            command,
+            comment,
+            enabled
+        )
 
     @staticmethod
     def delete_scheduled_task(schedule_id):
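The audit-log change above is representative of the pattern applied across the commit: str.format() calls become f-strings, and calls that would exceed the 150-character max-line-length are wrapped one argument per line. A minimal sketch of the same pattern, using a hypothetical add_to_audit_log stand-in:

def add_to_audit_log(user_id, message, server_id, remote_ip):
    # Hypothetical stand-in for management_helper.add_to_audit_log, used only
    # to make this sketch runnable.
    print(user_id, message, server_id, remote_ip)

user_id, command, server_name = 1, "start_server", "Survival"

# Before: format-style interpolation split awkwardly across two lines.
add_to_audit_log(user_id, "issued command {} for server {}".format(command, server_name), 2, "127.0.0.1")

# After: a single f-string keeps the message readable and under max-line-length=150.
add_to_audit_log(user_id, f"issued command {command} for server {server_name}", 2, "127.0.0.1")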
Roles controller (Roles_Controller):

@@ -1,25 +1,10 @@
-import os
-import time
 import logging
-import sys
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
 from app.classes.models.roles import roles_helper
 from app.classes.models.server_permissions import server_permissions
 from app.classes.models.users import users_helper
 
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats
+from app.classes.shared.helpers import helper
 
 logger = logging.getLogger(__name__)
 
@@ -45,7 +30,6 @@ class Roles_Controller:
         base_data = Roles_Controller.get_role_with_servers(role_id)
         up_data = {}
         added_servers = set()
-        edited_servers = set()
         removed_servers = set()
         for key in role_data:
             if key == "role_id":
@@ -56,7 +40,7 @@ class Roles_Controller:
             elif base_data[key] != role_data[key]:
                 up_data[key] = role_data[key]
         up_data['last_update'] = helper.get_time_as_string()
-        logger.debug("role: {} +server:{} -server{}".format(role_data, added_servers, removed_servers))
+        logger.debug(f"role: {role_data} +server:{added_servers} -server{removed_servers}")
         for server in added_servers:
             server_permissions.get_or_create(role_id, server, permissions_mask)
         for server in base_data['servers']:
Server permissions controller (Server_Perms_Controller):

@@ -1,32 +1,20 @@
-import os
-import time
 import logging
-import sys
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
-from app.classes.shared.main_models import db_helper
 from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
 from app.classes.models.users import users_helper, ApiKeys
 from app.classes.models.roles import roles_helper
 from app.classes.models.servers import servers_helper
 
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats
+from app.classes.shared.main_models import db_helper
 
 logger = logging.getLogger(__name__)
 
 class Server_Perms_Controller:
 
+    @staticmethod
+    def get_server_user_list(server_id):
+        return server_permissions.get_server_user_list(server_id)
+
     @staticmethod
     def list_defined_permissions():
         permissions_list = server_permissions.get_permissions_list()
@@ -54,7 +42,9 @@ class Server_Perms_Controller:
     def backup_role_swap(old_server_id, new_server_id):
         role_list = server_permissions.get_server_roles(old_server_id)
         for role in role_list:
-            server_permissions.add_role_server(new_server_id, role.role_id, server_permissions.get_permissions_mask(int(role.role_id), int(old_server_id)))
+            server_permissions.add_role_server(
+                new_server_id, role.role_id,
+                server_permissions.get_permissions_mask(int(role.role_id), int(old_server_id)))
             #server_permissions.add_role_server(new_server_id, role.role_id, '00001000')
 
     #************************************************************************************************
@@ -85,19 +75,6 @@ class Server_Perms_Controller:
     def get_api_key_permissions_list(key: ApiKeys, server_id: str):
         return server_permissions.get_api_key_permissions_list(key, server_id)
 
-    @staticmethod
-    def get_user_id_permissions_list(user_id: str, server_id: str):
-        return server_permissions.get_user_id_permissions_list(user_id, server_id)
-
-    @staticmethod
-    def get_api_key_id_permissions_list(key_id: str, server_id: str):
-        key = users_helper.get_user_api_key(key_id)
-        return server_permissions.get_api_key_permissions_list(key, server_id)
-
-    @staticmethod
-    def get_api_key_permissions_list(key: ApiKeys, server_id: str):
-        return server_permissions.get_api_key_permissions_list(key, server_id)
-
     @staticmethod
     def get_authorized_servers_stats_from_roles(user_id):
         user_roles = users_helper.get_user_roles_id(user_id)
Servers controller (Servers_Controller):

@@ -1,29 +1,15 @@
-from app.classes.controllers.roles_controller import Roles_Controller
 import os
-import time
 import logging
 import json
-import sys
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
-from app.classes.shared.main_models import db_helper
 from app.classes.models.servers import servers_helper
-from app.classes.models.roles import roles_helper
 from app.classes.models.users import users_helper, ApiKeys
 from app.classes.models.server_permissions import server_permissions, Enum_Permissions_Server
 
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats
+from app.classes.shared.helpers import helper
+from app.classes.shared.main_models import db_helper
+
+from app.classes.controllers.roles_controller import Roles_Controller
 
 logger = logging.getLogger(__name__)
 
@@ -33,8 +19,26 @@ class Servers_Controller:
     # Generic Servers Methods
     #************************************************************************************************
     @staticmethod
-    def create_server(name: str, server_uuid: str, server_dir: str, backup_path: str, server_command: str, server_file: str, server_log_file: str, server_stop: str, server_port=25565):
-        return servers_helper.create_server(name, server_uuid, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, server_port)
+    def create_server(
+            name: str,
+            server_uuid: str,
+            server_dir: str,
+            backup_path: str,
+            server_command: str,
+            server_file: str,
+            server_log_file: str,
+            server_stop: str,
+            server_port=25565):
+        return servers_helper.create_server(
+            name,
+            server_uuid,
+            server_dir,
+            backup_path,
+            server_command,
+            server_file,
+            server_log_file,
+            server_stop,
+            server_port)
 
     @staticmethod
     def get_server_obj(server_id):
@@ -183,7 +187,7 @@ class Servers_Controller:
         path = os.path.join(server_path, 'banned-players.json')
 
         try:
-            with open(helper.get_os_understandable_path(path)) as file:
+            with open(helper.get_os_understandable_path(path), encoding='utf-8') as file:
                 content = file.read()
                 file.close()
         except Exception as ex:
@@ -210,4 +214,3 @@ class Servers_Controller:
         if helper.check_file_exists(log_file_path) and \
                 helper.is_file_older_than_x_days(log_file_path, logs_delete_after):
             os.remove(log_file_path)
-
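Passing encoding='utf-8' explicitly, as done for banned-players.json above, avoids pylint's unspecified-encoding warning and platform-dependent default encodings. A small sketch under the assumption that the file in question is UTF-8 JSON:

import json
import os
import tempfile

fd, path = tempfile.mkstemp(suffix=".json")
os.close(fd)

# Write and read the file with an explicit encoding so behaviour does not
# depend on the platform's locale default.
with open(path, "w", encoding="utf-8") as f:
    json.dump([{"name": "example_player"}], f)

with open(path, encoding="utf-8") as f:
    banned_players = json.load(f)
print(banned_players)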
Users controller (Users_Controller):

@@ -1,23 +1,11 @@
-import os
-import time
 import logging
-import sys
 from typing import Optional
 
-import yaml
-import asyncio
-import shutil
-import tempfile
-import zipfile
-from distutils import dir_util
-
 from app.classes.shared.helpers import helper
-from app.classes.shared.console import console
 
-from app.classes.models.users import Users, users_helper
 from app.classes.shared.authentication import authentication
+from app.classes.models.users import users_helper
 from app.classes.models.crafty_permissions import crafty_permissions, Enum_Permissions_Crafty
-from app.classes.models.management import management_helper
 
 logger = logging.getLogger(__name__)
 
@@ -60,7 +48,6 @@ class Users_Controller:
         up_data = {}
         added_roles = set()
         removed_roles = set()
-        removed_servers = set()
         for key in user_data:
             if key == "user_id":
                 continue
@@ -74,7 +61,7 @@ class Users_Controller:
                 up_data[key] = user_data[key]
         up_data['last_update'] = helper.get_time_as_string()
         up_data['lang'] = user_data['lang']
-        logger.debug("user: {} +role:{} -role:{}".format(user_data, added_roles, removed_roles))
+        logger.debug(f"user: {user_data} +role:{added_roles} -role:{removed_roles}")
         for role in added_roles:
             users_helper.get_or_create(user_id=user_id, role_id=role)
         permissions_mask = user_crafty_data.get('permissions_mask', '000')
@@ -90,7 +77,12 @@ class Users_Controller:
         limit_user_creation = 0
         limit_role_creation = 0
 
-        crafty_permissions.add_or_update_user(user_id, permissions_mask, limit_server_creation, limit_user_creation, limit_role_creation)
+        crafty_permissions.add_or_update_user(
+            user_id,
+            permissions_mask,
+            limit_server_creation,
+            limit_user_creation,
+            limit_role_creation)
 
         users_helper.delete_user_roles(user_id, removed_roles)
 
Minecraft MOTD/ping module:

@@ -1,11 +1,10 @@
-from app.classes.shared.helpers import Helpers
 import struct
 import socket
 import base64
 import json
-import sys
 import os
 import logging.config
 
 from app.classes.shared.console import console
 
 logger = logging.getLogger(__name__)
@@ -101,13 +100,13 @@ def get_code_format(format_name):
         if format_name in data.keys():
             return data.get(format_name)
         else:
-            logger.error("Format MOTD Error: format name {} does not exist".format(format_name))
-            console.error("Format MOTD Error: format name {} does not exist".format(format_name))
+            logger.error(f"Format MOTD Error: format name {format_name} does not exist")
+            console.error(f"Format MOTD Error: format name {format_name} does not exist")
             return ""
 
     except Exception as e:
-        logger.critical("Config File Error: Unable to read {} due to {}".format(format_file, e))
-        console.critical("Config File Error: Unable to read {} due to {}".format(format_file, e))
+        logger.critical(f"Config File Error: Unable to read {format_file} due to {e}")
+        console.critical(f"Config File Error: Unable to read {format_file} due to {e}")
 
         return ""
 
@@ -126,15 +125,14 @@ def ping(ip, port):
            j += 1
            if j > 5:
                raise ValueError('var_int too big')
-           if not (k & 0x80):
+           if not k & 0x80:
                return i
 
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((ip, port))
 
-   except socket.error as err:
-       pass
+   except socket.error:
        return False
 
    try:
@@ -163,7 +161,7 @@ def ping(ip, port):
                return False
 
            data += chunk
-       logger.debug("Server reports this data on ping: {}".format(data))
+       logger.debug(f"Server reports this data on ping: {data}")
        return Server(json.loads(data))
    finally:
        sock.close()
Server properties parser (ServerProps):

@@ -9,7 +9,7 @@ class ServerProps:
 
     def _parse(self):
         """Loads and parses the file specified in self.filepath"""
-        with open(self.filepath) as fp:
+        with open(self.filepath, encoding='utf-8') as fp:
             line = fp.readline()
             d = {}
             if os.path.exists(".header"):
@@ -24,7 +24,7 @@ class ServerProps:
                     s2 = s[s.find('=')+1:]
                     d[s1] = s2
                 else:
-                    with open(".header", "a+") as h:
+                    with open(".header", "a+", encoding='utf-8') as h:
                         h.write(line)
                 line = fp.readline()
             return d
@@ -47,9 +47,9 @@ class ServerProps:
 
     def save(self):
         """Writes to the new file"""
-        with open(self.filepath, "a+") as f:
+        with open(self.filepath, "a+", encoding='utf-8') as f:
             f.truncate(0)
-            with open(".header") as header:
+            with open(".header", encoding='utf-8') as header:
                 line = header.readline()
                 while line:
                     f.write(line)
@ -1,4 +1,3 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import json
|
import json
|
||||||
import threading
|
import threading
|
||||||
@ -9,19 +8,15 @@ from datetime import datetime
|
|||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.models.servers import Servers
|
|
||||||
from app.classes.minecraft.server_props import ServerProps
|
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
from app.classes.models.server_permissions import server_permissions
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as err:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {err.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {err.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@ -31,7 +26,7 @@ class ServerJars:
|
|||||||
self.base_url = "https://serverjars.com"
|
self.base_url = "https://serverjars.com"
|
||||||
|
|
||||||
def _get_api_result(self, call_url: str):
|
def _get_api_result(self, call_url: str):
|
||||||
full_url = "{base}{call_url}".format(base=self.base_url, call_url=call_url)
|
full_url = f"{self.base_url}{call_url}"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = requests.get(full_url, timeout=2)
|
r = requests.get(full_url, timeout=2)
|
||||||
@ -39,20 +34,20 @@ class ServerJars:
|
|||||||
if r.status_code not in [200, 201]:
|
if r.status_code not in [200, 201]:
|
||||||
return {}
|
return {}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to connect to serverjar.com api due to error: {}".format(e))
|
logger.error(f"Unable to connect to serverjar.com api due to error: {e}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
api_data = json.loads(r.content)
|
api_data = json.loads(r.content)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to parse serverjar.com api result due to error: {}".format(e))
|
logger.error(f"Unable to parse serverjar.com api result due to error: {e}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
api_result = api_data.get('status')
|
api_result = api_data.get('status')
|
||||||
api_response = api_data.get('response', {})
|
api_response = api_data.get('response', {})
|
||||||
|
|
||||||
if api_result != "success":
|
if api_result != "success":
|
||||||
logger.error("Api returned a failed status: {}".format(api_result))
|
logger.error(f"Api returned a failed status: {api_result}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
return api_response
|
return api_response
|
||||||
@ -62,11 +57,11 @@ class ServerJars:
|
|||||||
cache_file = helper.serverjar_cache
|
cache_file = helper.serverjar_cache
|
||||||
cache = {}
|
cache = {}
|
||||||
try:
|
try:
|
||||||
with open(cache_file, "r") as f:
|
with open(cache_file, "r", encoding='utf-8') as f:
|
||||||
cache = json.load(f)
|
cache = json.load(f)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to read serverjars.com cache file: {}".format(e))
|
logger.error(f"Unable to read serverjars.com cache file: {e}")
|
||||||
|
|
||||||
return cache
|
return cache
|
||||||
|
|
||||||
@ -100,7 +95,7 @@ class ServerJars:
|
|||||||
def _check_api_alive(self):
|
def _check_api_alive(self):
|
||||||
logger.info("Checking serverjars.com API status")
|
logger.info("Checking serverjars.com API status")
|
||||||
|
|
||||||
check_url = "{base}/api/fetchTypes".format(base=self.base_url)
|
check_url = f"{self.base_url}/api/fetchTypes"
|
||||||
try:
|
try:
|
||||||
r = requests.get(check_url, timeout=2)
|
r = requests.get(check_url, timeout=2)
|
||||||
|
|
||||||
@ -108,7 +103,7 @@ class ServerJars:
|
|||||||
logger.info("Serverjars.com API is alive")
|
logger.info("Serverjars.com API is alive")
|
||||||
return True
|
return True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to connect to serverjar.com api due to error: {}".format(e))
|
logger.error(f"Unable to connect to serverjar.com api due to error: {e}")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
logger.error("unable to contact serverjars.com api")
|
logger.error("unable to contact serverjars.com api")
|
||||||
@ -154,15 +149,15 @@ class ServerJars:
|
|||||||
|
|
||||||
# save our cache
|
# save our cache
|
||||||
try:
|
try:
|
||||||
with open(cache_file, "w") as f:
|
with open(cache_file, "w", encoding='utf-8') as f:
|
||||||
f.write(json.dumps(data, indent=4))
|
f.write(json.dumps(data, indent=4))
|
||||||
logger.info("Cache file refreshed")
|
logger.info("Cache file refreshed")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to update serverjars.com cache file: {}".format(e))
|
logger.error(f"Unable to update serverjars.com cache file: {e}")
|
||||||
|
|
||||||
def _get_jar_details(self, jar_type='servers'):
|
def _get_jar_details(self, jar_type='servers'):
|
||||||
url = '/api/fetchAll/{type}'.format(type=jar_type)
|
url = f'/api/fetchAll/{jar_type}'
|
||||||
response = self._get_api_result(url)
|
response = self._get_api_result(url)
|
||||||
temp = []
|
temp = []
|
||||||
for v in response:
|
for v in response:
|
||||||
@ -175,12 +170,12 @@ class ServerJars:
|
|||||||
response = self._get_api_result(url)
|
response = self._get_api_result(url)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def download_jar(self, server, version, path, name):
|
def download_jar(self, server, version, path):
|
||||||
update_thread = threading.Thread(target=self.a_download_jar, daemon=True, name="exe_download", args=(server, version, path, name))
|
update_thread = threading.Thread(target=self.a_download_jar, daemon=True, args=(server, version, path))
|
||||||
update_thread.start()
|
update_thread.start()
|
||||||
|
|
||||||
def a_download_jar(self, server, version, path, name):
|
def a_download_jar(self, server, version, path):
|
||||||
fetch_url = "{base}/api/fetchJar/{server}/{version}".format(base=self.base_url, server=server, version=version)
|
fetch_url = f"{self.base_url}/api/fetchJar/{server}/{version}"
|
||||||
|
|
||||||
# open a file stream
|
# open a file stream
|
||||||
with requests.get(fetch_url, timeout=2, stream=True) as r:
|
with requests.get(fetch_url, timeout=2, stream=True) as r:
|
||||||
@ -189,9 +184,7 @@ class ServerJars:
|
|||||||
shutil.copyfileobj(r.raw, output)
|
shutil.copyfileobj(r.raw, output)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to save jar to {path} due to error:{error}".format(path=path, error=e))
|
logger.error(f"Unable to save jar to {path} due to error:{e}")
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -1,16 +1,15 @@
|
|||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import time
|
|
||||||
import psutil
|
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
import base64
|
import base64
|
||||||
|
import psutil
|
||||||
|
|
||||||
|
from app.classes.models.management import Host_Stats
|
||||||
|
from app.classes.models.servers import Server_Stats, servers_helper
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.minecraft.mc_ping import ping
|
from app.classes.minecraft.mc_ping import ping
|
||||||
from app.classes.models.management import Host_Stats
|
|
||||||
from app.classes.models.servers import Server_Stats, servers_helper
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -64,8 +63,6 @@ class Stats:
|
|||||||
|
|
||||||
real_cpu = round(p.cpu_percent(interval=0.5) / psutil.cpu_count(), 2)
|
real_cpu = round(p.cpu_percent(interval=0.5) / psutil.cpu_count(), 2)
|
||||||
|
|
||||||
process_start_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(p.create_time()))
|
|
||||||
|
|
||||||
# this is a faster way of getting data for a process
|
# this is a faster way of getting data for a process
|
||||||
with p.oneshot():
|
with p.oneshot():
|
||||||
process_stats = {
|
process_stats = {
|
||||||
@ -76,7 +73,7 @@ class Stats:
|
|||||||
return process_stats
|
return process_stats
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Unable to get process details for pid: {} due to error: {}".format(process_pid, e))
|
logger.error(f"Unable to get process details for pid: {process_pid} due to error: {e}")
|
||||||
|
|
||||||
# Dummy Data
|
# Dummy Data
|
||||||
process_stats = {
|
process_stats = {
|
||||||
@ -119,13 +116,13 @@ class Stats:
|
|||||||
total_size = 0
|
total_size = 0
|
||||||
|
|
||||||
# do a scan of the directories in the server path.
|
# do a scan of the directories in the server path.
|
||||||
for root, dirs, files in os.walk(world_path, topdown=False):
|
for root, dirs, _files in os.walk(world_path, topdown=False):
|
||||||
|
|
||||||
# for each directory we find
|
# for each directory we find
|
||||||
for name in dirs:
|
for name in dirs:
|
||||||
|
|
||||||
# if the directory name is "region"
|
# if the directory name is "region"
|
||||||
if name == "region":
|
if str(name) == "region":
|
||||||
# log it!
|
# log it!
|
||||||
logger.debug("Path %s is called region. Getting directory size", os.path.join(root, name))
|
logger.debug("Path %s is called region. Getting directory size", os.path.join(root, name))
|
||||||
|
|
||||||
@ -144,14 +141,14 @@ class Stats:
|
|||||||
online_stats = json.loads(ping_obj.players)
|
online_stats = json.loads(ping_obj.players)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.info("Unable to read json from ping_obj: {}".format(e))
|
logger.info(f"Unable to read json from ping_obj: {e}")
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
server_icon = base64.encodebytes(ping_obj.icon)
|
server_icon = base64.encodebytes(ping_obj.icon)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
server_icon = False
|
server_icon = False
|
||||||
logger.info("Unable to read the server icon : {}".format(e))
|
logger.info(f"Unable to read the server icon : {e}")
|
||||||
|
|
||||||
ping_data = {
|
ping_data = {
|
||||||
'online': online_stats.get("online", 0),
|
'online': online_stats.get("online", 0),
|
||||||
@ -168,18 +165,18 @@ class Stats:
|
|||||||
|
|
||||||
server = servers_helper.get_server_data_by_id(server_id)
|
server = servers_helper.get_server_data_by_id(server_id)
|
||||||
|
|
||||||
logger.info("Getting players for server {}".format(server))
|
logger.info(f"Getting players for server {server}")
|
||||||
|
|
||||||
# get our settings and data dictionaries
|
# get our settings and data dictionaries
|
||||||
server_settings = server.get('server_settings', {})
|
# server_settings = server.get('server_settings', {})
|
||||||
server_data = server.get('server_data_obj', {})
|
# server_data = server.get('server_data_obj', {})
|
||||||
|
|
||||||
|
|
||||||
# TODO: search server properties file for possible override of 127.0.0.1
|
# TODO: search server properties file for possible override of 127.0.0.1
|
||||||
internal_ip = server['server_ip']
|
internal_ip = server['server_ip']
|
||||||
server_port = server['server_port']
|
server_port = server['server_port']
|
||||||
|
|
||||||
logger.debug("Pinging {} on port {}".format(internal_ip, server_port))
|
logger.debug("Pinging {internal_ip} on port {server_port}")
|
||||||
int_mc_ping = ping(internal_ip, int(server_port))
|
int_mc_ping = ping(internal_ip, int(server_port))
|
||||||
|
|
||||||
ping_data = {}
|
ping_data = {}
|
||||||
@ -205,7 +202,7 @@ class Stats:
|
|||||||
server = servers_helper.get_server_data_by_id(server_id)
|
server = servers_helper.get_server_data_by_id(server_id)
|
||||||
|
|
||||||
|
|
||||||
logger.debug('Getting stats for server: {}'.format(server_id))
|
logger.debug(f'Getting stats for server: {server_id}')
|
||||||
|
|
||||||
# get our server object, settings and data dictionaries
|
# get our server object, settings and data dictionaries
|
||||||
server_obj = s.get('server_obj', None)
|
server_obj = s.get('server_obj', None)
|
||||||
@ -223,8 +220,9 @@ class Stats:
|
|||||||
# TODO: search server properties file for possible override of 127.0.0.1
|
# TODO: search server properties file for possible override of 127.0.0.1
|
||||||
internal_ip = server['server_ip']
|
internal_ip = server['server_ip']
|
||||||
server_port = server['server_port']
|
server_port = server['server_port']
|
||||||
|
server = s.get('server_name', f"ID#{server_id}")
|
||||||
|
|
||||||
logger.debug("Pinging server '{}' on {}:{}".format(s.get('server_name', "ID#{}".format(server_id)), internal_ip, server_port))
|
logger.debug("Pinging server '{server}' on {internal_ip}:{server_port}")
|
||||||
int_mc_ping = ping(internal_ip, int(server_port))
|
int_mc_ping = ping(internal_ip, int(server_port))
|
||||||
|
|
||||||
int_data = False
|
int_data = False
|
||||||
@ -263,7 +261,7 @@ class Stats:
|
|||||||
server_stats = {}
|
server_stats = {}
|
||||||
server = self.controller.get_server_obj(server_id)
|
server = self.controller.get_server_obj(server_id)
|
||||||
|
|
||||||
logger.debug('Getting stats for server: {}'.format(server_id))
|
logger.debug(f'Getting stats for server: {server_id}')
|
||||||
|
|
||||||
# get our server object, settings and data dictionaries
|
# get our server object, settings and data dictionaries
|
||||||
server_obj = self.controller.get_server_obj(server_id)
|
server_obj = self.controller.get_server_obj(server_id)
|
||||||
@ -285,7 +283,7 @@ class Stats:
|
|||||||
server_port = server_settings.get('server-port', "25565")
|
server_port = server_settings.get('server-port', "25565")
|
||||||
|
|
||||||
|
|
||||||
logger.debug("Pinging server '{}' on {}:{}".format(server.name, internal_ip, server_port))
|
logger.debug(f"Pinging server '{server.name}' on {internal_ip}:{server_port}")
|
||||||
int_mc_ping = ping(internal_ip, int(server_port))
|
int_mc_ping = ping(internal_ip, int(server_port))
|
||||||
|
|
||||||
int_data = False
|
int_data = False
|
||||||
|
@ -1,26 +1,23 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.models.users import Users, ApiKeys
|
|
||||||
from app.classes.shared.permission_helper import permission_helper
|
from app.classes.shared.permission_helper import permission_helper
|
||||||
|
|
||||||
|
from app.classes.models.users import Users, ApiKeys
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger('peewee')
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, IntegerField, DoesNotExist
|
||||||
from playhouse.shortcuts import model_to_dict
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
@ -124,7 +121,7 @@ class Permissions_Crafty:
|
|||||||
def get_User_Crafty(user_id):
|
def get_User_Crafty(user_id):
|
||||||
try:
|
try:
|
||||||
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
user_crafty = User_Crafty.select().where(User_Crafty.user_id == user_id).get()
|
||||||
except User_Crafty.DoesNotExist:
|
except DoesNotExist:
|
||||||
user_crafty = User_Crafty.insert({
|
user_crafty = User_Crafty.insert({
|
||||||
User_Crafty.user_id: user_id,
|
User_Crafty.user_id: user_id,
|
||||||
User_Crafty.permissions: "000",
|
User_Crafty.permissions: "000",
|
||||||
@ -173,7 +170,6 @@ class Permissions_Crafty:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_crafty_limit_value(user_id, permission):
|
def get_crafty_limit_value(user_id, permission):
|
||||||
user_crafty = crafty_permissions.get_User_Crafty(user_id)
|
|
||||||
quantity_list = crafty_permissions.get_permission_quantity_list(user_id)
|
quantity_list = crafty_permissions.get_permission_quantity_list(user_id)
|
||||||
return quantity_list[permission]
|
return quantity_list[permission]
|
||||||
|
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
@ -6,26 +5,24 @@ import datetime
|
|||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.shared.main_models import db_helper
|
from app.classes.shared.main_models import db_helper
|
||||||
from app.classes.models.users import Users, users_helper
|
|
||||||
from app.classes.models.servers import Servers, servers_helper
|
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
from app.classes.models.server_permissions import server_permissions
|
|
||||||
import time
|
|
||||||
|
|
||||||
|
from app.classes.models.users import Users, users_helper
|
||||||
|
from app.classes.models.servers import Servers
|
||||||
|
from app.classes.models.server_permissions import server_permissions
|
||||||
|
|
||||||
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger('peewee')
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, IntegerField, DateTimeField, FloatField, TextField, AutoField, BooleanField
|
||||||
from playhouse.shortcuts import model_to_dict
|
from playhouse.shortcuts import model_to_dict
|
||||||
from enum import Enum
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
@ -140,7 +137,7 @@ class helpers_management:
|
|||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_latest_hosts_stats():
|
def get_latest_hosts_stats():
|
||||||
query = Host_Stats.select().order_by(Host_Stats.id.desc()).get()
|
query = Host_Stats.select().order_by(Host_Stats.id.desc()).get()
|
||||||
return model_to_dict(query)
|
return model_to_dict(query)
|
||||||
|
|
||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
@ -163,7 +160,7 @@ class helpers_management:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def mark_command_complete(command_id=None):
|
def mark_command_complete(command_id=None):
|
||||||
if command_id is not None:
|
if command_id is not None:
|
||||||
logger.debug("Marking Command {} completed".format(command_id))
|
logger.debug(f"Marking Command {command_id} completed")
|
||||||
Commands.update({
|
Commands.update({
|
||||||
Commands.executed: True
|
Commands.executed: True
|
||||||
}).where(Commands.command_id == command_id).execute()
|
}).where(Commands.command_id == command_id).execute()
|
||||||
@ -178,10 +175,10 @@ class helpers_management:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
|
def add_to_audit_log(user_id, log_msg, server_id=None, source_ip=None):
|
||||||
logger.debug("Adding to audit log User:{} - Message: {} ".format(user_id, log_msg))
|
logger.debug(f"Adding to audit log User:{user_id} - Message: {log_msg} ")
|
||||||
user_data = users_helper.get_user(user_id)
|
user_data = users_helper.get_user(user_id)
|
||||||
|
|
||||||
audit_msg = "{} {}".format(str(user_data['username']).capitalize(), log_msg)
|
audit_msg = f"{str(user_data['username']).capitalize()} {log_msg}"
|
||||||
|
|
||||||
server_users = server_permissions.get_server_user_list(server_id)
|
server_users = server_permissions.get_server_user_list(server_id)
|
||||||
for user in server_users:
|
for user in server_users:
|
||||||
@ -209,7 +206,17 @@ class helpers_management:
|
|||||||
# Schedules Methods
|
# Schedules Methods
|
||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_scheduled_task(server_id, action, interval, interval_type, start_time, command, comment=None, enabled=True, one_time=False, cron_string='* * * * *'):
|
def create_scheduled_task(
|
||||||
|
server_id,
|
||||||
|
action,
|
||||||
|
interval,
|
||||||
|
interval_type,
|
||||||
|
start_time,
|
||||||
|
command,
|
||||||
|
comment=None,
|
||||||
|
enabled=True,
|
||||||
|
one_time=False,
|
||||||
|
cron_string='* * * * *'):
|
||||||
sch_id = Schedules.insert({
|
sch_id = Schedules.insert({
|
||||||
Schedules.server_id: server_id,
|
Schedules.server_id: server_id,
|
||||||
Schedules.action: action,
|
Schedules.action: action,
|
||||||
@ -240,7 +247,7 @@ class helpers_management:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_scheduled_task(schedule_id):
|
def get_scheduled_task(schedule_id):
|
||||||
return model_to_dict(Schedules.get(Schedules.schedule_id == schedule_id)).execute()
|
return model_to_dict(Schedules.get(Schedules.schedule_id == schedule_id))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_scheduled_task_model(schedule_id):
|
def get_scheduled_task_model(schedule_id):
|
||||||
@ -256,7 +263,7 @@ class helpers_management:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_schedules_enabled():
|
def get_schedules_enabled():
|
||||||
return Schedules.select().where(Schedules.enabled == True).execute()
|
return Schedules.select().where(Schedules.enabled == True).execute()  # pylint: disable=singleton-comparison
|
||||||
|
|
||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
# Backups Methods
|
# Backups Methods
|
||||||
@ -282,12 +289,10 @@ class helpers_management:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def set_backup_config(server_id: int, backup_path: str = None, max_backups: int = None):
|
def set_backup_config(server_id: int, backup_path: str = None, max_backups: int = None):
|
||||||
logger.debug("Updating server {} backup config with {}".format(server_id, locals()))
|
logger.debug(f"Updating server {server_id} backup config with {locals()}")
|
||||||
try:
|
try:
|
||||||
row = Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
|
Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
|
||||||
new_row = False
|
new_row = False
|
||||||
conf = {}
|
conf = {}
|
||||||
schd = {}
|
|
||||||
except IndexError:
|
except IndexError:
|
||||||
conf = {
|
conf = {
|
||||||
"directories": None,
|
"directories": None,
|
||||||
@ -304,13 +309,13 @@ class helpers_management:
|
|||||||
else:
|
else:
|
||||||
u1 = 0
|
u1 = 0
|
||||||
u2 = Backups.update(conf).where(Backups.server_id == server_id).execute()
|
u2 = Backups.update(conf).where(Backups.server_id == server_id).execute()
|
||||||
logger.debug("Updating existing backup record. {}+{} rows affected".format(u1, u2))
|
logger.debug(f"Updating existing backup record. {u1}+{u2} rows affected")
|
||||||
else:
|
else:
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
conf["server_id"] = server_id
|
conf["server_id"] = server_id
|
||||||
if backup_path is not None:
|
if backup_path is not None:
|
||||||
u = Servers.update(backup_path=backup_path).where(Servers.server_id == server_id)
|
Servers.update(backup_path=backup_path).where(Servers.server_id == server_id)
|
||||||
b = Backups.create(**conf)
|
Backups.create(**conf)
|
||||||
logger.debug("Creating new backup record.")
|
logger.debug("Creating new backup record.")
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
@ -11,14 +10,12 @@ peewee_logger = logging.getLogger('peewee')
|
|||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, CharField, DoesNotExist, AutoField, DateTimeField
|
||||||
from playhouse.shortcuts import model_to_dict
|
from playhouse.shortcuts import model_to_dict
|
||||||
from enum import Enum
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
|
@ -1,28 +1,26 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
|
from app.classes.shared.permission_helper import permission_helper
|
||||||
|
|
||||||
from app.classes.models.servers import Servers
|
from app.classes.models.servers import Servers
|
||||||
from app.classes.models.roles import Roles
|
from app.classes.models.roles import Roles
|
||||||
from app.classes.models.users import User_Roles, users_helper, ApiKeys, Users
|
from app.classes.models.users import User_Roles, users_helper, ApiKeys, Users
|
||||||
from app.classes.shared.permission_helper import permission_helper
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
peewee_logger = logging.getLogger('peewee')
|
peewee_logger = logging.getLogger('peewee')
|
||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, CompositeKey, JOIN
|
||||||
from playhouse.shortcuts import model_to_dict
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
@ -118,7 +116,8 @@ class Permissions_Servers:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_role_server(server_id, role_id, rs_permissions="00000000"):
|
def add_role_server(server_id, role_id, rs_permissions="00000000"):
|
||||||
servers = Role_Servers.insert({Role_Servers.server_id: server_id, Role_Servers.role_id: role_id, Role_Servers.permissions: rs_permissions}).execute()
|
servers = Role_Servers.insert({Role_Servers.server_id: server_id, Role_Servers.role_id: role_id,
|
||||||
|
Role_Servers.permissions: rs_permissions}).execute()
|
||||||
return servers
|
return servers
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -181,6 +180,7 @@ class Permissions_Servers:
|
|||||||
def get_server_user_list(server_id):
|
def get_server_user_list(server_id):
|
||||||
final_users = []
|
final_users = []
|
||||||
server_roles = Role_Servers.select().where(Role_Servers.server_id == server_id)
|
server_roles = Role_Servers.select().where(Role_Servers.server_id == server_id)
|
||||||
|
# pylint: disable=singleton-comparison
|
||||||
super_users = Users.select().where(Users.superuser == True)
|
super_users = Users.select().where(Users.superuser == True)
|
||||||
for role in server_roles:
|
for role in server_roles:
|
||||||
users = User_Roles.select().where(User_Roles.role_id == role.role_id)
|
users = User_Roles.select().where(User_Roles.role_id == role.role_id)
|
||||||
|
@ -1,11 +1,9 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
|
|
||||||
from app.classes.shared.main_models import db_helper
|
from app.classes.shared.main_models import db_helper
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -13,14 +11,11 @@ peewee_logger = logging.getLogger('peewee')
|
|||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, IntegerField, FloatField
|
||||||
from playhouse.shortcuts import model_to_dict
|
|
||||||
from enum import Enum
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
@ -94,7 +89,16 @@ class helper_servers:
|
|||||||
# Generic Servers Methods
|
# Generic Servers Methods
|
||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_server(name: str, server_uuid: str, server_dir: str, backup_path: str, server_command: str, server_file: str, server_log_file: str, server_stop: str, server_port=25565):
|
def create_server(
|
||||||
|
name: str,
|
||||||
|
server_uuid: str,
|
||||||
|
server_dir: str,
|
||||||
|
backup_path: str,
|
||||||
|
server_command: str,
|
||||||
|
server_file: str,
|
||||||
|
server_log_file: str,
|
||||||
|
server_stop: str,
|
||||||
|
server_port=25565):
|
||||||
return Servers.insert({
|
return Servers.insert({
|
||||||
Servers.server_name: name,
|
Servers.server_name: name,
|
||||||
Servers.server_uuid: server_uuid,
|
Servers.server_uuid: server_uuid,
|
||||||
@ -153,7 +157,7 @@ class helper_servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_server_friendly_name(server_id):
|
def get_server_friendly_name(server_id):
|
||||||
server_data = servers_helper.get_server_data_by_id(server_id)
|
server_data = servers_helper.get_server_data_by_id(server_id)
|
||||||
friendly_name = "{} with ID: {}".format(server_data.get('server_name', None), server_data.get('server_id', 0))
|
friendly_name = f"{server_data.get('server_name', None)} with ID: {server_data.get('server_id', 0)}"
|
||||||
return friendly_name
|
return friendly_name
|
||||||
|
|
||||||
#************************************************************************************************
|
#************************************************************************************************
|
||||||
@ -177,9 +181,10 @@ class helper_servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def set_update(server_id, value):
|
def set_update(server_id, value):
|
||||||
try:
|
try:
|
||||||
row = Server_Stats.select().where(Server_Stats.server_id == server_id)
|
#Checks if server even exists
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error("Database entry not found. ".format(ex))
|
logger.error(f"Database entry not found! {ex}")
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(updating=value).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(updating=value).where(Server_Stats.server_id == server_id).execute()
|
||||||
|
|
||||||
@ -192,9 +197,11 @@ class helper_servers:
|
|||||||
def set_first_run(server_id):
|
def set_first_run(server_id):
|
||||||
#Sets first run to false
|
#Sets first run to false
|
||||||
try:
|
try:
|
||||||
row = Server_Stats.select().where(Server_Stats.server_id == server_id)
|
#Checks if server even exists
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error("Database entry not found. ".format(ex))
|
logger.error(f"Database entry not found! {ex}")
|
||||||
|
return
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(first_run=False).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(first_run=False).where(Server_Stats.server_id == server_id).execute()
|
||||||
|
|
||||||
@ -206,7 +213,12 @@ class helper_servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_TTL_without_player(server_id):
|
def get_TTL_without_player(server_id):
|
||||||
last_stat = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).first()
|
last_stat = Server_Stats.select().where(Server_Stats.server_id == server_id).order_by(Server_Stats.created.desc()).first()
|
||||||
last_stat_with_player = Server_Stats.select().where(Server_Stats.server_id == server_id).where(Server_Stats.online > 0).order_by(Server_Stats.created.desc()).first()
|
last_stat_with_player = (Server_Stats
|
||||||
|
.select()
|
||||||
|
.where(Server_Stats.server_id == server_id)
|
||||||
|
.where(Server_Stats.online > 0)
|
||||||
|
.order_by(Server_Stats.created.desc())
|
||||||
|
.first())
|
||||||
return last_stat.created - last_stat_with_player.created
|
return last_stat.created - last_stat_with_player.created
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -220,9 +232,10 @@ class helper_servers:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def set_waiting_start(server_id, value):
|
def set_waiting_start(server_id, value):
|
||||||
try:
|
try:
|
||||||
row = Server_Stats.select().where(Server_Stats.server_id == server_id)
|
# Checks if server even exists
|
||||||
|
Server_Stats.select().where(Server_Stats.server_id == server_id)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error("Database entry not found. ".format(ex))
|
logger.error(f"Database entry not found! {ex}")
|
||||||
with database.atomic():
|
with database.atomic():
|
||||||
Server_Stats.update(waiting_start=value).where(Server_Stats.server_id == server_id).execute()
|
Server_Stats.update(waiting_start=value).where(Server_Stats.server_id == server_id).execute()
|
||||||
|
|
||||||
|
@ -1,8 +1,7 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
from typing import Optional, List, Union
|
from typing import Optional, Union
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
@ -14,14 +13,12 @@ peewee_logger = logging.getLogger('peewee')
|
|||||||
peewee_logger.setLevel(logging.INFO)
|
peewee_logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from peewee import *
|
from peewee import SqliteDatabase, Model, ForeignKeyField, CharField, AutoField, DateTimeField, BooleanField, CompositeKey, DoesNotExist, JOIN
|
||||||
from playhouse.shortcuts import model_to_dict
|
from playhouse.shortcuts import model_to_dict
|
||||||
from enum import Enum
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
database = SqliteDatabase(helper.db_path, pragmas={
|
database = SqliteDatabase(helper.db_path, pragmas={
|
||||||
@ -58,7 +55,7 @@ class ApiKeys(Model):
|
|||||||
token_id = AutoField()
|
token_id = AutoField()
|
||||||
name = CharField(default='', unique=True, index=True)
|
name = CharField(default='', unique=True, index=True)
|
||||||
created = DateTimeField(default=datetime.datetime.now)
|
created = DateTimeField(default=datetime.datetime.now)
|
||||||
user = ForeignKeyField(Users, backref='api_token', index=True)
|
user_id = ForeignKeyField(Users, backref='api_token', index=True)
|
||||||
server_permissions = CharField(default='00000000')
|
server_permissions = CharField(default='00000000')
|
||||||
crafty_permissions = CharField(default='000')
|
crafty_permissions = CharField(default='000')
|
||||||
superuser = BooleanField(default=False)
|
superuser = BooleanField(default=False)
|
||||||
@ -138,9 +135,11 @@ class helper_users:
|
|||||||
#logger.debug("user: ({}) {}".format(user_id, {}))
|
#logger.debug("user: ({}) {}".format(user_id, {}))
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
def check_system_user(user_id):
|
def check_system_user(user_id):
|
||||||
try:
|
try:
|
||||||
Users.get(Users.user_id == user_id).user_id == user_id
|
result = Users.get(Users.user_id == user_id).user_id == user_id
|
||||||
|
if result:
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
return False
|
return False
|
||||||
@ -177,6 +176,7 @@ class helper_users:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def get_super_user_list():
|
def get_super_user_list():
|
||||||
final_users = []
|
final_users = []
|
||||||
|
# pylint: disable=singleton-comparison
|
||||||
super_users = Users.select().where(Users.superuser == True)
|
super_users = Users.select().where(Users.superuser == True)
|
||||||
for suser in super_users:
|
for suser in super_users:
|
||||||
if suser.user_id not in final_users:
|
if suser.user_id not in final_users:
|
||||||
@ -233,7 +233,7 @@ class helper_users:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user_roles(user: Union[dict, Users]):
|
def add_user_roles(user: Union[dict, Users]):
|
||||||
if type(user) == dict:
|
if isinstance(user, dict):
|
||||||
user_id = user['user_id']
|
user_id = user['user_id']
|
||||||
else:
|
else:
|
||||||
user_id = user.user_id
|
user_id = user.user_id
|
||||||
@ -246,7 +246,7 @@ class helper_users:
|
|||||||
for r in roles_query:
|
for r in roles_query:
|
||||||
roles.add(r.role_id.role_id)
|
roles.add(r.role_id.role_id)
|
||||||
|
|
||||||
if type(user) == dict:
|
if isinstance(user, dict):
|
||||||
user['roles'] = roles
|
user['roles'] = roles
|
||||||
else:
|
else:
|
||||||
user.roles = roles
|
user.roles = roles
|
||||||
@ -283,7 +283,12 @@ class helper_users:
|
|||||||
return ApiKeys.get(ApiKeys.token_id == key_id)
|
return ApiKeys.get(ApiKeys.token_id == key_id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add_user_api_key(name: str, user_id: str, superuser: bool = False, server_permissions_mask: Optional[str] = None, crafty_permissions_mask: Optional[str] = None):
|
def add_user_api_key(
|
||||||
|
name: str,
|
||||||
|
user_id: str,
|
||||||
|
superuser: bool = False,
|
||||||
|
server_permissions_mask: Optional[str] = None,
|
||||||
|
crafty_permissions_mask: Optional[str] = None):
|
||||||
return ApiKeys.insert({
|
return ApiKeys.insert({
|
||||||
ApiKeys.name: name,
|
ApiKeys.name: name,
|
||||||
ApiKeys.user_id: user_id,
|
ApiKeys.user_id: user_id,
|
||||||
|
@ -1,28 +1,16 @@
|
|||||||
import os
|
|
||||||
import sys
|
import sys
|
||||||
import cmd
|
import cmd
|
||||||
import time
|
import time
|
||||||
import threading
|
import threading
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from app.classes.shared.tasks import TasksManager
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
|
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
try:
|
logger = logging.getLogger(__name__)
|
||||||
import requests
|
class MainPrompt(cmd.Cmd):
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class MainPrompt(cmd.Cmd, object):
|
|
||||||
|
|
||||||
def __init__(self, tasks_manager, migration_manager):
|
def __init__(self, tasks_manager, migration_manager):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
@ -30,12 +18,13 @@ class MainPrompt(cmd.Cmd, object):
|
|||||||
self.migration_manager = migration_manager
|
self.migration_manager = migration_manager
|
||||||
|
|
||||||
# overrides the default Prompt
|
# overrides the default Prompt
|
||||||
prompt = "Crafty Controller v{} > ".format(helper.get_version_string())
|
prompt = f"Crafty Controller v{helper.get_version_string()} > "
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def emptyline():
|
def emptyline():
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
#pylint: disable=unused-argument
|
||||||
def do_exit(self, line):
|
def do_exit(self, line):
|
||||||
self.tasks_manager._main_graceful_exit()
|
self.tasks_manager._main_graceful_exit()
|
||||||
self.universal_exit()
|
self.universal_exit()
|
||||||
@ -52,9 +41,9 @@ class MainPrompt(cmd.Cmd, object):
|
|||||||
elif line == 'diff':
|
elif line == 'diff':
|
||||||
console.info(self.migration_manager.diff)
|
console.info(self.migration_manager.diff)
|
||||||
elif line == 'info':
|
elif line == 'info':
|
||||||
console.info('Done: {}'.format(self.migration_manager.done))
|
console.info(f'Done: {self.migration_manager.done}')
|
||||||
console.info('FS: {}'.format(self.migration_manager.todo))
|
console.info(f'FS: {self.migration_manager.todo}')
|
||||||
console.info('Todo: {}'.format(self.migration_manager.diff))
|
console.info(f'Todo: {self.migration_manager.diff}')
|
||||||
elif line.startswith('add '):
|
elif line.startswith('add '):
|
||||||
migration_name = line[len('add '):]
|
migration_name = line[len('add '):]
|
||||||
self.migration_manager.create(migration_name, False)
|
self.migration_manager.create(migration_name, False)
|
@ -9,13 +9,12 @@ try:
|
|||||||
from termcolor import colored
|
from termcolor import colored
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
print("Import Error: Unable to load {} module".format(e.name))
|
print(f"Import Error: Unable to load {e.name} module")
|
||||||
from app.classes.shared.installer import installer
|
from app.classes.shared.installer import installer
|
||||||
installer.do_install()
|
installer.do_install()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
class Console:
|
class Console:
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@ -49,28 +48,27 @@ class Console:
|
|||||||
|
|
||||||
def debug(self, message):
|
def debug(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.magenta("[+] Crafty: {} - DEBUG:\t{}".format(dt, message))
|
self.magenta(f"[+] Crafty: {dt} - DEBUG:\t{message}")
|
||||||
|
|
||||||
def info(self, message):
|
def info(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.white("[+] Crafty: {} - INFO:\t{}".format(dt, message))
|
self.white(f"[+] Crafty: {dt} - INFO:\t{message}")
|
||||||
|
|
||||||
def warning(self, message):
|
def warning(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.cyan("[+] Crafty: {} - WARNING:\t{}".format(dt, message))
|
self.cyan(f"[+] Crafty: {dt} - WARNING:\t{message}")
|
||||||
|
|
||||||
def error(self, message):
|
def error(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.yellow("[+] Crafty: {} - ERROR:\t{}".format(dt, message))
|
self.yellow(f"[+] Crafty: {dt} - ERROR:\t{message}")
|
||||||
|
|
||||||
def critical(self, message):
|
def critical(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.red("[+] Crafty: {} - CRITICAL:\t{}".format(dt, message))
|
self.red(f"[+] Crafty: {dt} - CRITICAL:\t{message}")
|
||||||
|
|
||||||
def help(self, message):
|
def help(self, message):
|
||||||
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
dt = datetime.datetime.now().strftime("%Y-%m-%d %I:%M:%S %p")
|
||||||
self.green("[+] Crafty: {} - HELP:\t{}".format(dt, message))
|
self.green(f"[+] Crafty: {dt} - HELP:\t{message}")
|
||||||
|
|
||||||
|
|
||||||
console = Console()
|
console = Console()
|
||||||
|
|
||||||
|
@ -14,15 +14,13 @@ import html
|
|||||||
import zipfile
|
import zipfile
|
||||||
import pathlib
|
import pathlib
|
||||||
import shutil
|
import shutil
|
||||||
from requests import get
|
|
||||||
from contextlib import suppress
|
|
||||||
import ctypes
|
import ctypes
|
||||||
import telnetlib
|
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from socket import gethostname
|
from socket import gethostname
|
||||||
|
from contextlib import suppress
|
||||||
|
from requests import get
|
||||||
|
|
||||||
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -32,9 +30,9 @@ try:
|
|||||||
from OpenSSL import crypto
|
from OpenSSL import crypto
|
||||||
from argon2 import PasswordHasher
|
from argon2 import PasswordHasher
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as err:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {err.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {err.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
class Helpers:
|
class Helpers:
|
||||||
@ -68,8 +66,8 @@ class Helpers:
|
|||||||
|
|
||||||
def check_file_perms(self, path):
|
def check_file_perms(self, path):
|
||||||
try:
|
try:
|
||||||
fp = open(path, "r").close()
|
open(path, "r", encoding='utf-8').close()
|
||||||
logger.info("{} is readable".format(path))
|
logger.info(f"{path} is readable")
|
||||||
return True
|
return True
|
||||||
except PermissionError:
|
except PermissionError:
|
||||||
return False
|
return False
|
||||||
@ -82,7 +80,7 @@ class Helpers:
|
|||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
logger.error("{} does not exist".format(file))
|
logger.error(f"{file} does not exist")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def get_servers_root_dir(self):
|
def get_servers_root_dir(self):
|
||||||
@ -93,7 +91,7 @@ class Helpers:
|
|||||||
try:
|
try:
|
||||||
requests.get('https://google.com', timeout=1)
|
requests.get('https://google.com', timeout=1)
|
||||||
return True
|
return True
|
||||||
except Exception as err:
|
except Exception:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -139,7 +137,7 @@ class Helpers:
|
|||||||
esc = False # whether an escape character was encountered
|
esc = False # whether an escape character was encountered
|
||||||
stch = None # if we're dealing with a quote, save the quote type here. Nested quotes to be dealt with by the command
|
stch = None # if we're dealing with a quote, save the quote type here. Nested quotes to be dealt with by the command
|
||||||
for c in cmd_in: # for character in string
|
for c in cmd_in: # for character in string
|
||||||
if np == True: # if set, begin a new argument and increment the command index. Continue the loop.
|
if np: # if set, begin a new argument and increment the command index. Continue the loop.
|
||||||
if c == ' ':
|
if c == ' ':
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
@ -154,11 +152,13 @@ class Helpers:
|
|||||||
else:
|
else:
|
||||||
if c == '\\': # if the current character is an escape character, set the esc flag and continue to next loop
|
if c == '\\': # if the current character is an escape character, set the esc flag and continue to next loop
|
||||||
esc = True
|
esc = True
|
||||||
elif c == ' ' and stch is None: # if we encounter a space and are not dealing with a quote, set the new argument flag and continue to next loop
|
elif c == ' ' and stch is None: # if we encounter a space and are not dealing with a quote,
|
||||||
|
# set the new argument flag and continue to next loop
|
||||||
np = True
|
np = True
|
||||||
elif c == stch: # if we encounter the character that matches our start quote, end the quote and continue to next loop
|
elif c == stch: # if we encounter the character that matches our start quote, end the quote and continue to next loop
|
||||||
stch = None
|
stch = None
|
||||||
elif stch is None and (c in Helpers.allowed_quotes): # if we're not in the middle of a quote and we get a quotable character, start a quote and proceed to the next loop
|
elif stch is None and (c in Helpers.allowed_quotes): # if we're not in the middle of a quote and we get a quotable character,
|
||||||
|
# start a quote and proceed to the next loop
|
||||||
stch = c
|
stch = c
|
||||||
else: # else, just store the character in the current arg
|
else: # else, just store the character in the current arg
|
||||||
cmd_out[ci] += c
|
cmd_out[ci] += c
|
||||||
@ -167,20 +167,20 @@ class Helpers:
|
|||||||
def get_setting(self, key, default_return=False):
|
def get_setting(self, key, default_return=False):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with open(self.settings_file, "r") as f:
|
with open(self.settings_file, "r", encoding='utf-8') as f:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
|
|
||||||
if key in data.keys():
|
if key in data.keys():
|
||||||
return data.get(key)
|
return data.get(key)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.error("Config File Error: setting {} does not exist".format(key))
|
logger.error(f"Config File Error: setting {key} does not exist")
|
||||||
console.error("Config File Error: setting {} does not exist".format(key))
|
console.error(f"Config File Error: setting {key} does not exist")
|
||||||
return default_return
|
return default_return
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.critical("Config File Error: Unable to read {} due to {}".format(self.settings_file, e))
|
logger.critical(f"Config File Error: Unable to read {self.settings_file} due to {e}")
|
||||||
console.critical("Config File Error: Unable to read {} due to {}".format(self.settings_file, e))
|
console.critical(f"Config File Error: Unable to read {self.settings_file} due to {e}")
|
||||||
|
|
||||||
return default_return
|
return default_return
|
||||||
|
|
||||||
@ -199,11 +199,11 @@ class Helpers:
|
|||||||
def get_version(self):
|
def get_version(self):
|
||||||
version_data = {}
|
version_data = {}
|
||||||
try:
|
try:
|
||||||
with open(os.path.join(self.config_dir, 'version.json'), 'r') as f:
|
with open(os.path.join(self.config_dir, 'version.json'), 'r', encoding='utf-8') as f:
|
||||||
version_data = json.load(f)
|
version_data = json.load(f)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.critical("Unable to get version data!")
|
console.critical(f"Unable to get version data! \n{e}")
|
||||||
|
|
||||||
return version_data
|
return version_data
|
||||||
|
|
||||||
@ -217,7 +217,7 @@ class Helpers:
|
|||||||
try:
|
try:
|
||||||
data = json.loads(r.content)
|
data = json.loads(r.content)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Failed to load json content with error: {} ".format(e))
|
logger.error(f"Failed to load json content with error: {e}")
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
@ -225,11 +225,13 @@ class Helpers:
|
|||||||
def get_version_string(self):
|
def get_version_string(self):
|
||||||
|
|
||||||
version_data = self.get_version()
|
version_data = self.get_version()
|
||||||
|
major = version_data.get('major', '?')
|
||||||
|
minor = version_data.get('minor', '?')
|
||||||
|
sub = version_data.get('sub', '?')
|
||||||
|
meta = version_data.get('meta', '?')
|
||||||
|
|
||||||
# set some defaults if we don't get version_data from our helper
|
# set some defaults if we don't get version_data from our helper
|
||||||
version = "{}.{}.{}-{}".format(version_data.get('major', '?'),
|
version = f"{major}.{minor}.{sub}-{meta}"
|
||||||
version_data.get('minor', '?'),
|
|
||||||
version_data.get('sub', '?'),
|
|
||||||
version_data.get('meta', '?'))
|
|
||||||
return str(version)
|
return str(version)
|
||||||
|
|
||||||
def encode_pass(self, password):
|
def encode_pass(self, password):
|
||||||
@ -240,7 +242,6 @@ class Helpers:
|
|||||||
self.passhasher.verify(currenthash, password)
|
self.passhasher.verify(currenthash, password)
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
pass
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def log_colors(self, line):
|
def log_colors(self, line):
|
||||||
@ -264,6 +265,7 @@ class Helpers:
|
|||||||
|
|
||||||
# highlight users keywords
|
# highlight users keywords
|
||||||
for keyword in user_keywords:
|
for keyword in user_keywords:
|
||||||
|
# pylint: disable=consider-using-f-string
|
||||||
search_replace = (r'({})'.format(keyword), r'<span class="mc-log-keyword">\1</span>')
|
search_replace = (r'({})'.format(keyword), r'<span class="mc-log-keyword">\1</span>')
|
||||||
replacements.append(search_replace)
|
replacements.append(search_replace)
|
||||||
|
|
||||||
@ -274,7 +276,7 @@ class Helpers:
|
|||||||
|
|
||||||
|
|
||||||
def validate_traversal(self, base_path, filename):
|
def validate_traversal(self, base_path, filename):
|
||||||
logger.debug("Validating traversal (\"{x}\", \"{y}\")".format(x=base_path, y=filename))
|
logger.debug(f"Validating traversal (\"{base_path}\", \"{filename}\")")
|
||||||
base = pathlib.Path(base_path).resolve()
|
base = pathlib.Path(base_path).resolve()
|
||||||
file = pathlib.Path(filename)
|
file = pathlib.Path(filename)
|
||||||
fileabs = base.joinpath(file).resolve()
|
fileabs = base.joinpath(file).resolve()
|
||||||
@ -287,8 +289,8 @@ class Helpers:
|
|||||||
|
|
||||||
def tail_file(self, file_name, number_lines=20):
|
def tail_file(self, file_name, number_lines=20):
|
||||||
if not self.check_file_exists(file_name):
|
if not self.check_file_exists(file_name):
|
||||||
logger.warning("Unable to find file to tail: {}".format(file_name))
|
logger.warning(f"Unable to find file to tail: {file_name}")
|
||||||
return ["Unable to find file to tail: {}".format(file_name)]
|
return [f"Unable to find file to tail: {file_name}"]
|
||||||
|
|
||||||
# length of lines is X char here
|
# length of lines is X char here
|
||||||
avg_line_length = 255
|
avg_line_length = 255
|
||||||
@ -297,7 +299,7 @@ class Helpers:
|
|||||||
line_buffer = number_lines * avg_line_length
|
line_buffer = number_lines * avg_line_length
|
||||||
|
|
||||||
# open our file
|
# open our file
|
||||||
with open(file_name, 'r') as f:
|
with open(file_name, 'r', encoding='utf-8') as f:
|
||||||
|
|
||||||
# seek
|
# seek
|
||||||
f.seek(0, 2)
|
f.seek(0, 2)
|
||||||
@ -313,8 +315,7 @@ class Helpers:
|
|||||||
lines = f.readlines()
|
lines = f.readlines()
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning('Unable to read a line in the file:{} - due to error: {}'.format(file_name, e))
|
logger.warning(f'Unable to read a line in the file:{file_name} - due to error: {e}')
|
||||||
pass
|
|
||||||
|
|
||||||
# now we are done getting the lines, let's return it
|
# now we are done getting the lines, let's return it
|
||||||
return lines
|
return lines
|
||||||
@ -323,14 +324,14 @@ class Helpers:
|
|||||||
def check_writeable(path: str):
|
def check_writeable(path: str):
|
||||||
filename = os.path.join(path, "tempfile.txt")
|
filename = os.path.join(path, "tempfile.txt")
|
||||||
try:
|
try:
|
||||||
fp = open(filename, "w").close()
|
open(filename, "w", encoding='utf-8').close()
|
||||||
os.remove(filename)
|
os.remove(filename)
|
||||||
|
|
||||||
logger.info("{} is writable".format(filename))
|
logger.info(f"{filename} is writable")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.critical("Unable to write to {} - Error: {}".format(path, e))
|
logger.critical(f"Unable to write to {path} - Error: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def checkRoot(self):
|
def checkRoot(self):
|
||||||
@ -360,24 +361,17 @@ class Helpers:
|
|||||||
try:
|
try:
|
||||||
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
|
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
|
||||||
zip_ref.extractall(tempDir)
|
zip_ref.extractall(tempDir)
|
||||||
for i in range(len(zip_ref.filelist)):
|
for i, _ in enumerate(zip_ref.filelist):
|
||||||
if len(zip_ref.filelist) > 1 or not zip_ref.filelist[i].filename.endswith('/'):
|
if len(zip_ref.filelist) > 1 or not zip_ref.filelist[i].filename.endswith('/'):
|
||||||
test = zip_ref.filelist[i].filename
|
|
||||||
break
|
break
|
||||||
path_list = test.split('/')
|
|
||||||
root_path = path_list[0]
|
|
||||||
'''
|
|
||||||
if len(path_list) > 1:
|
|
||||||
for i in range(len(path_list) - 2):
|
|
||||||
root_path = os.path.join(root_path, path_list[i + 1])
|
|
||||||
'''
|
|
||||||
full_root_path = tempDir
|
full_root_path = tempDir
|
||||||
|
|
||||||
for item in os.listdir(full_root_path):
|
for item in os.listdir(full_root_path):
|
||||||
try:
|
try:
|
||||||
shutil.move(os.path.join(full_root_path, item), os.path.join(new_dir, item))
|
shutil.move(os.path.join(full_root_path, item), os.path.join(new_dir, item))
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error('ERROR IN ZIP IMPORT: {}'.format(ex))
|
logger.error(f'ERROR IN ZIP IMPORT: {ex}')
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
print(ex)
|
print(ex)
|
||||||
else:
|
else:
|
||||||
@ -394,7 +388,7 @@ class Helpers:
|
|||||||
|
|
||||||
# if not writeable, let's bomb out
|
# if not writeable, let's bomb out
|
||||||
if not writeable:
|
if not writeable:
|
||||||
logger.critical("Unable to write to {} directory!".format(self.root_dir))
|
logger.critical(f"Unable to write to {self.root_dir} directory!")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# ensure the log directory is there
|
# ensure the log directory is there
|
||||||
@ -402,20 +396,20 @@ class Helpers:
|
|||||||
with suppress(FileExistsError):
|
with suppress(FileExistsError):
|
||||||
os.makedirs(os.path.join(self.root_dir, 'logs'))
|
os.makedirs(os.path.join(self.root_dir, 'logs'))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.error("Failed to make logs directory with error: {} ".format(e))
|
console.error(f"Failed to make logs directory with error: {e} ")
|
||||||
|
|
||||||
# ensure the log file is there
|
# ensure the log file is there
|
||||||
try:
|
try:
|
||||||
open(log_file, 'a').close()
|
open(log_file, 'a', encoding='utf-8').close()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.critical("Unable to open log file!")
|
console.critical(f"Unable to open log file! {e}")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# del any old session.lock file as this is a new session
|
# del any old session.lock file as this is a new session
|
||||||
try:
|
try:
|
||||||
os.remove(session_log_file)
|
os.remove(session_log_file)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Deleting Session.lock failed with error: {} ".format(e))
|
logger.error(f"Deleting Session.lock failed with error: {e}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_time_as_string():
|
def get_time_as_string():
|
||||||
@ -424,10 +418,10 @@ class Helpers:
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def check_file_exists(path: str):
|
def check_file_exists(path: str):
|
||||||
logger.debug('Looking for path: {}'.format(path))
|
logger.debug(f'Looking for path: {path}')
|
||||||
|
|
||||||
if os.path.exists(path) and os.path.isfile(path):
|
if os.path.exists(path) and os.path.isfile(path):
|
||||||
logger.debug('Found path: {}'.format(path))
|
logger.debug(f'Found path: {path}')
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
@ -436,18 +430,20 @@ class Helpers:
|
|||||||
def human_readable_file_size(num: int, suffix='B'):
|
def human_readable_file_size(num: int, suffix='B'):
|
||||||
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
|
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
|
||||||
if abs(num) < 1024.0:
|
if abs(num) < 1024.0:
|
||||||
|
# pylint: disable=consider-using-f-string
|
||||||
return "%3.1f%s%s" % (num, unit, suffix)
|
return "%3.1f%s%s" % (num, unit, suffix)
|
||||||
num /= 1024.0
|
num /= 1024.0
|
||||||
|
# pylint: disable=consider-using-f-string
|
||||||
return "%.1f%s%s" % (num, 'Y', suffix)
|
return "%.1f%s%s" % (num, 'Y', suffix)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def check_path_exists(path: str):
|
def check_path_exists(path: str):
|
||||||
if not path:
|
if not path:
|
||||||
return False
|
return False
|
||||||
logger.debug('Looking for path: {}'.format(path))
|
logger.debug(f'Looking for path: {path}')
|
||||||
|
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
logger.debug('Found path: {}'.format(path))
|
logger.debug(f'Found path: {path}')
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
@@ -459,17 +455,17 @@ class Helpers:

 if os.path.exists(path) and os.path.isfile(path):
 try:
-with open(path, 'r') as f:
+with open(path, 'r', encoding='utf-8') as f:
 for line in (f.readlines() [-lines:]):
 contents = contents + line

 return contents

 except Exception as e:
-logger.error("Unable to read file: {}. \n Error: ".format(path, e))
+logger.error(f"Unable to read file: {path}. \n Error: {e}")
 return False
 else:
-logger.error("Unable to read file: {}. File not found, or isn't a file.".format(path))
+logger.error(f"Unable to read file: {path}. File not found, or isn't a file.")
 return False

 def create_session_file(self, ignore=False):
@@ -484,11 +480,10 @@ class Helpers:
 data = json.loads(file_data)
 pid = data.get('pid')
 started = data.get('started')
-console.critical("Another Crafty Controller agent seems to be running...\npid: {} \nstarted on: {}".format(pid, started))
+console.critical(f"Another Crafty Controller agent seems to be running...\npid: {pid} \nstarted on: {started}")
 except Exception as e:
-logger.error("Failed to locate existing session.lock with error: {} ".format(e))
-console.error("Failed to locate existing session.lock with error: {} ".format(e))
+logger.error(f"Failed to locate existing session.lock with error: {e} ")
+console.error(f"Failed to locate existing session.lock with error: {e} ")


 sys.exit(1)

@@ -499,7 +494,7 @@ class Helpers:
 'pid': pid,
 'started': now.strftime("%d-%m-%Y, %H:%M:%S")
 }
-with open(self.session_file, 'w') as f:
+with open(self.session_file, 'w', encoding='utf-8') as f:
 json.dump(session_data, f, indent=True)

 # because this is a recursive function, we will return bytes, and set human readable later
@@ -526,14 +521,14 @@ class Helpers:
 return sizes

 @staticmethod
-def base64_encode_string(string: str):
-s_bytes = str(string).encode('utf-8')
+def base64_encode_string(fun_str: str):
+s_bytes = str(fun_str).encode('utf-8')
 b64_bytes = base64.encodebytes(s_bytes)
 return b64_bytes.decode('utf-8')

 @staticmethod
-def base64_decode_string(string: str):
-s_bytes = str(string).encode('utf-8')
+def base64_decode_string(fun_str: str):
+s_bytes = str(fun_str).encode('utf-8')
 b64_bytes = base64.decodebytes(s_bytes)
 return b64_bytes.decode("utf-8")

@@ -553,7 +548,7 @@ class Helpers:

 try:
 os.makedirs(path)
-logger.debug("Created Directory : {}".format(path))
+logger.debug(f"Created Directory : {path}")

 # directory already exists - non-blocking error
 except FileExistsError:
@@ -570,8 +565,8 @@ class Helpers:
 cert_file = os.path.join(cert_dir, 'commander.cert.pem')
 key_file = os.path.join(cert_dir, 'commander.key.pem')

-logger.info("SSL Cert File is set to: {}".format(cert_file))
-logger.info("SSL Key File is set to: {}".format(key_file))
+logger.info(f"SSL Cert File is set to: {cert_file}")
+logger.info(f"SSL Key File is set to: {key_file}")

 # don't create new files if we already have them.
 if self.check_file_exists(cert_file) and self.check_file_exists(key_file):
@@ -602,11 +597,11 @@ class Helpers:
 cert.set_pubkey(k)
 cert.sign(k, 'sha256')

-f = open(cert_file, "w")
+f = open(cert_file, "w", encoding='utf-8')
 f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode())
 f.close()

-f = open(key_file, "w")
+f = open(key_file, "w", encoding='utf-8')
 f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode())
 f.close()

@@ -645,7 +640,7 @@ class Helpers:
 data = {}

 if self.check_file_exists(default_file):
-with open(default_file, 'r') as f:
+with open(default_file, 'r', encoding='utf-8') as f:
 data = json.load(f)

 del_json = helper.get_setting('delete_default_json')
@@ -662,25 +657,26 @@ class Helpers:
 for raw_filename in file_list:
 filename = html.escape(raw_filename)
 rel = os.path.join(folder, raw_filename)
+dpath = os.path.join(folder, filename)
 if os.path.isdir(rel):
 output += \
-"""<li class="tree-item" data-path="{}">
-\n<div id="{}" data-path="{}" data-name="{}" class="tree-caret tree-ctx-item tree-folder">
-<span id="{}span" class="files-tree-title" data-path="{}" data-name="{}" onclick="getDirView(event)">
+f"""<li class="tree-item" data-path="{dpath}">
+\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
+<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
 <i class="far fa-folder"></i>
 <i class="far fa-folder-open"></i>
-{}
+{filename}
 </span>
 </div><li>
 \n"""\
-.format(os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, os.path.join(folder, filename), os.path.join(folder, filename), filename, filename)
 else:
 if filename != "crafty_managed.txt":
-output += """<li
+output += f"""<li
 class="tree-item tree-ctx-item tree-file"
-data-path="{}"
-data-name="{}"
-onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{}</li>""".format(os.path.join(folder, filename), filename, filename)
+data-path="{dpath}"
+data-name="{filename}"
+onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{filename}</li>"""
 return output

 @staticmethod
@@ -688,29 +684,30 @@ class Helpers:
 file_list = os.listdir(folder)
 file_list = sorted(file_list, key=str.casefold)
 output += \
-"""<ul class="tree-nested d-block" id="{}ul">"""\
-.format(folder)
+f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
 for raw_filename in file_list:
 filename = html.escape(raw_filename)
+dpath = os.path.join(folder, filename)
 rel = os.path.join(folder, raw_filename)
 if os.path.isdir(rel):
 output += \
-"""<li class="tree-item" data-path="{}">
-\n<div id="{}" data-path="{}" data-name="{}" class="tree-caret tree-ctx-item tree-folder">
-<span id="{}span" class="files-tree-title" data-path="{}" data-name="{}" onclick="getDirView(event)">
+f"""<li class="tree-item" data-path="{dpath}">
+\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
+<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
 <i class="far fa-folder"></i>
 <i class="far fa-folder-open"></i>
-{}
+{filename}
 </span>
 </div><li>"""\
-.format(os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, os.path.join(folder, filename), os.path.join(folder, filename), filename, filename)
 else:
 if filename != "crafty_managed.txt":
-output += """<li
+output += f"""<li
 class="tree-item tree-ctx-item tree-file"
-data-path="{}"
-data-name="{}"
-onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{}</li>""".format(os.path.join(folder, filename), filename, filename)
+data-path="{dpath}"
+data-name="{filename}"
+onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>{filename}</li>"""
 output += '</ul>\n'
 return output

@@ -719,24 +716,25 @@ class Helpers:
 file_list = os.listdir(folder)
 file_list = sorted(file_list, key=str.casefold)
 output += \
-"""<ul class="tree-nested d-block" id="{}ul">"""\
-.format(folder)
+f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
 for raw_filename in file_list:
 filename = html.escape(raw_filename)
 rel = os.path.join(folder, raw_filename)
+dpath = os.path.join(folder, filename)
 if os.path.isdir(rel):
 output += \
-"""<li class="tree-item" data-path="{}">
-\n<div id="{}" data-path="{}" data-name="{}" class="tree-caret tree-ctx-item tree-folder">
-<input type="radio" name="root_path" value="{}">
-<span id="{}span" class="files-tree-title" data-path="{}" data-name="{}" onclick="getDirView(event)">
+f"""<li class="tree-item" data-path="{dpath}">
+\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
+<input type="radio" name="root_path" value="{dpath}">
+<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
 <i class="far fa-folder"></i>
 <i class="far fa-folder-open"></i>
-{}
+{filename}
 </span>
 </input></div><li>
 \n"""\
-.format(os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, filename)
 return output

 @staticmethod
@@ -744,23 +742,24 @@ class Helpers:
 file_list = os.listdir(folder)
 file_list = sorted(file_list, key=str.casefold)
 output += \
-"""<ul class="tree-nested d-block" id="{}ul">"""\
-.format(folder)
+f"""<ul class="tree-nested d-block" id="{folder}ul">"""\
 for raw_filename in file_list:
 filename = html.escape(raw_filename)
 rel = os.path.join(folder, raw_filename)
+dpath = os.path.join(folder, filename)
 if os.path.isdir(rel):
 output += \
-"""<li class="tree-item" data-path="{}">
-\n<div id="{}" data-path="{}" data-name="{}" class="tree-caret tree-ctx-item tree-folder">
-<input type="radio" name="root_path" value="{}">
-<span id="{}span" class="files-tree-title" data-path="{}" data-name="{}" onclick="getDirView(event)">
+f"""<li class="tree-item" data-path="{dpath}">
+\n<div id="{dpath}" data-path="{dpath}" data-name="{filename}" class="tree-caret tree-ctx-item tree-folder">
+<input type="radio" name="root_path" value="{dpath}">
+<span id="{dpath}span" class="files-tree-title" data-path="{dpath}" data-name="{filename}" onclick="getDirView(event)">
 <i class="far fa-folder"></i>
 <i class="far fa-folder-open"></i>
-{}
+{filename}
 </span>
 </input></div><li>"""\
-.format(os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, os.path.join(folder, filename), os.path.join(folder, filename), os.path.join(folder, filename), filename, filename)
 return output

 @staticmethod
@@ -774,7 +773,6 @@ class Helpers:
 websocket_helper.broadcast_user(user_id, 'send_temp_path',{
 'path': tempDir
 })
-return

 @staticmethod
 def unzip_backup_archive(backup_path, zip_name):
@@ -794,7 +792,9 @@ class Helpers:
 parent_path = os.path.abspath(parent_path)
 child_path = os.path.abspath(child_path)

-# Compare the common path of the parent and child path with the common path of just the parent path. Using the commonpath method on just the parent path will regularise the path name in the same way as the comparison that deals with both paths, removing any trailing path separator
+# Compare the common path of the parent and child path with the common path of just the parent path.
+# Using the commonpath method on just the parent path will regularise the path name in the same way
+# as the comparison that deals with both paths, removing any trailing path separator
 return os.path.commonpath([parent_path]) == os.path.commonpath([parent_path, child_path])

 @staticmethod
@@ -1,7 +1,6 @@
 import sys
 import subprocess


 class install:

 @staticmethod
@@ -21,5 +20,4 @@ class install:
 print("Crafty has installed it's dependencies, please restart Crafty")
 sys.exit(0)


 installer = install()
@@ -2,34 +2,12 @@ import os
 import pathlib
 import time
 import logging
-import sys
-from typing import Union

-from app.classes.models.server_permissions import Enum_Permissions_Server
-from app.classes.models.users import helper_users
-from peewee import DoesNotExist
-import yaml
-import asyncio
 import shutil
 import tempfile
-import zipfile
 from distutils import dir_util
-from app.classes.models.management import helpers_management
-from app.classes.web.websocket_helper import websocket_helper
+from typing import Union
+from peewee import DoesNotExist

-from app.classes.shared.helpers import helper
-from app.classes.shared.console import console

-# Importing Models
-from app.classes.models.servers import servers_helper
-from app.classes.shared.console import console
-from app.classes.shared.helpers import helper
-from app.classes.shared.server import Server
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.minecraft.serverjars import server_jar_obj
-from app.classes.minecraft.stats import Stats

-# Importing Controllers
 from app.classes.controllers.crafty_perms_controller import Crafty_Perms_Controller
 from app.classes.controllers.management_controller import Management_Controller
 from app.classes.controllers.users_controller import Users_Controller
@@ -37,6 +15,21 @@ from app.classes.controllers.roles_controller import Roles_Controller
 from app.classes.controllers.server_perms_controller import Server_Perms_Controller
 from app.classes.controllers.servers_controller import Servers_Controller

+from app.classes.models.server_permissions import Enum_Permissions_Server
+from app.classes.models.users import helper_users
+from app.classes.models.management import helpers_management
+from app.classes.models.servers import servers_helper

+from app.classes.shared.console import console
+from app.classes.shared.helpers import helper
+from app.classes.shared.server import Server

+from app.classes.minecraft.server_props import ServerProps
+from app.classes.minecraft.serverjars import server_jar_obj
+from app.classes.minecraft.stats import Stats

+from app.classes.web.websocket_helper import websocket_helper

 logger = logging.getLogger(__name__)

 class Controller:
@@ -53,7 +46,7 @@ class Controller:

 def check_server_loaded(self, server_id_to_check: int):

-logger.info("Checking to see if we already registered {}".format(server_id_to_check))
+logger.info(f"Checking to see if we already registered {server_id_to_check}")

 for s in self.servers_list:
 known_server = s.get('server_id')
@@ -61,7 +54,7 @@ class Controller:
 return False

 if known_server == server_id_to_check:
-logger.info('skipping initialization of server {} because it is already loaded'.format(server_id_to_check))
+logger.info(f'skipping initialization of server {server_id_to_check} because it is already loaded')
 return True

 return False
@@ -79,19 +72,17 @@ class Controller:

 # if this server path no longer exists - let's warn and bomb out
 if not helper.check_path_exists(helper.get_os_understandable_path(s['path'])):
-logger.warning("Unable to find server {} at path {}. Skipping this server".format(s['server_name'],
-s['path']))
+logger.warning(f"Unable to find server {s['server_name']} at path {s['path']}. Skipping this server")

-console.warning("Unable to find server {} at path {}. Skipping this server".format(s['server_name'],
-s['path']))
+console.warning(f"Unable to find server {s['server_name']} at path {s['path']}. Skipping this server")
 continue

 settings_file = os.path.join(helper.get_os_understandable_path(s['path']), 'server.properties')

 # if the properties file isn't there, let's warn
 if not helper.check_file_exists(settings_file):
-logger.error("Unable to find {}. Skipping this server.".format(settings_file))
-console.error("Unable to find {}. Skipping this server.".format(settings_file))
+logger.error(f"Unable to find {settings_file}. Skipping this server.")
+console.error(f"Unable to find {settings_file}. Skipping this server.")
 continue

 settings = ServerProps(settings_file)
@@ -114,12 +105,10 @@ class Controller:

 self.refresh_server_settings(s['server_id'])

-console.info("Loaded Server: ID {} | Name: {} | Autostart: {} | Delay: {} ".format(
-s['server_id'],
-s['server_name'],
-s['auto_start'],
-s['auto_start_delay']
-))
+console.info(f"Loaded Server: ID {s['server_id']}" +
+f" | Name: {s['server_name']}" +
+f" | Autostart: {s['auto_start']}" +
+f" | Delay: {s['auto_start_delay']} ")

 def refresh_server_settings(self, server_id: int):
 server_obj = self.get_server_obj(server_id)
@@ -155,10 +144,10 @@ class Controller:
 user_servers = self.servers.get_authorized_servers(int(exec_user['user_id']))
 auth_servers = []
 for server in user_servers:
-if Enum_Permissions_Server.Logs in self.server_perms.get_user_permissions_list(exec_user['user_id'], server["server_id"]):
+if Enum_Permissions_Server.Logs in self.server_perms.get_user_id_permissions_list(exec_user['user_id'], server["server_id"]):
 auth_servers.append(server)
 else:
-logger.info("Logs permission not available for server {}. Skipping.".format(server["server_name"]))
+logger.info(f"Logs permission not available for server {server['server_name']}. Skipping.")
 #we'll iterate through our list of log paths from auth servers.
 for server in auth_servers:
 final_path = os.path.join(server_path, str(server['server_name']))
@@ -166,7 +155,7 @@ class Controller:
 try:
 shutil.copy(server['log_path'], final_path)
 except Exception as e:
-logger.warning("Failed to copy file with error: {}".format(e))
+logger.warning(f"Failed to copy file with error: {e}")
 #Copy crafty logs to archive dir
 full_log_name = os.path.join(crafty_path, 'logs')
 shutil.copytree(os.path.join(self.project_root, 'logs'), full_log_name)
@@ -187,7 +176,7 @@ class Controller:
 if int(s['server_id']) == int(server_id):
 return s['server_settings']

-logger.warning("Unable to find server object for server id {}".format(server_id))
+logger.warning(f"Unable to find server object for server id {server_id}")
 return False

 def get_server_obj(self, server_id: Union[str, int]) -> Union[bool, Server]:
@@ -195,7 +184,7 @@ class Controller:
 if str(s['server_id']) == str(server_id):
 return s['server_obj']

-logger.warning("Unable to find server object for server id {}".format(server_id))
+logger.warning(f"Unable to find server object for server id {server_id}")
 return False # TODO: Change to None

 def get_server_data(self, server_id: str):
@@ -203,7 +192,7 @@ class Controller:
 if str(s['server_id']) == str(server_id):
 return s['server_data_obj']

-logger.warning("Unable to find server object for server id {}".format(server_id))
+logger.warning(f"Unable to find server object for server id {server_id}")
 return False

 @staticmethod
@@ -231,15 +220,15 @@ class Controller:

 def stop_all_servers(self):
 servers = self.list_running_servers()
-logger.info("Found {} running server(s)".format(len(servers)))
-console.info("Found {} running server(s)".format(len(servers)))
+logger.info(f"Found {len(servers)} running server(s)")
+console.info(f"Found {len(servers)} running server(s)")

 logger.info("Stopping All Servers")
 console.info("Stopping All Servers")

 for s in servers:
-logger.info("Stopping Server ID {} - {}".format(s['id'], s['name']))
-console.info("Stopping Server ID {} - {}".format(s['id'], s['name']))
+logger.info(f"Stopping Server ID {s['id']} - {s['name']}")
+console.info(f"Stopping Server ID {s['id']} - {s['name']}")

 self.stop_server(s['id'])

@@ -264,7 +253,7 @@ class Controller:
 server_dir.replace(' ', '^ ')
 backup_path.replace(' ', '^ ')

-server_file = "{server}-{version}.jar".format(server=server, version=version)
+server_file = f"{server}-{version}.jar"
 full_jar_path = os.path.join(server_dir, server_file)

 # make the dir - perhaps a UUID?
@@ -273,32 +262,29 @@ class Controller:

 try:
 # do a eula.txt
-with open(os.path.join(server_dir, "eula.txt"), 'w') as f:
+with open(os.path.join(server_dir, "eula.txt"), 'w', encoding='utf-8') as f:
 f.write("eula=false")
 f.close()

 # setup server.properties with the port
-with open(os.path.join(server_dir, "server.properties"), "w") as f:
-f.write("server-port={}".format(port))
+with open(os.path.join(server_dir, "server.properties"), "w", encoding='utf-8') as f:
+f.write(f"server-port={port}")
 f.close()

 except Exception as e:
-logger.error("Unable to create required server files due to :{}".format(e))
+logger.error(f"Unable to create required server files due to :{e}")
 return False

+#must remain non-fstring due to string addtion
 if helper.is_os_windows():
-server_command = 'java -Xms{}M -Xmx{}M -jar "{}" nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-full_jar_path)
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
 else:
-server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-full_jar_path)
-server_log_file = "{}/logs/latest.log".format(server_dir)
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
+server_log_file = f"{server_dir}/logs/latest.log"
 server_stop = "stop"

 # download the jar
-server_jar_obj.download_jar(server, version, full_jar_path, name)
+server_jar_obj.download_jar(server, version, full_jar_path)

 new_id = self.register_server(name, server_id, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, port)
 return new_id
@@ -340,22 +326,19 @@ class Controller:
 if str(item) == 'server.properties':
 has_properties = True
 if not has_properties:
-logger.info("No server.properties found on zip file import. Creating one with port selection of {}".format(str(port)))
-with open(os.path.join(new_server_dir, "server.properties"), "w") as f:
-f.write("server-port={}".format(port))
+logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
+with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
+f.write(f"server-port={port}")
 f.close()

 full_jar_path = os.path.join(new_server_dir, server_jar)

+#due to adding strings this must not be an fstring
 if helper.is_os_windows():
-server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-'"'+full_jar_path+'"')
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
 else:
-server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-full_jar_path)
-server_log_file = "{}/logs/latest.log".format(new_server_dir)
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
+server_log_file = f"{new_server_dir}/logs/latest.log"
 server_stop = "stop"

 new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_jar,
@@ -383,25 +366,22 @@ class Controller:
 try:
 shutil.move(os.path.join(tempDir, item), os.path.join(new_server_dir, item))
 except Exception as ex:
-logger.error('ERROR IN ZIP IMPORT: {}'.format(ex))
+logger.error(f'ERROR IN ZIP IMPORT: {ex}')
 if not has_properties:
-logger.info("No server.properties found on zip file import. Creating one with port selection of {}".format(str(port)))
-with open(os.path.join(new_server_dir, "server.properties"), "w") as f:
-f.write("server-port={}".format(port))
+logger.info(f"No server.properties found on zip file import. Creating one with port selection of {str(port)}")
+with open(os.path.join(new_server_dir, "server.properties"), "w", encoding='utf-8') as f:
+f.write(f"server-port={port}")
 f.close()

 full_jar_path = os.path.join(new_server_dir, server_jar)

+#due to strings being added we need to leave this as not an fstring
 if helper.is_os_windows():
-server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-'"'+full_jar_path+'"')
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar "{full_jar_path}" nogui'
 else:
-server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
-helper.float_to_string(max_mem),
-full_jar_path)
+server_command = f'java -Xms{helper.float_to_string(min_mem)}M -Xmx{helper.float_to_string(max_mem)}M -jar {full_jar_path} nogui'
 logger.debug('command: ' + server_command)
-server_log_file = "{}/logs/latest.log".format(new_server_dir)
+server_log_file = f"{new_server_dir}/logs/latest.log"
 server_stop = "stop"

 new_id = self.register_server(server_name, server_id, new_server_dir, backup_path, server_command, server_jar,
@@ -424,20 +404,30 @@ class Controller:
 os.rmdir(new_bu_path)
 backup_path.rename(new_bu_path)

-def register_server(self, name: str, server_uuid: str, server_dir: str, backup_path: str, server_command: str, server_file: str, server_log_file: str, server_stop: str, server_port: int):
+def register_server(self, name: str,
+server_uuid: str,
+server_dir: str,
+backup_path: str,
+server_command: str,
+server_file: str,
+server_log_file: str,
+server_stop: str,
+server_port: int):
 # put data in the db

-new_id = self.servers.create_server(name, server_uuid, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, server_port)
+new_id = self.servers.create_server(
+name, server_uuid, server_dir, backup_path, server_command, server_file, server_log_file, server_stop, server_port)

 if not helper.check_file_exists(os.path.join(server_dir, "crafty_managed.txt")):
 try:
 # place a file in the dir saying it's owned by crafty
-with open(os.path.join(server_dir, "crafty_managed.txt"), 'w') as f:
+with open(os.path.join(server_dir, "crafty_managed.txt"), 'w', encoding='utf-8') as f:
 f.write(
 "The server is managed by Crafty Controller.\n Leave this directory/files alone please")
 f.close()

 except Exception as e:
-logger.error("Unable to create required server files due to :{}".format(e))
+logger.error(f"Unable to create required server files due to :{e}")
 return False

 # let's re-init all servers
@@ -453,10 +443,9 @@ class Controller:
 if str(s['server_id']) == str(server_id):
 server_data = self.get_server_data(server_id)
 server_name = server_data['server_name']
-backup_dir = self.servers.get_server_data_by_id(server_id)['backup_path']

-logger.info("Deleting Server: ID {} | Name: {} ".format(server_id, server_name))
-console.info("Deleting Server: ID {} | Name: {} ".format(server_id, server_name))
+logger.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")
+console.info(f"Deleting Server: ID {server_id} | Name: {server_name} ")

 srv_obj = s['server_obj']
 running = srv_obj.check_running()
@@ -467,7 +456,7 @@ class Controller:
 try:
 shutil.rmtree(helper.get_os_understandable_path(self.servers.get_server_data_by_id(server_id)['path']))
 except Exception as e:
-logger.error("Unable to delete server files for server with ID: {} with error logged: {}".format(server_id, e))
+logger.error(f"Unable to delete server files for server with ID: {server_id} with error logged: {e}")
 if helper.check_path_exists(self.servers.get_server_data_by_id(server_id)['backup_path']):
 shutil.rmtree(helper.get_os_understandable_path(self.servers.get_server_data_by_id(server_id)['backup_path']))

@@ -484,4 +473,3 @@ class Controller:
 self.servers_list.pop(counter)

 counter += 1
-return
@@ -1,32 +1,27 @@
-import os
 import sys
 import logging
-import datetime

 from app.classes.shared.helpers import helper
 from app.classes.shared.console import console
-from app.classes.models.users import Users, users_helper
-from app.classes.minecraft.server_props import ServerProps
-from app.classes.web.websocket_helper import websocket_helper

+from app.classes.models.users import Users, users_helper

 # To disable warning about unused import ; Users is imported from here in other places
+#pylint: disable=self-assigning-variable
 Users = Users


 logger = logging.getLogger(__name__)
 peewee_logger = logging.getLogger('peewee')
 peewee_logger.setLevel(logging.INFO)

 try:
-from peewee import *
+# pylint: disable=unused-import
+from peewee import SqliteDatabase, fn
 from playhouse.shortcuts import model_to_dict
-from enum import Enum
-import yaml

-except ModuleNotFoundError as e:
-logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
-console.critical("Import Error: Unable to load {} module".format(e.name))
+except ModuleNotFoundError as err:
+logger.critical(f"Import Error: Unable to load {err.name} module", exc_info=True)
+console.critical(f"Import Error: Unable to load {err.name} module")
 sys.exit(1)

 database = SqliteDatabase(helper.db_path, pragmas={
@@ -43,24 +38,17 @@ class db_builder:

 username = default_data.get("username", 'admin')
 password = default_data.get("password", 'crafty')
-#
-#Users.insert({
-# Users.username: username.lower(),
-# Users.password: helper.encode_pass(password),
-# Users.enabled: True,
-# Users.superuser: True
-#}).execute()
-user_id = users_helper.add_user(username=username, password=password, email="default@example.com", superuser=True)
-#users_helper.update_user(user_id, user_crafty_data={"permissions_mask":"111", "server_quantity":[-1,-1,-1]} )
+users_helper.add_user(username=username, password=password, email="default@example.com", superuser=True)

 @staticmethod
 def is_fresh_install():
 try:
 user = users_helper.get_by_id(1)
+if user:
 return False
 except:
 return True
-pass

 class db_shortcuts:

@@ -76,8 +64,7 @@ class db_shortcuts:
 for s in query:
 rows.append(model_to_dict(s))
 except Exception as e:
-logger.warning("Database Error: {}".format(e))
+logger.warning(f"Database Error: {e}")
-pass

 return rows

@@ -5,7 +5,7 @@ import sys
 import os
 import re
 from functools import wraps
+# pylint: disable=no-name-in-module
 from functools import cached_property

 from app.classes.shared.helpers import helper
@@ -16,16 +16,14 @@ logger = logging.getLogger(__name__)
 try:
 import peewee
 from playhouse.migrate import (
-SchemaMigrator as ScM,
 SqliteMigrator,
-Operation, SQL, operation, SqliteDatabase,
-make_index_name, Context
+Operation, SQL, SqliteDatabase,
+make_index_name
 )

 except ModuleNotFoundError as e:
-logger.critical("Import Error: Unable to load {} module".format(
-e.name), exc_info=True)
-console.critical("Import Error: Unable to load {} module".format(e.name))
+logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
+console.critical(f"Import Error: Unable to load {e.name} module")
 sys.exit(1)

 MIGRATE_TABLE = 'migratehistory'
@@ -78,7 +76,7 @@ def get_model(method):


 # noinspection PyProtectedMember
-class Migrator(object):
+class Migrator():
 def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
 """
 Initializes the migrator
@@ -105,7 +103,7 @@ class Migrator(object):
 """
 Cleans the operations.
 """
-self.operations = list()
+self.operations = []

 def sql(self, sql: str, *params):
 """
@@ -279,7 +277,7 @@ class Migrator(object):


 # noinspection PyProtectedMember
-class MigrationManager(object):
+class MigrationManager():
 filemask = re.compile(r"[\d]+_[^\.]+\.py$")

 def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
@@ -287,7 +285,7 @@ class MigrationManager(object):
 Initializes the migration manager.
 """
 if not isinstance(database, (peewee.Database, peewee.Proxy)):
-raise RuntimeError('Invalid database: {}'.format(database))
+raise RuntimeError(f'Invalid database: {database}')
 self.database = database

 @cached_property
@@ -295,6 +293,7 @@ class MigrationManager(object):
 """
 Initialize and cache the MigrationHistory model.
 """
+#pylint: disable=no-member
 MigrateHistory._meta.database = self.database
 MigrateHistory._meta.table_name = 'migratehistory'
 MigrateHistory._meta.schema = None
@@ -306,6 +305,7 @@ class MigrationManager(object):
 """
 Scans migrations in the database.
 """
+# pylint: disable=no-member
 return [mm.name for mm in self.model.select().order_by(self.model.id)]

 @property
@@ -314,8 +314,7 @@ class MigrationManager(object):
 Scans migrations in the file system.
 """
 if not os.path.exists(helper.migration_dir):
-logger.warning('Migration directory: {} does not exist.'.format(
-helper.migration_dir))
+logger.warning(f'Migration directory: {helper.migration_dir} does not exist.')
 os.makedirs(helper.migration_dir)
 return sorted(f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f))

@@ -344,7 +343,7 @@ class MigrationManager(object):
 name = datetime.utcnow().strftime('%Y%m%d%H%M%S') + '_' + name
 filename = name + '.py'
 path = os.path.join(helper.migration_dir, filename)
-with open(path, 'w') as f:
+with open(path, 'w', encoding='utf-8') as f:
 f.write(MIGRATE_TEMPLATE.format(
 migrate=migrate, rollback=rollback, name=filename))

@@ -358,13 +357,14 @@ class MigrationManager(object):
 if auto:
 raise NotImplementedError

-logger.info('Creating migration "{}"'.format(name))
+logger.info(f'Creating migration "{name}"')
 name = self.compile(name, migrate, rollback)
-logger.info('Migration has been created as "{}"'.format(name))
+logger.info(f'Migration has been created as "{name}"')
 return name

 def clear(self):
 """Clear migrations."""
+# pylint: disable=no-member
 self.model.delete().execute()

 def up(self, name: t.Optional[str] = None):
@@ -381,7 +381,6 @@ class MigrationManager(object):
 console.info('There is nothing to migrate')
 return done

-migrator = self.migrator
 for mname in diff:
 done.append(self.up_one(mname, self.migrator))
 if name and name == mname:
@@ -393,14 +392,15 @@ class MigrationManager(object):
 """
 Reads a migration from a file.
 """
-call_params = dict()
+call_params = {}
 if helper.is_os_windows() and sys.version_info >= (3, 0):
 # if system is windows - force utf-8 encoding
 call_params['encoding'] = 'utf-8'
-with open(os.path.join(helper.migration_dir, name + '.py'), **call_params) as f:
+with open(os.path.join(helper.migration_dir, name + '.py'), **call_params, encoding='utf-8') as f:
 code = f.read()
 scope = {}
 code = compile(code, '<string>', 'exec', dont_inherit=True)
+# pylint: disable=exec-used
 exec(code, scope, None)
 return scope.get('migrate', lambda m, d: None), scope.get('rollback', lambda m, d: None)

@@ -417,24 +417,26 @@ class MigrationManager(object):
 return name
 with self.database.transaction():
 if rollback:
-logger.info('Rolling back "{}"'.format(name))
+logger.info(f'Rolling back "{name}"')
 rollback_fn(migrator, self.database)
 migrator.run()
+# pylint: disable=no-member
 self.model.delete().where(self.model.name == name).execute()
 else:
-logger.info('Migrate "{}"'.format(name))
+logger.info(f'Migrate "{name}"')
 migrate_fn(migrator, self.database)
 migrator.run()
 if name not in self.done:
+# pylint: disable=no-member
 self.model.create(name=name)

-logger.info('Done "{}"'.format(name))
+logger.info(f'Done "{name}"')
 return name

 except Exception:
 self.database.rollback()
 operation_name = 'Rollback' if rollback else 'Migration'
-logger.exception('{} failed: {}'.format(operation_name, name))
+logger.exception(f'{operation_name} failed: {name}')
 raise

 def down(self):
@@ -448,4 +450,4 @@ class MigrationManager(object):

 migrator = self.migrator
 self.up_one(name, migrator, False, True)
-logger.warning('Rolled back migration: {}'.format(name))
+logger.warning(f'Rolled back migration: {name}')
@ -1,6 +1,5 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
class PermissionHelper:
|
class PermissionHelper:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def both_have_perm(a: str, b: str, permission_tested: Enum):
|
def both_have_perm(a: str, b: str, permission_tested: Enum):
|
||||||
|
@ -1,42 +1,36 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
import json
|
|
||||||
import time
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
import threading
|
import threading
|
||||||
import logging.config
|
import logging.config
|
||||||
import zipfile
|
|
||||||
from threading import Thread
|
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import zlib
|
|
||||||
import html
|
import html
|
||||||
import apscheduler
|
|
||||||
from apscheduler.schedulers.background import BackgroundScheduler
|
from apscheduler.schedulers.background import BackgroundScheduler
|
||||||
#TZLocal is set as a hidden import on win pipeline
|
#TZLocal is set as a hidden import on win pipeline
|
||||||
from tzlocal import get_localzone
|
from tzlocal import get_localzone
|
||||||
|
|
||||||
|
from app.classes.models.servers import servers_helper
|
||||||
from app.classes.shared.helpers import helper
|
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.models.servers import Servers, helper_servers, servers_helper
|
|
||||||
from app.classes.models.management import management_helper
|
from app.classes.models.management import management_helper
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
from app.classes.shared.translation import translation
|
|
||||||
from app.classes.models.users import users_helper
|
from app.classes.models.users import users_helper
|
||||||
from app.classes.models.server_permissions import server_permissions
|
from app.classes.models.server_permissions import server_permissions
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
from app.classes.shared.helpers import helper
|
||||||
|
from app.classes.shared.console import console
|
||||||
|
from app.classes.shared.translation import translation
|
||||||
|
|
||||||
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import psutil
|
import psutil
|
||||||
#import pexpect
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@ -118,7 +112,6 @@ class Server:
|
|||||||
self.name = None
|
self.name = None
|
||||||
self.is_crashed = False
|
self.is_crashed = False
|
||||||
self.restart_count = 0
|
self.restart_count = 0
|
||||||
self.crash_watcher_schedule = None
|
|
||||||
self.stats = stats
|
self.stats = stats
|
||||||
tz = get_localzone()
|
tz = get_localzone()
|
||||||
self.server_scheduler = BackgroundScheduler(timezone=str(tz))
|
self.server_scheduler = BackgroundScheduler(timezone=str(tz))
|
||||||
@ -130,13 +123,13 @@ class Server:
|
|||||||
self.settings = server_data
|
self.settings = server_data
|
||||||
|
|
||||||
def do_server_setup(self, server_data_obj):
|
def do_server_setup(self, server_data_obj):
|
||||||
logger.info('Creating Server object: {} | Server Name: {} | Auto Start: {}'.format(
|
serverId = server_data_obj['server_id']
|
||||||
server_data_obj['server_id'],
|
serverName = server_data_obj['server_name']
|
||||||
server_data_obj['server_name'],
|
autoStart = server_data_obj['auto_start']
|
||||||
server_data_obj['auto_start']
|
|
||||||
))
|
logger.info(f'Creating Server object: {serverId} | Server Name: {serverName} | Auto Start: {autoStart}')
|
||||||
self.server_id = server_data_obj['server_id']
|
self.server_id = serverId
|
||||||
self.name = server_data_obj['server_name']
|
self.name = serverName
|
||||||
self.settings = server_data_obj
|
self.settings = server_data_obj
|
||||||
|
|
||||||
# build our server run command
|
# build our server run command
|
||||||
@ -144,15 +137,15 @@ class Server:
|
|||||||
if server_data_obj['auto_start']:
|
if server_data_obj['auto_start']:
|
||||||
delay = int(self.settings['auto_start_delay'])
|
delay = int(self.settings['auto_start_delay'])
|
||||||
|
|
||||||
logger.info("Scheduling server {} to start in {} seconds".format(self.name, delay))
|
logger.info(f"Scheduling server {self.name} to start in {delay} seconds")
|
||||||
console.info("Scheduling server {} to start in {} seconds".format(self.name, delay))
|
console.info(f"Scheduling server {self.name} to start in {delay} seconds")
|
||||||
|
|
||||||
self.server_scheduler.add_job(self.run_scheduled_server, 'interval', seconds=delay, id=str(self.server_id))
|
self.server_scheduler.add_job(self.run_scheduled_server, 'interval', seconds=delay, id=str(self.server_id))
|
||||||
self.server_scheduler.start()
|
self.server_scheduler.start()
|
||||||
|
|
||||||
def run_scheduled_server(self):
|
def run_scheduled_server(self):
|
||||||
console.info("Starting server ID: {} - {}".format(self.server_id, self.name))
|
console.info(f"Starting server ID: {self.server_id} - {self.name}")
|
||||||
logger.info("Starting server {}".format(self.server_id, self.name))
|
logger.info(f"Starting server ID: {self.server_id} - {self.name}")
|
||||||
#Sets waiting start to false since we're attempting to start the server.
|
#Sets waiting start to false since we're attempting to start the server.
|
||||||
servers_helper.set_waiting_start(self.server_id, False)
|
servers_helper.set_waiting_start(self.server_id, False)
|
||||||
self.run_threaded_server(None)
|
self.run_threaded_server(None)
|
||||||
@ -162,7 +155,7 @@ class Server:
|
|||||||
|
|
||||||
def run_threaded_server(self, user_id):
|
def run_threaded_server(self, user_id):
|
||||||
# start the server
|
# start the server
|
||||||
self.server_thread = threading.Thread(target=self.start_server, daemon=True, args=(user_id,), name='{}_server_thread'.format(self.server_id))
|
self.server_thread = threading.Thread(target=self.start_server, daemon=True, args=(user_id,), name=f'{self.server_id}_server_thread')
|
||||||
self.server_thread.start()
|
self.server_thread.start()
|
||||||
|
|
||||||
def setup_server_run_command(self):
|
def setup_server_run_command(self):
|
||||||
@ -174,16 +167,16 @@ class Server:
|
|||||||
# let's do some quick checking to make sure things actually exist
|
# let's do some quick checking to make sure things actually exist
|
||||||
full_path = os.path.join(self.server_path, server_exec_path)
|
full_path = os.path.join(self.server_path, server_exec_path)
|
||||||
if not helper.check_file_exists(full_path):
|
if not helper.check_file_exists(full_path):
|
||||||
logger.critical("Server executable path: {} does not seem to exist".format(full_path))
|
logger.critical(f"Server executable path: {full_path} does not seem to exist")
|
||||||
console.critical("Server executable path: {} does not seem to exist".format(full_path))
|
console.critical(f"Server executable path: {full_path} does not seem to exist")
|
||||||
|
|
||||||
if not helper.check_path_exists(self.server_path):
|
if not helper.check_path_exists(self.server_path):
|
||||||
logger.critical("Server path: {} does not seem to exits".format(self.server_path))
|
logger.critical(f"Server path: {self.server_path} does not seem to exits")
|
||||||
console.critical("Server path: {} does not seem to exits".format(self.server_path))
|
console.critical(f"Server path: {self.server_path} does not seem to exits")
|
||||||
|
|
||||||
if not helper.check_writeable(self.server_path):
|
if not helper.check_writeable(self.server_path):
|
||||||
logger.critical("Unable to write/access {}".format(self.server_path))
|
logger.critical(f"Unable to write/access {self.server_path}")
|
||||||
console.warning("Unable to write/access {}".format(self.server_path))
|
console.warning(f"Unable to write/access {self.server_path}")
|
||||||
|
|
||||||
def start_server(self, user_id):
|
def start_server(self, user_id):
|
||||||
if not user_id:
|
if not user_id:
|
||||||
@ -191,7 +184,7 @@ class Server:
|
|||||||
else:
|
else:
|
||||||
user_lang = users_helper.get_user_lang_by_id(user_id)
|
user_lang = users_helper.get_user_lang_by_id(user_id)
|
||||||
|
|
||||||
logger.info("Start command detected. Reloading settings from DB for server {}".format(self.name))
|
logger.info(f"Start command detected. Reloading settings from DB for server {self.name}")
|
||||||
self.setup_server_run_command()
|
self.setup_server_run_command()
|
||||||
# fail safe in case we try to start something already running
|
# fail safe in case we try to start something already running
|
||||||
if self.check_running():
|
if self.check_running():
|
||||||
@ -202,13 +195,13 @@ class Server:
|
|||||||
logger.error("Server is updating. Terminating startup.")
|
logger.error("Server is updating. Terminating startup.")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
logger.info("Launching Server {} with command {}".format(self.name, self.server_command))
|
logger.info(f"Launching Server {self.name} with command {self.server_command}")
|
||||||
console.info("Launching Server {} with command {}".format(self.name, self.server_command))
|
console.info(f"Launching Server {self.name} with command {self.server_command}")
|
||||||
|
|
||||||
#Checks for eula. Creates one if none detected.
|
#Checks for eula. Creates one if none detected.
|
||||||
#If EULA is detected and not set to one of these true variants, we offer to set it to true.
|
#If EULA is detected and not set to one of these true variants, we offer to set it to true.
|
||||||
if helper.check_file_exists(os.path.join(self.settings['path'], 'eula.txt')):
|
if helper.check_file_exists(os.path.join(self.settings['path'], 'eula.txt')):
|
||||||
f = open(os.path.join(self.settings['path'], 'eula.txt'), 'r')
|
f = open(os.path.join(self.settings['path'], 'eula.txt'), 'r', encoding='utf-8')
|
||||||
line = f.readline().lower()
|
line = f.readline().lower()
|
||||||
if line == 'eula=true':
|
if line == 'eula=true':
|
||||||
e_flag = True
|
e_flag = True
|
||||||
@ -227,7 +220,7 @@ class Server:
|
|||||||
else:
|
else:
|
||||||
e_flag = False
|
e_flag = False
|
||||||
|
|
||||||
if e_flag == False:
|
if not e_flag:
|
||||||
if user_id:
|
if user_id:
|
||||||
websocket_helper.broadcast_user(user_id, 'send_eula_bootbox', {
|
websocket_helper.broadcast_user(user_id, 'send_eula_bootbox', {
|
||||||
'id': self.server_id
|
'id': self.server_id
|
||||||
@ -239,27 +232,24 @@ class Server:
|
|||||||
f.close()
|
f.close()
|
||||||
if helper.is_os_windows():
|
if helper.is_os_windows():
|
||||||
logger.info("Windows Detected")
|
logger.info("Windows Detected")
|
||||||
creationflags=subprocess.CREATE_NEW_CONSOLE
|
|
||||||
else:
|
else:
|
||||||
logger.info("Unix Detected")
|
logger.info("Unix Detected")
|
||||||
creationflags=None
|
|
||||||
|
|
||||||
logger.info("Starting server in {p} with command: {c}".format(p=self.server_path, c=self.server_command))
|
logger.info(f"Starting server in {self.server_path} with command: {self.server_command}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.process = subprocess.Popen(self.server_command, cwd=self.server_path, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
self.process = subprocess.Popen(
|
||||||
|
self.server_command, cwd=self.server_path, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
#Checks for java on initial fail
|
#Checks for java on initial fail
|
||||||
if os.system("java -version") == 32512:
|
if os.system("java -version") == 32512:
|
||||||
msg = "Server {} failed to start with error code: {}".format(self.name, "Java not found. Please install Java then try again.")
|
|
||||||
if user_id:
|
if user_id:
|
||||||
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
||||||
'error': translation.translate('error', 'noJava', user_lang).format(self.name)
|
'error': translation.translate('error', 'noJava', user_lang).format(self.name)
|
||||||
})
|
})
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
msg = "Server {} failed to start with error code: {}".format(self.name, ex)
|
logger.error(f"Server {self.name} failed to start with error code: {ex}")
|
||||||
logger.error(msg)
|
|
||||||
if user_id:
|
if user_id:
|
||||||
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
websocket_helper.broadcast_user(user_id, 'send_start_error',{
|
||||||
'error': translation.translate('error', 'start-error', user_lang).format(self.name, ex)
|
'error': translation.translate('error', 'start-error', user_lang).format(self.name, ex)
|
||||||
@ -268,19 +258,20 @@ class Server:
|
|||||||
|
|
||||||
out_buf = ServerOutBuf(self.process, self.server_id)
|
out_buf = ServerOutBuf(self.process, self.server_id)
|
||||||
|
|
||||||
logger.debug('Starting virtual terminal listener for server {}'.format(self.name))
|
logger.debug(f'Starting virtual terminal listener for server {self.name}')
|
||||||
threading.Thread(target=out_buf.check, daemon=True, name='{}_virtual_terminal'.format(self.server_id)).start()
|
threading.Thread(target=out_buf.check, daemon=True, name=f'{self.server_id}_virtual_terminal').start()
|
||||||
|
|
||||||
self.is_crashed = False
|
self.is_crashed = False
|
||||||
|
|
||||||
self.start_time = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
|
self.start_time = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
|
||||||
|
|
||||||
if self.process.poll() is None:
|
if self.process.poll() is None:
|
||||||
logger.info("Server {} running with PID {}".format(self.name, self.process.pid))
|
logger.info(f"Server {self.name} running with PID {self.process.pid}")
|
||||||
console.info("Server {} running with PID {}".format(self.name, self.process.pid))
|
console.info(f"Server {self.name} running with PID {self.process.pid}")
|
||||||
self.is_crashed = False
|
self.is_crashed = False
|
||||||
self.stats.record_stats()
|
self.stats.record_stats()
|
||||||
check_internet_thread = threading.Thread(target=self.check_internet_thread, daemon=True, args=(user_id, user_lang, ), name="{self.name}_Internet")
|
check_internet_thread = threading.Thread(
|
||||||
|
target=self.check_internet_thread, daemon=True, args=(user_id, user_lang, ), name=f"{self.name}_Internet")
|
||||||
check_internet_thread.start()
|
check_internet_thread.start()
|
||||||
#Checks if this is the server's first run.
|
#Checks if this is the server's first run.
|
||||||
if servers_helper.get_first_run(self.server_id):
|
if servers_helper.get_first_run(self.server_id):
|
||||||
@ -301,14 +292,14 @@ class Server:
|
|||||||
websocket_helper.broadcast_user(user, 'send_start_reload', {
|
websocket_helper.broadcast_user(user, 'send_start_reload', {
|
||||||
})
|
})
|
||||||
else:
|
else:
|
||||||
logger.warning("Server PID {} died right after starting - is this a server config issue?".format(self.process.pid))
|
logger.warning(f"Server PID {self.process.pid} died right after starting - is this a server config issue?")
|
||||||
console.warning("Server PID {} died right after starting - is this a server config issue?".format(self.process.pid))
|
console.warning(f"Server PID {self.process.pid} died right after starting - is this a server config issue?")
|
||||||
|
|
||||||
if self.settings['crash_detection']:
|
if self.settings['crash_detection']:
|
||||||
logger.info("Server {} has crash detection enabled - starting watcher task".format(self.name))
|
logger.info(f"Server {self.name} has crash detection enabled - starting watcher task")
|
||||||
console.info("Server {} has crash detection enabled - starting watcher task".format(self.name))
|
console.info(f"Server {self.name} has crash detection enabled - starting watcher task")
|
||||||
|
|
||||||
self.crash_watcher_schedule = self.server_scheduler.add_job(self.detect_crash, 'interval', seconds=30, id="crash_watcher")
|
self.server_scheduler.add_job(self.detect_crash, 'interval', seconds=30, id=f"c_{self.server_id}")
|
||||||
|
|
||||||
def check_internet_thread(self, user_id, user_lang):
|
def check_internet_thread(self, user_id, user_lang):
|
||||||
if user_id:
|
if user_id:
|
||||||
@ -316,7 +307,6 @@ class Server:
|
|||||||
websocket_helper.broadcast_user(user_id, 'send_start_error', {
|
websocket_helper.broadcast_user(user_id, 'send_start_error', {
|
||||||
'error': translation.translate('error', 'internet', user_lang)
|
'error': translation.translate('error', 'internet', user_lang)
|
||||||
})
|
})
|
||||||
return
|
|
||||||
|
|
||||||
def stop_threaded_server(self):
|
def stop_threaded_server(self):
|
||||||
self.stop_server()
|
self.stop_server()
|
||||||
@ -332,8 +322,8 @@ class Server:
|
|||||||
self.process.terminate()
|
self.process.terminate()
|
||||||
running = self.check_running()
|
running = self.check_running()
|
||||||
if not running:
|
if not running:
|
||||||
logger.info("Can't stop server {} if it's not running".format(self.name))
|
logger.info(f"Can't stop server {self.name} if it's not running")
|
||||||
console.info("Can't stop server {} if it's not running".format(self.name))
|
console.info(f"Can't stop server {self.name} if it's not running")
|
||||||
return
|
return
|
||||||
x = 0
|
x = 0
|
||||||
|
|
||||||
@ -343,7 +333,7 @@ class Server:
|
|||||||
|
|
||||||
while running:
|
while running:
|
||||||
x = x+1
|
x = x+1
|
||||||
logstr = "Server {} is still running - waiting 2s to see if it stops ({} seconds until force close)".format(server_name, int(60-(x*2)))
|
logstr = f"Server {server_name} is still running - waiting 2s to see if it stops ({int(60-(x*2))} seconds until force close)"
|
||||||
logger.info(logstr)
|
logger.info(logstr)
|
||||||
console.info(logstr)
|
console.info(logstr)
|
||||||
running = self.check_running()
|
running = self.check_running()
|
||||||
@ -351,12 +341,12 @@ class Server:
|
|||||||
|
|
||||||
# if we haven't closed in 60 seconds, let's just slam down on the PID
|
# if we haven't closed in 60 seconds, let's just slam down on the PID
|
||||||
if x >= 30:
|
if x >= 30:
|
||||||
logger.info("Server {} is still running - Forcing the process down".format(server_name))
|
logger.info(f"Server {server_name} is still running - Forcing the process down")
|
||||||
console.info("Server {} is still running - Forcing the process down".format(server_name))
|
console.info(f"Server {server_name} is still running - Forcing the process down")
|
||||||
self.kill()
|
self.kill()
|
||||||
|
|
||||||
logger.info("Stopped Server {} with PID {}".format(server_name, server_pid))
|
logger.info(f"Stopped Server {server_name} with PID {server_pid}")
|
||||||
console.info("Stopped Server {} with PID {}".format(server_name, server_pid))
|
console.info(f"Stopped Server {server_name} with PID {server_pid}")
|
||||||
|
|
||||||
# massive resetting of variables
|
# massive resetting of variables
|
||||||
self.cleanup_server_object()
|
self.cleanup_server_object()
|
||||||
@ -397,13 +387,13 @@ class Server:
|
|||||||
|
|
||||||
def send_command(self, command):
|
def send_command(self, command):
|
||||||
if not self.check_running() and command.lower() != 'start':
|
if not self.check_running() and command.lower() != 'start':
|
||||||
logger.warning("Server not running, unable to send command \"{}\"".format(command))
|
logger.warning(f"Server not running, unable to send command \"{command}\"")
|
||||||
return False
|
return False
|
||||||
console.info("COMMAND TIME: {}".format(command))
|
console.info(f"COMMAND TIME: {command}")
|
||||||
logger.debug("Sending command {} to server".format(command))
|
logger.debug(f"Sending command {command} to server")
|
||||||
|
|
||||||
# send it
|
# send it
|
||||||
self.process.stdin.write("{}\n".format(command).encode('utf-8'))
|
self.process.stdin.write(f"{command}\n".encode('utf-8'))
|
||||||
self.process.stdin.flush()
|
self.process.stdin.flush()
|
||||||
|
|
||||||
def crash_detected(self, name):
|
def crash_detected(self, name):
|
||||||
@ -412,28 +402,26 @@ class Server:
|
|||||||
self.remove_watcher_thread()
|
self.remove_watcher_thread()
|
||||||
|
|
||||||
# the server crashed, or isn't found - so let's reset things.
|
# the server crashed, or isn't found - so let's reset things.
|
||||||
logger.warning("The server {} seems to have vanished unexpectedly, did it crash?".format(name))
|
logger.warning(f"The server {name} seems to have vanished unexpectedly, did it crash?")
|
||||||
|
|
||||||
if self.settings['crash_detection']:
|
if self.settings['crash_detection']:
|
||||||
logger.warning("The server {} has crashed and will be restarted. Restarting server".format(name))
|
logger.warning(f"The server {name} has crashed and will be restarted. Restarting server")
|
||||||
console.warning("The server {} has crashed and will be restarted. Restarting server".format(name))
|
console.warning(f"The server {name} has crashed and will be restarted. Restarting server")
|
||||||
self.run_threaded_server(None)
|
self.run_threaded_server(None)
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
logger.critical(
|
logger.critical(f"The server {name} has crashed, crash detection is disabled and it will not be restarted")
|
||||||
"The server {} has crashed, crash detection is disabled and it will not be restarted".format(name))
|
console.critical(f"The server {name} has crashed, crash detection is disabled and it will not be restarted")
|
||||||
console.critical(
|
|
||||||
"The server {} has crashed, crash detection is disabled and it will not be restarted".format(name))
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def kill(self):
|
def kill(self):
|
||||||
logger.info("Terminating server {} and all child processes".format(self.server_id))
|
logger.info(f"Terminating server {self.server_id} and all child processes")
|
||||||
process = psutil.Process(self.process.pid)
|
process = psutil.Process(self.process.pid)
|
||||||
|
|
||||||
# for every sub process...
|
# for every sub process...
|
||||||
for proc in process.children(recursive=True):
|
for proc in process.children(recursive=True):
|
||||||
# kill all the child processes - it sounds too wrong saying kill all the children (kevdagoat: lol!)
|
# kill all the child processes - it sounds too wrong saying kill all the children (kevdagoat: lol!)
|
||||||
logger.info("Sending SIGKILL to server {}".format(proc.name))
|
logger.info(f"Sending SIGKILL to server {proc.name}")
|
||||||
proc.kill()
|
proc.kill()
|
||||||
# kill the main process we are after
|
# kill the main process we are after
|
||||||
logger.info('Sending SIGKILL to parent')
|
logger.info('Sending SIGKILL to parent')
|
||||||
@ -453,7 +441,7 @@ class Server:
|
|||||||
|
|
||||||
def detect_crash(self):
|
def detect_crash(self):
|
||||||
|
|
||||||
logger.info("Detecting possible crash for server: {} ".format(self.name))
|
logger.info(f"Detecting possible crash for server: {self.name} ")
|
||||||
|
|
||||||
running = self.check_running()
|
running = self.check_running()
|
||||||
|
|
||||||
@ -473,11 +461,8 @@ class Server:
|
|||||||
|
|
||||||
# we have tried to restart 4 times...
|
# we have tried to restart 4 times...
|
||||||
elif self.restart_count == 4:
|
elif self.restart_count == 4:
|
||||||
logger.critical("Server {} has been restarted {} times. It has crashed, not restarting.".format(
|
logger.critical(f"Server {self.name} has been restarted {self.restart_count} times. It has crashed, not restarting.")
|
||||||
self.name, self.restart_count))
|
console.critical(f"Server {self.name} has been restarted {self.restart_count} times. It has crashed, not restarting.")
|
||||||
|
|
||||||
console.critical("Server {} has been restarted {} times. It has crashed, not restarting.".format(
|
|
||||||
self.name, self.restart_count))
|
|
||||||
|
|
||||||
# set to 99 restart attempts so this elif is skipped next time. (no double logging)
|
# set to 99 restart attempts so this elif is skipped next time. (no double logging)
|
||||||
self.restart_count = 99
|
self.restart_count = 99
|
||||||
@ -489,13 +474,13 @@ class Server:
|
|||||||
def remove_watcher_thread(self):
|
def remove_watcher_thread(self):
|
||||||
logger.info("Removing old crash detection watcher thread")
|
logger.info("Removing old crash detection watcher thread")
|
||||||
console.info("Removing old crash detection watcher thread")
|
console.info("Removing old crash detection watcher thread")
|
||||||
self.crash_watcher_schedule.remove(self.server_name)
|
self.server_scheduler.remove_job('c_'+str(self.server_id))
|
||||||
|
|
||||||
def agree_eula(self, user_id):
|
def agree_eula(self, user_id):
|
||||||
file = os.path.join(self.server_path, 'eula.txt')
|
file = os.path.join(self.server_path, 'eula.txt')
|
||||||
f = open(file, 'w')
|
f = open(file, 'w', encoding='utf-8')
|
||||||
f.write('eula=true')
|
f.write('eula=true')
|
||||||
f.close
|
f.close()
|
||||||
self.run_threaded_server(user_id)
|
self.run_threaded_server(user_id)
|
||||||
|
|
||||||
def is_backup_running(self):
|
def is_backup_running(self):
|
||||||
@ -506,8 +491,8 @@ class Server:
|
|||||||
|
|
||||||
def backup_server(self):
|
def backup_server(self):
|
||||||
backup_thread = threading.Thread(target=self.a_backup_server, daemon=True, name=f"backup_{self.name}")
|
backup_thread = threading.Thread(target=self.a_backup_server, daemon=True, name=f"backup_{self.name}")
|
||||||
logger.info("Starting Backup Thread for server {}.".format(self.settings['server_name']))
|
logger.info(f"Starting Backup Thread for server {self.settings['server_name']}.")
|
||||||
if self.server_path == None:
|
if self.server_path is None:
|
||||||
self.server_path = helper.get_os_understandable_path(self.settings['path'])
|
self.server_path = helper.get_os_understandable_path(self.settings['path'])
|
||||||
logger.info("Backup Thread - Local server path not defined. Setting local server path variable.")
|
logger.info("Backup Thread - Local server path not defined. Setting local server path variable.")
|
||||||
#checks if the backup thread is currently alive for this server
|
#checks if the backup thread is currently alive for this server
|
||||||
@ -515,44 +500,50 @@ class Server:
|
|||||||
try:
|
try:
|
||||||
backup_thread.start()
|
backup_thread.start()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
logger.error("Failed to start backup: {}".format(ex))
|
logger.error(f"Failed to start backup: {ex}")
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
logger.error("Backup is already being processed for server {}. Canceling backup request".format(self.settings['server_name']))
|
logger.error(f"Backup is already being processed for server {self.settings['server_name']}. Canceling backup request")
|
||||||
return False
|
return False
|
||||||
logger.info("Backup Thread started for server {}.".format(self.settings['server_name']))
|
logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
|
||||||
|
|
||||||
def a_backup_server(self):
|
def a_backup_server(self):
|
||||||
logger.info("Starting server {} (ID {}) backup".format(self.name, self.server_id))
|
logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
|
||||||
self.is_backingup = True
|
self.is_backingup = True
|
||||||
conf = management_helper.get_backup_config(self.server_id)
|
conf = management_helper.get_backup_config(self.server_id)
|
||||||
try:
|
try:
|
||||||
backup_filename = "{}/{}".format(self.settings['backup_path'], datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
|
backup_filename = f"{self.settings['backup_path']}/{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}"
|
||||||
logger.info("Creating backup of server '{}' (ID#{}, path={}) at '{}'".format(self.settings['server_name'], self.server_id, self.server_path, backup_filename))
|
logger.info(f"Creating backup of server '{self.settings['server_name']}'" +
|
||||||
|
f" (ID#{self.server_id}, path={self.server_path}) at '{backup_filename}'")
|
||||||
shutil.make_archive(helper.get_os_understandable_path(backup_filename), 'zip', self.server_path)
|
shutil.make_archive(helper.get_os_understandable_path(backup_filename), 'zip', self.server_path)
|
||||||
while len(self.list_backups()) > conf["max_backups"] and conf["max_backups"] > 0:
|
while len(self.list_backups()) > conf["max_backups"] and conf["max_backups"] > 0:
|
||||||
backup_list = self.list_backups()
|
backup_list = self.list_backups()
|
||||||
oldfile = backup_list[0]
|
oldfile = backup_list[0]
|
||||||
oldfile_path = "{}/{}".format(conf['backup_path'], oldfile['path'])
|
oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
|
||||||
logger.info("Removing old backup '{}'".format(oldfile['path']))
|
logger.info(f"Removing old backup '{oldfile['path']}'")
|
||||||
os.remove(helper.get_os_understandable_path(oldfile_path))
|
os.remove(helper.get_os_understandable_path(oldfile_path))
|
||||||
self.is_backingup = False
|
self.is_backingup = False
|
||||||
logger.info("Backup of server: {} completed".format(self.name))
|
logger.info(f"Backup of server: {self.name} completed")
|
||||||
return
|
return
|
||||||
except:
|
except:
|
||||||
logger.exception("Failed to create backup of server {} (ID {})".format(self.name, self.server_id))
|
logger.exception(f"Failed to create backup of server {self.name} (ID {self.server_id})")
|
||||||
self.is_backingup = False
|
self.is_backingup = False
|
||||||
return
|
return
|
||||||
|
|
||||||
def list_backups(self):
|
def list_backups(self):
|
||||||
if self.settings['backup_path']:
|
if self.settings['backup_path']:
|
||||||
if helper.check_path_exists(helper.get_os_understandable_path(self.settings['backup_path'])):
|
if helper.check_path_exists(helper.get_os_understandable_path(self.settings['backup_path'])):
|
||||||
files = helper.get_human_readable_files_sizes(helper.list_dir_by_date(helper.get_os_understandable_path(self.settings['backup_path'])))
|
files = (
|
||||||
return [{"path": os.path.relpath(f['path'], start=helper.get_os_understandable_path(self.settings['backup_path'])), "size": f["size"]} for f in files]
|
helper.get_human_readable_files_sizes(helper.list_dir_by_date(helper.get_os_understandable_path(self.settings['backup_path']))))
|
||||||
|
return [{
|
||||||
|
"path": os.path.relpath(f['path'],
|
||||||
|
start=helper.get_os_understandable_path(self.settings['backup_path'])),
|
||||||
|
"size": f["size"]
|
||||||
|
} for f in files]
|
||||||
else:
|
else:
|
||||||
return []
|
return []
|
||||||
else:
|
else:
|
||||||
logger.info("Error putting backup file list for server with ID: {}".format(self.server_id))
|
logger.info(f"Error putting backup file list for server with ID: {self.server_id}")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def jar_update(self):
|
def jar_update(self):
|
||||||
@ -567,13 +558,12 @@ class Server:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
def a_jar_update(self):
|
def a_jar_update(self):
|
||||||
error = False
|
|
||||||
wasStarted = "-1"
|
wasStarted = "-1"
|
||||||
self.backup_server()
|
self.backup_server()
|
||||||
#checks if server is running. Calls shutdown if it is running.
|
#checks if server is running. Calls shutdown if it is running.
|
||||||
if self.check_running():
|
if self.check_running():
|
||||||
wasStarted = True
|
wasStarted = True
|
||||||
logger.info("Server with PID {} is running. Sending shutdown command".format(self.process.pid))
|
logger.info(f"Server with PID {self.process.pid} is running. Sending shutdown command")
|
||||||
self.stop_threaded_server()
|
self.stop_threaded_server()
|
||||||
else:
|
else:
|
||||||
wasStarted = False
|
wasStarted = False
|
||||||
@ -594,17 +584,17 @@ class Server:
|
|||||||
if os.path.isdir(backup_dir):
|
if os.path.isdir(backup_dir):
|
||||||
backup_executable = os.path.join(backup_dir, 'old_server.jar')
|
backup_executable = os.path.join(backup_dir, 'old_server.jar')
|
||||||
else:
|
else:
|
||||||
logger.info("Executable backup directory not found for Server: {}. Creating one.".format(self.name))
|
logger.info(f"Executable backup directory not found for Server: {self.name}. Creating one.")
|
||||||
os.mkdir(backup_dir)
|
os.mkdir(backup_dir)
|
||||||
backup_executable = os.path.join(backup_dir, 'old_server.jar')
|
backup_executable = os.path.join(backup_dir, 'old_server.jar')
|
||||||
|
|
||||||
if os.path.isfile(backup_executable):
|
if os.path.isfile(backup_executable):
|
||||||
#removes old backup
|
#removes old backup
|
||||||
logger.info("Old backup found for server: {}. Removing...".format(self.name))
|
logger.info(f"Old backup found for server: {self.name}. Removing...")
|
||||||
os.remove(backup_executable)
|
os.remove(backup_executable)
|
||||||
logger.info("Old backup removed for server: {}.".format(self.name))
|
logger.info(f"Old backup removed for server: {self.name}.")
|
||||||
else:
|
else:
|
||||||
logger.info("No old backups found for server: {}".format(self.name))
|
logger.info(f"No old backups found for server: {self.name}")
|
||||||
|
|
||||||
current_executable = os.path.join(helper.get_os_understandable_path(self.settings['path']), self.settings['executable'])
|
current_executable = os.path.join(helper.get_os_understandable_path(self.settings['path']), self.settings['executable'])
|
||||||
|
|
||||||
@ -637,7 +627,8 @@ class Server:
|
|||||||
for user in server_users:
|
for user in server_users:
|
||||||
websocket_helper.broadcast_user(user, 'notification', "Executable update finished for "+self.name)
|
websocket_helper.broadcast_user(user, 'notification', "Executable update finished for "+self.name)
|
||||||
|
|
||||||
management_helper.add_to_audit_log_raw('Alert', '-1', self.server_id, "Executable update finished for "+self.name, self.settings['server_ip'])
|
management_helper.add_to_audit_log_raw(
|
||||||
|
'Alert', '-1', self.server_id, "Executable update finished for "+self.name, self.settings['server_ip'])
|
||||||
if wasStarted:
|
if wasStarted:
|
||||||
self.start_server()
|
self.start_server()
|
||||||
elif not downloaded and not self.is_backingup:
|
elif not downloaded and not self.is_backingup:
|
||||||
@ -647,4 +638,3 @@ class Server:
|
|||||||
websocket_helper.broadcast_user(user,'notification',
|
websocket_helper.broadcast_user(user,'notification',
|
||||||
"Executable update failed for " + self.name + ". Check log file for details.")
|
"Executable update failed for " + self.name + ". Check log file for details.")
|
||||||
logger.error("Executable download failed.")
|
logger.error("Executable download failed.")
|
||||||
pass
|
|
||||||
|
@ -1,36 +1,32 @@
|
|||||||
from datetime import timedelta
|
|
||||||
from http import server
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import json
|
|
||||||
import time
|
import time
|
||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
import asyncio
|
import asyncio
|
||||||
import shutil
|
|
||||||
from tzlocal import get_localzone
|
from tzlocal import get_localzone
|
||||||
|
|
||||||
from app.classes.controllers.users_controller import Users_Controller
|
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.web.tornado import Webserver
|
|
||||||
|
from app.classes.web.tornado_handler import Webserver
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
from app.classes.minecraft.serverjars import server_jar_obj
|
from app.classes.minecraft.serverjars import server_jar_obj
|
||||||
from app.classes.models.servers import servers_helper
|
|
||||||
from app.classes.models.management import management_helper
|
from app.classes.models.management import management_helper
|
||||||
from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED, EVENT_ALL, EVENT_JOB_REMOVED
|
from app.classes.controllers.users_controller import Users_Controller
|
||||||
|
from app.classes.controllers.servers_controller import Servers_Controller
|
||||||
|
|
||||||
logger = logging.getLogger('apscheduler')
|
logger = logging.getLogger('apscheduler')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
from apscheduler.events import EVENT_JOB_EXECUTED
|
||||||
from apscheduler.schedulers.background import BackgroundScheduler
|
from apscheduler.schedulers.background import BackgroundScheduler
|
||||||
from apscheduler.triggers.cron import CronTrigger
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as err:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {err.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {err.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
scheduler_intervals = { 'seconds',
|
scheduler_intervals = { 'seconds',
|
||||||
@ -80,7 +76,7 @@ class TasksManager:
|
|||||||
jobs = management_helper.get_schedules_enabled()
|
jobs = management_helper.get_schedules_enabled()
|
||||||
logger.info("Reload from DB called. Current enabled schedules: ")
|
logger.info("Reload from DB called. Current enabled schedules: ")
|
||||||
for item in jobs:
|
for item in jobs:
|
||||||
logger.info("JOB: {}".format(item))
|
logger.info(f"JOB: {item}")
|
||||||
|
|
||||||
def command_watcher(self):
|
def command_watcher(self):
|
||||||
while True:
|
while True:
|
||||||
@ -164,54 +160,133 @@ class TasksManager:
|
|||||||
for schedule in schedules:
|
for schedule in schedules:
|
||||||
if schedule.cron_string != "":
|
if schedule.cron_string != "":
|
||||||
try:
|
try:
|
||||||
self.scheduler.add_job(management_helper.add_command, CronTrigger.from_crontab(schedule.cron_string, timezone=str(self.tz)), id=str(schedule.schedule_id), args=[schedule.server_id, self.users_controller.get_id_by_name('system'), '127.0.0.1', schedule.command])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
CronTrigger.from_crontab(schedule.cron_string,
|
||||||
|
timezone=str(self.tz)),
|
||||||
|
id = str(schedule.schedule_id),
|
||||||
|
args = [schedule.server_id,
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
schedule.command]
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.error("Failed to schedule task with error: {}.".format(e))
|
console.error(f"Failed to schedule task with error: {e}.")
|
||||||
console.warning("Removing failed task from DB.")
|
console.warning("Removing failed task from DB.")
|
||||||
logger.error("Failed to schedule task with error: {}.".format(e))
|
logger.error(f"Failed to schedule task with error: {e}.")
|
||||||
logger.warning("Removing failed task from DB.")
|
logger.warning("Removing failed task from DB.")
|
||||||
#remove items from DB if task fails to add to apscheduler
|
#remove items from DB if task fails to add to apscheduler
|
||||||
management_helper.delete_scheduled_task(schedule.schedule_id)
|
management_helper.delete_scheduled_task(schedule.schedule_id)
|
||||||
else:
|
else:
|
||||||
if schedule.interval_type == 'hours':
|
if schedule.interval_type == 'hours':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = 0, hour = '*/'+str(schedule.interval), id=str(schedule.schedule_id), args=[schedule.server_id, self.users_controller.get_id_by_name('system'), '127.0.0.1', schedule.command])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = 0,
|
||||||
|
hour = '*/'+str(schedule.interval),
|
||||||
|
id = str(schedule.schedule_id),
|
||||||
|
args = [schedule.server_id,
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
schedule.command]
|
||||||
|
)
|
||||||
elif schedule.interval_type == 'minutes':
|
elif schedule.interval_type == 'minutes':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = '*/'+str(schedule.interval), id=str(schedule.schedule_id), args=[schedule.server_id, self.users_controller.get_id_by_name('system'), '127.0.0.1', schedule.command])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = '*/'+str(schedule.interval),
|
||||||
|
id = str(schedule.schedule_id),
|
||||||
|
args = [schedule.server_id,
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
schedule.command]
|
||||||
|
)
|
||||||
elif schedule.interval_type == 'days':
|
elif schedule.interval_type == 'days':
|
||||||
time = schedule.start_time.split(':')
|
curr_time = schedule.start_time.split(':')
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', day = '*/'+str(schedule.interval), hour=time[0], minute=time[1], id=str(schedule.schedule_id), args=[schedule.server_id, self.users_controller.get_id_by_name('system'), '127.0.0.1', schedule.command])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
day = '*/'+str(schedule.interval),
|
||||||
|
hour=curr_time[0],
|
||||||
|
minute=curr_time[1],
|
||||||
|
id=str(schedule.schedule_id),
|
||||||
|
args=[schedule.server_id,
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
schedule.command]
|
||||||
|
)
|
||||||
self.scheduler.start()
|
self.scheduler.start()
|
||||||
jobs = self.scheduler.get_jobs()
|
jobs = self.scheduler.get_jobs()
|
||||||
logger.info("Loaded schedules. Current enabled schedules: ")
|
logger.info("Loaded schedules. Current enabled schedules: ")
|
||||||
for item in jobs:
|
for item in jobs:
|
||||||
logger.info("JOB: {}".format(item))
|
logger.info(f"JOB: {item}")
|
||||||
|
|
||||||
def schedule_job(self, job_data):
|
def schedule_job(self, job_data):
|
||||||
sch_id = management_helper.create_scheduled_task(job_data['server_id'], job_data['action'], job_data['interval'], job_data['interval_type'], job_data['start_time'], job_data['command'], "None", job_data['enabled'], job_data['one_time'], job_data['cron_string'])
|
sch_id = management_helper.create_scheduled_task(
|
||||||
|
job_data['server_id'],
|
||||||
|
job_data['action'],
|
||||||
|
job_data['interval'],
|
||||||
|
job_data['interval_type'],
|
||||||
|
job_data['start_time'],
|
||||||
|
job_data['command'],
|
||||||
|
"None",
|
||||||
|
job_data['enabled'],
|
||||||
|
job_data['one_time'],
|
||||||
|
job_data['cron_string'])
|
||||||
if job_data['enabled']:
|
if job_data['enabled']:
|
||||||
if job_data['cron_string'] != "":
|
if job_data['cron_string'] != "":
|
||||||
try:
|
try:
|
||||||
self.scheduler.add_job(management_helper.add_command, CronTrigger.from_crontab(job_data['cron_string'], timezone=str(self.tz)), id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
CronTrigger.from_crontab(job_data['cron_string'],
|
||||||
|
timezone=str(self.tz)),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.error("Failed to schedule task with error: {}.".format(e))
|
console.error(f"Failed to schedule task with error: {e}.")
|
||||||
console.warning("Removing failed task from DB.")
|
console.warning("Removing failed task from DB.")
|
||||||
logger.error("Failed to schedule task with error: {}.".format(e))
|
logger.error(f"Failed to schedule task with error: {e}.")
|
||||||
logger.warning("Removing failed task from DB.")
|
logger.warning("Removing failed task from DB.")
|
||||||
#remove items from DB if task fails to add to apscheduler
|
#remove items from DB if task fails to add to apscheduler
|
||||||
management_helper.delete_scheduled_task(sch_id)
|
management_helper.delete_scheduled_task(sch_id)
|
||||||
else:
|
else:
|
||||||
if job_data['interval_type'] == 'hours':
|
if job_data['interval_type'] == 'hours':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = 0, hour = '*/'+str(job_data['interval']), id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = 0,
|
||||||
|
hour = '*/'+str(job_data['interval']),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
elif job_data['interval_type'] == 'minutes':
|
elif job_data['interval_type'] == 'minutes':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = '*/'+str(job_data['interval']), id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = '*/'+str(job_data['interval']),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
elif job_data['interval_type'] == 'days':
|
elif job_data['interval_type'] == 'days':
|
||||||
time = job_data['start_time'].split(':')
|
curr_time = job_data['start_time'].split(':')
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', day = '*/'+str(job_data['interval']), hour = time[0], minute = time[1], id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']], )
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
day = '*/'+str(job_data['interval']),
|
||||||
|
hour = curr_time[0],
|
||||||
|
minute = curr_time[1],
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']],
|
||||||
|
)
|
||||||
logger.info("Added job. Current enabled schedules: ")
|
logger.info("Added job. Current enabled schedules: ")
|
||||||
jobs = self.scheduler.get_jobs()
|
jobs = self.scheduler.get_jobs()
|
||||||
for item in jobs:
|
for item in jobs:
|
||||||
logger.info("JOB: {}".format(item))
|
logger.info(f"JOB: {item}")
|
||||||
|
|
||||||
def remove_all_server_tasks(self, server_id):
|
def remove_all_server_tasks(self, server_id):
|
||||||
schedules = management_helper.get_schedules_by_server(server_id)
|
schedules = management_helper.get_schedules_by_server(server_id)
|
||||||
@ -223,9 +298,10 @@ class TasksManager:
|
|||||||
management_helper.delete_scheduled_task(sch_id)
|
management_helper.delete_scheduled_task(sch_id)
|
||||||
if job.enabled:
|
if job.enabled:
|
||||||
self.scheduler.remove_job(str(sch_id))
|
self.scheduler.remove_job(str(sch_id))
|
||||||
logger.info("Job with ID {} was deleted.".format(sch_id))
|
logger.info(f"Job with ID {sch_id} was deleted.")
|
||||||
else:
|
else:
|
||||||
logger.info("Job with ID {} was deleted from DB, but was not enabled. Not going to try removing something that doesn't exist from active schedules.".format(sch_id))
|
logger.info(f"Job with ID {sch_id} was deleted from DB, but was not enabled."
|
||||||
|
+ "Not going to try removing something that doesn't exist from active schedules.")
|
||||||
|
|
||||||
def update_job(self, sch_id, job_data):
|
def update_job(self, sch_id, job_data):
|
||||||
management_helper.update_scheduled_task(sch_id, job_data)
|
management_helper.update_scheduled_task(sch_id, job_data)
|
||||||
@ -233,28 +309,64 @@ class TasksManager:
|
|||||||
self.scheduler.remove_job(str(sch_id))
|
self.scheduler.remove_job(str(sch_id))
|
||||||
except:
|
except:
|
||||||
logger.info("No job found in update job. Assuming it was previously disabled. Starting new job.")
|
logger.info("No job found in update job. Assuming it was previously disabled. Starting new job.")
|
||||||
|
|
||||||
|
if job_data['enabled']:
|
||||||
if job_data['cron_string'] != "":
|
if job_data['cron_string'] != "":
|
||||||
cron = job_data['cron_string'].split(' ')
|
|
||||||
try:
|
try:
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = cron[0], hour = cron[1], day = cron[2], month = cron[3], day_of_week = cron[4], args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
CronTrigger.from_crontab(job_data['cron_string'],
|
||||||
|
timezone=str(self.tz)),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.error("Failed to schedule task with error: {}.".format(e))
|
console.error(f"Failed to schedule task with error: {e}.")
|
||||||
console.info("Removing failed task from DB.")
|
console.info("Removing failed task from DB.")
|
||||||
management_helper.delete_scheduled_task(sch_id)
|
management_helper.delete_scheduled_task(sch_id)
|
||||||
else:
|
else:
|
||||||
if job_data['interval_type'] == 'hours':
|
if job_data['interval_type'] == 'hours':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = 0, hour = '*/'+str(job_data['interval']), id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = 0,
|
||||||
|
hour = '*/'+str(job_data['interval']),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
elif job_data['interval_type'] == 'minutes':
|
elif job_data['interval_type'] == 'minutes':
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', minute = '*/'+str(job_data['interval']), id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']])
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
minute = '*/'+str(job_data['interval']),
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
elif job_data['interval_type'] == 'days':
|
elif job_data['interval_type'] == 'days':
|
||||||
time = job_data['start_time'].split(':')
|
curr_time = job_data['start_time'].split(':')
|
||||||
self.scheduler.add_job(management_helper.add_command, 'cron', day = '*/'+str(job_data['interval']), hour = time[0], minute = time[1], id=str(sch_id), args=[job_data['server_id'], self.users_controller.get_id_by_name('system'), '127.0.0.1', job_data['command']], )
|
self.scheduler.add_job(management_helper.add_command,
|
||||||
|
'cron',
|
||||||
|
day = '*/'+str(job_data['interval']),
|
||||||
|
hour = curr_time[0],
|
||||||
|
minute = curr_time[1],
|
||||||
|
id=str(sch_id),
|
||||||
|
args=[job_data['server_id'],
|
||||||
|
self.users_controller.get_id_by_name('system'),
|
||||||
|
'127.0.0.1',
|
||||||
|
job_data['command']]
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
self.scheduler.get_job(str(sch_id))
|
self.scheduler.get_job(str(sch_id))
|
||||||
self.scheduler.remove_job(str(sch_id))
|
self.scheduler.remove_job(str(sch_id))
|
||||||
except:
|
except:
|
||||||
logger.info("APScheduler found no scheduled job on schedule update for schedule with id: {}. Assuming it was already disabled.".format(sch_id))
|
logger.info(f"APScheduler found no scheduled job on schedule update for schedule with id: {sch_id} Assuming it was already disabled.")
|
||||||
|
|
||||||
def schedule_watcher(self, event):
|
def schedule_watcher(self, event):
|
||||||
if not event.exception:
|
if not event.exception:
|
||||||
@ -266,12 +378,12 @@ class TasksManager:
|
|||||||
else:
|
else:
|
||||||
logger.info("Event job ID is not numerical. Assuming it's stats - not stored in DB. Moving on.")
|
logger.info("Event job ID is not numerical. Assuming it's stats - not stored in DB. Moving on.")
|
||||||
else:
|
else:
|
||||||
logger.error("Task failed with error: {}".format(event.exception))
|
logger.error(f"Task failed with error: {event.exception}")
|
||||||
|
|
||||||
def start_stats_recording(self):
|
def start_stats_recording(self):
|
||||||
stats_update_frequency = helper.get_setting('stats_update_frequency')
|
stats_update_frequency = helper.get_setting('stats_update_frequency')
|
||||||
logger.info("Stats collection frequency set to {stats} seconds".format(stats=stats_update_frequency))
|
logger.info(f"Stats collection frequency set to {stats_update_frequency} seconds")
|
||||||
console.info("Stats collection frequency set to {stats} seconds".format(stats=stats_update_frequency))
|
console.info(f"Stats collection frequency set to {stats_update_frequency} seconds")
|
||||||
|
|
||||||
# one for now,
|
# one for now,
|
||||||
self.controller.stats.record_stats()
|
self.controller.stats.record_stats()
|
||||||
@ -312,14 +424,17 @@ class TasksManager:
|
|||||||
'mem_usage': host_stats.get('mem_usage')
|
'mem_usage': host_stats.get('mem_usage')
|
||||||
})
|
})
|
||||||
|
|
||||||
servers = self.controller.servers_list
|
for user in Users_Controller.get_all_users():
|
||||||
servers_ping = []
|
servers_ping = []
|
||||||
|
if user.superuser:
|
||||||
|
servers = Servers_Controller.get_all_servers_stats()
|
||||||
|
else:
|
||||||
|
servers = Servers_Controller.get_authorized_servers_stats(user.user_id)
|
||||||
for srv in servers:
|
for srv in servers:
|
||||||
server_data = srv.get('server_data_obj', False)
|
if srv:
|
||||||
if server_data:
|
server_id = srv['server_data']['server_id']
|
||||||
server_id = server_data.get('server_id', False)
|
|
||||||
srv['raw_ping_result'] = self.controller.stats.get_raw_server_stats(server_id)
|
srv['raw_ping_result'] = self.controller.stats.get_raw_server_stats(server_id)
|
||||||
if ("{}".format(srv['raw_ping_result'].get('icon')) == "b''"):
|
if f"{srv['raw_ping_result'].get('icon')}" == "b''":
|
||||||
srv['raw_ping_result']['icon'] = False
|
srv['raw_ping_result']['icon'] = False
|
||||||
|
|
||||||
servers_ping.append({
|
servers_ping.append({
|
||||||
@ -340,12 +455,12 @@ class TasksManager:
|
|||||||
'version': srv['raw_ping_result'].get('version'),
|
'version': srv['raw_ping_result'].get('version'),
|
||||||
'icon': srv['raw_ping_result'].get('icon')
|
'icon': srv['raw_ping_result'].get('icon')
|
||||||
})
|
})
|
||||||
if (len(websocket_helper.clients) > 0):
|
if len(websocket_helper.clients) > 0:
|
||||||
websocket_helper.broadcast_page_params(
|
websocket_helper.broadcast_user_page_params(
|
||||||
'/panel/server_detail',
|
'/panel/server_detail',
|
||||||
{
|
{
|
||||||
'id': str(server_id)
|
'id': str(server_id)
|
||||||
},
|
}, user.user_id,
|
||||||
'update_server_details',
|
'update_server_details',
|
||||||
{
|
{
|
||||||
'id': srv['raw_ping_result'].get('id'),
|
'id': srv['raw_ping_result'].get('id'),
|
||||||
@ -369,7 +484,7 @@ class TasksManager:
|
|||||||
|
|
||||||
if (len(servers_ping) > 0) and (len(websocket_helper.clients) > 0):
|
if (len(servers_ping) > 0) and (len(websocket_helper.clients) > 0):
|
||||||
try:
|
try:
|
||||||
websocket_helper.broadcast_page('/panel/dashboard', 'update_server_status', servers_ping)
|
websocket_helper.broadcast_user_page('/panel/dashboard', user.user_id, 'update_server_status', servers_ping)
|
||||||
websocket_helper.broadcast_page('/status', 'update_server_status', servers_ping)
|
websocket_helper.broadcast_page('/status', 'update_server_status', servers_ping)
|
||||||
except:
|
except:
|
||||||
console.warning("Can't broadcast server status to websocket")
|
console.warning("Can't broadcast server status to websocket")
|
||||||
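The TasksManager hunks above replace one dashboard-wide broadcast with a per-user loop: each user receives only the stats of servers they are authorized to see, and the payload goes out through a user-scoped broadcast rather than a page-wide one. A simplified sketch of that control flow, assuming controller and websocket objects exposing the method names visible in the diff (`get_all_users`, `get_all_servers_stats`, `get_authorized_servers_stats`, `broadcast_user_page`); the payload fields and argument objects are illustrative stand-ins, not the project's exact structures:

    def broadcast_dashboard_stats(users_controller, servers_controller, websocket_helper):
        """Send dashboard stats to each user, limited to servers they may access (sketch)."""
        for user in users_controller.get_all_users():
            if user.superuser:
                servers = servers_controller.get_all_servers_stats()
            else:
                servers = servers_controller.get_authorized_servers_stats(user.user_id)

            servers_ping = []
            for srv in servers:
                if srv:
                    # Field names here are illustrative; the real payload carries ping results too.
                    servers_ping.append({'id': srv['server_data']['server_id']})

            if len(servers_ping) > 0 and len(websocket_helper.clients) > 0:
                # User-scoped broadcast: only sockets belonging to this user get the update.
                websocket_helper.broadcast_user_page(
                    '/panel/dashboard', user.user_id, 'update_server_status', servers_ping)

The key change is the scoping: building the list inside the user loop means a user without access to any server simply gets an empty update.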
@ -378,4 +493,3 @@ class TasksManager:
|
|||||||
def log_watcher(self):
|
def log_watcher(self):
|
||||||
self.controller.servers.check_for_old_logs()
|
self.controller.servers.check_for_old_logs()
|
||||||
self.scheduler.add_job(self.controller.servers.check_for_old_logs, 'interval', hours=6, id="log-mgmt")
|
self.scheduler.add_job(self.controller.servers.check_for_old_logs, 'interval', hours=6, id="log-mgmt")
|
||||||
|
|
||||||
|
@ -8,7 +8,6 @@ from app.classes.shared.helpers import helper
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Translation:
|
class Translation:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.translations_path = os.path.join(helper.root_dir, 'app', 'translations')
|
self.translations_path = os.path.join(helper.root_dir, 'app', 'translations')
|
||||||
@ -55,8 +54,8 @@ class Translation:
|
|||||||
try:
|
try:
|
||||||
translated_page = data[page]
|
translated_page = data[page]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
logger.error('Translation File Error: page {} does not exist for lang {}'.format(page, language))
|
logger.error(f'Translation File Error: page {page} does not exist for lang {language}')
|
||||||
console.error('Translation File Error: page {} does not exist for lang {}'.format(page, language))
|
console.error(f'Translation File Error: page {page} does not exist for lang {language}')
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -1,24 +1,17 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import tempfile
|
|
||||||
import threading
|
|
||||||
from typing import Container
|
|
||||||
import zipfile
|
|
||||||
|
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
import bleach
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import html
|
import html
|
||||||
import re
|
import re
|
||||||
from app.classes.models.users import helper_users
|
import logging
|
||||||
|
import tornado.web
|
||||||
|
import tornado.escape
|
||||||
|
import bleach
|
||||||
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.shared.main_models import Users, installer
|
|
||||||
from app.classes.web.base_handler import BaseHandler
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.server import ServerOutBuf
|
from app.classes.shared.server import ServerOutBuf
|
||||||
|
|
||||||
|
from app.classes.web.base_handler import BaseHandler
|
||||||
from app.classes.models.server_permissions import Enum_Permissions_Server
|
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
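The import hunk above is typical of the cleanup in this commit: unused imports (`json`, `tempfile`, `zipfile`, `typing.Container`, the stale `main_models` names) are dropped and the survivors are regrouped as standard library, then third-party, then application modules, which keeps pylint's unused-import and wrong-import-order checks quiet. A hedged sketch of the resulting header shape, keeping only names that appear in the new version and assuming the project's own environment for the third-party and local modules:

    # Standard library imports first
    import logging
    import os

    # Third-party imports second (available in this project's environment)
    import bleach
    import tornado.web

    # Local application imports last (module paths as used elsewhere in this diff)
    from app.classes.shared.console import console
    from app.classes.shared.helpers import helper

    logger = logging.getLogger(__name__)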
@ -67,7 +60,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if not server_data['log_path']:
|
if not server_data['log_path']:
|
||||||
logger.warning("Log path not found in server_log ajax call ({})".format(server_id))
|
logger.warning(f"Log path not found in server_log ajax call ({server_id})")
|
||||||
|
|
||||||
if full_log:
|
if full_log:
|
||||||
log_lines = helper.get_setting('max_log_lines')
|
log_lines = helper.get_setting('max_log_lines')
|
||||||
@ -81,12 +74,11 @@ class AjaxHandler(BaseHandler):
|
|||||||
d = re.sub('(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )', '', d)
|
d = re.sub('(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )', '', d)
|
||||||
d = re.sub('[A-z]{2}\b\b', '', d)
|
d = re.sub('[A-z]{2}\b\b', '', d)
|
||||||
line = helper.log_colors(html.escape(d))
|
line = helper.log_colors(html.escape(d))
|
||||||
self.write('{}<br />'.format(line))
|
self.write(f'{line}<br />')
|
||||||
# self.write(d.encode("utf-8"))
|
# self.write(d.encode("utf-8"))
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning("Skipping Log Line due to error: {}".format(e))
|
logger.warning(f"Skipping Log Line due to error: {e}")
|
||||||
pass
|
|
||||||
|
|
||||||
elif page == "announcements":
|
elif page == "announcements":
|
||||||
data = helper.get_announcements()
|
data = helper.get_announcements()
|
||||||
@ -97,20 +89,22 @@ class AjaxHandler(BaseHandler):
|
|||||||
file_path = helper.get_os_understandable_path(self.get_argument('file_path', None))
|
file_path = helper.get_os_understandable_path(self.get_argument('file_path', None))
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_file'): return
|
if not self.check_server_id(server_id, 'get_file'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning("Invalid path in get_file ajax call ({})".format(file_path))
|
logger.warning(f"Invalid path in get_file ajax call ({file_path})")
|
||||||
console.warning("Invalid path in get_file ajax call ({})".format(file_path))
|
console.warning(f"Invalid path in get_file ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
error = None
|
error = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with open(file_path) as file:
|
with open(file_path, encoding='utf-8') as file:
|
||||||
file_contents = file.read()
|
file_contents = file.read()
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
file_contents = ''
|
file_contents = ''
|
||||||
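The get_file hunk above adds an explicit `encoding='utf-8'` to `open()`, which pylint reports as unspecified-encoding when omitted; without it the platform default applies, so the same file can decode differently on Windows and Linux. A small self-contained sketch of the read path with the same UnicodeDecodeError fallback the handler uses (the temporary file is only there to make the example runnable):

    import os
    import tempfile

    def read_text_file(file_path):
        """Read a file as UTF-8, returning '' if it is not valid text (sketch)."""
        try:
            with open(file_path, encoding='utf-8') as file:
                return file.read()
        except UnicodeDecodeError:
            # Binary or differently-encoded file: mirror the handler's empty-string fallback.
            return ''

    # Illustrative usage with a throwaway file
    with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False, encoding='utf-8') as tmp:
        tmp.write('server-port=25565\n')
    print(read_text_file(tmp.name))
    os.remove(tmp.name)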
@ -126,8 +120,10 @@ class AjaxHandler(BaseHandler):
|
|||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument('path', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_tree'): return
|
if not self.check_server_id(server_id, 'get_tree'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
||||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||||
@ -154,8 +150,10 @@ class AjaxHandler(BaseHandler):
|
|||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
path = self.get_argument('path', None)
|
path = self.get_argument('path', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'get_tree'): return
|
if not self.check_server_id(server_id, 'get_tree'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
if helper.validate_traversal(self.controller.servers.get_server_data_by_id(server_id)['path'], path):
|
||||||
self.write(helper.get_os_understandable_path(path) + '\n' +
|
self.write(helper.get_os_understandable_path(path) + '\n' +
|
||||||
@ -182,12 +180,6 @@ class AjaxHandler(BaseHandler):
|
|||||||
'Players': Enum_Permissions_Server.Players,
|
'Players': Enum_Permissions_Server.Players,
|
||||||
}
|
}
|
||||||
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
user_perms = self.controller.server_perms.get_user_id_permissions_list(exec_user['user_id'], server_id)
|
||||||
error = bleach.clean(self.get_argument('error', "WTF Error!"))
|
|
||||||
|
|
||||||
page_data = {
|
|
||||||
'user_data': exec_user,
|
|
||||||
'error': error
|
|
||||||
}
|
|
||||||
|
|
||||||
if page == "send_command":
|
if page == "send_command":
|
||||||
command = self.get_body_argument('command', default=None, strip=True)
|
command = self.get_body_argument('command', default=None, strip=True)
|
||||||
@ -200,18 +192,23 @@ class AjaxHandler(BaseHandler):
|
|||||||
srv_obj = self.controller.get_server_obj(server_id)
|
srv_obj = self.controller.get_server_obj(server_id)
|
||||||
|
|
||||||
if command == srv_obj.settings['stop_command']:
|
if command == srv_obj.settings['stop_command']:
|
||||||
logger.info("Stop command detected as terminal input - intercepting. Starting Crafty's stop process for server with id: {}.".format(server_id))
|
logger.info("Stop command detected as terminal input - intercepting." +
|
||||||
|
f"Starting Crafty's stop process for server with id: {server_id}")
|
||||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'stop_server')
|
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'stop_server')
|
||||||
command = None
|
command = None
|
||||||
elif command == 'restart':
|
elif command == 'restart':
|
||||||
logger.info("Restart command detected as terminal input - intercepting. Starting Crafty's stop process for server with id: {}.".format(server_id))
|
logger.info("Restart command detected as terminal input - intercepting." +
|
||||||
|
f"Starting Crafty's stop process for server with id: {server_id}")
|
||||||
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'restart_server')
|
self.controller.management.send_command(exec_user['user_id'], server_id, self.get_remote_ip(), 'restart_server')
|
||||||
command = None
|
command = None
|
||||||
if command:
|
if command:
|
||||||
if srv_obj.check_running():
|
if srv_obj.check_running():
|
||||||
srv_obj.send_command(command)
|
srv_obj.send_command(command)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'], "Sent command to {} terminal: {}".format(self.controller.servers.get_server_friendly_name(server_id), command), server_id, self.get_remote_ip())
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
|
f"Sent command to {self.controller.servers.get_server_friendly_name(server_id)} terminal: {command}",
|
||||||
|
server_id,
|
||||||
|
self.get_remote_ip())
|
||||||
|
|
||||||
elif page == "create_file":
|
elif page == "create_file":
|
||||||
if not permissions['Files'] in user_perms:
|
if not permissions['Files'] in user_perms:
|
||||||
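The send_command hunk above splits two over-long log lines but keeps the interception logic: when the typed command equals the server's configured stop command (or the literal `restart`), the handler routes it through Crafty's managed stop/restart flow instead of writing it to the server console. A hedged sketch of that branching, with simple callbacks standing in for the real controller and server objects:

    def route_terminal_command(command, stop_command, send_management_command, send_raw_command):
        """Decide whether a typed command goes through the managed stop/restart path (sketch)."""
        if command == stop_command:
            # Intercept: use the panel's managed stop flow instead of raw console input.
            send_management_command('stop_server')
            return
        if command == 'restart':
            send_management_command('restart_server')
            return
        # Anything else is passed straight to the running server process.
        send_raw_command(command)

    # Illustrative usage with print callbacks in place of the controller and server object
    route_terminal_command('stop', 'stop',
                           lambda action: print('managed action:', action),
                           lambda cmd: print('raw command:', cmd))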
@ -223,17 +220,19 @@ class AjaxHandler(BaseHandler):
|
|||||||
file_path = os.path.join(file_parent, file_name)
|
file_path = os.path.join(file_parent, file_name)
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'create_file'): return
|
if not self.check_server_id(server_id, 'create_file'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path) \
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path) \
|
||||||
or helper.check_file_exists(os.path.abspath(file_path)):
|
or helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning("Invalid path in create_file ajax call ({})".format(file_path))
|
logger.warning(f"Invalid path in create_file ajax call ({file_path})")
|
||||||
console.warning("Invalid path in create_file ajax call ({})".format(file_path))
|
console.warning(f"Invalid path in create_file ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Create the file by opening it
|
# Create the file by opening it
|
||||||
with open(file_path, 'w') as file_object:
|
with open(file_path, 'w', encoding='utf-8') as file_object:
|
||||||
file_object.close()
|
file_object.close()
|
||||||
|
|
||||||
elif page == "create_dir":
|
elif page == "create_dir":
|
||||||
@ -246,13 +245,15 @@ class AjaxHandler(BaseHandler):
|
|||||||
dir_path = os.path.join(dir_parent, dir_name)
|
dir_path = os.path.join(dir_parent, dir_name)
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'create_dir'): return
|
if not self.check_server_id(server_id, 'create_dir'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), dir_path) \
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), dir_path) \
|
||||||
or helper.check_path_exists(os.path.abspath(dir_path)):
|
or helper.check_path_exists(os.path.abspath(dir_path)):
|
||||||
logger.warning("Invalid path in create_dir ajax call ({})".format(dir_path))
|
logger.warning(f"Invalid path in create_dir ajax call ({dir_path})")
|
||||||
console.warning("Invalid path in create_dir ajax call ({})".format(dir_path))
|
console.warning(f"Invalid path in create_dir ajax call ({dir_path})")
|
||||||
return
|
return
|
||||||
# Create the directory
|
# Create the directory
|
||||||
os.mkdir(dir_path)
|
os.mkdir(dir_path)
|
||||||
@ -265,7 +266,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
path = helper.get_os_understandable_path(self.get_argument('path', None))
|
path = helper.get_os_understandable_path(self.get_argument('path', None))
|
||||||
helper.unzipFile(path)
|
helper.unzipFile(path)
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=files".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=files")
|
||||||
return
|
return
|
||||||
|
|
||||||
elif page == "kill":
|
elif page == "kill":
|
||||||
@ -278,7 +279,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
svr.kill()
|
svr.kill()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Could not find PID for requested termsig. Full error: {}".format(e))
|
logger.error(f"Could not find PID for requested termsig. Full error: {e}")
|
||||||
return
|
return
|
||||||
elif page == "eula":
|
elif page == "eula":
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
@ -297,7 +298,11 @@ class AjaxHandler(BaseHandler):
|
|||||||
backup_path = svr_obj.backup_path
|
backup_path = svr_obj.backup_path
|
||||||
if helper.validate_traversal(backup_path, zip_name):
|
if helper.validate_traversal(backup_path, zip_name):
|
||||||
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
tempDir = helper.unzip_backup_archive(backup_path, zip_name)
|
||||||
new_server = self.controller.import_zip_server(svr_obj.server_name, tempDir, server_data['executable'], '1', '2', server_data['server_port'])
|
new_server = self.controller.import_zip_server(svr_obj.server_name,
|
||||||
|
tempDir,
|
||||||
|
server_data['executable'],
|
||||||
|
'1', '2',
|
||||||
|
server_data['server_port'])
|
||||||
new_server_id = new_server
|
new_server_id = new_server
|
||||||
new_server = self.controller.get_server_data(new_server)
|
new_server = self.controller.get_server_data(new_server)
|
||||||
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
self.controller.rename_backup_dir(server_id, new_server_id, new_server['server_uuid'])
|
||||||
@ -340,7 +345,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
console.warning("delete {} for server {}".format(file_path, server_id))
|
console.warning(f"Delete {file_path} for server {server_id}")
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'del_file'):
|
if not self.check_server_id(server_id, 'del_file'):
|
||||||
return
|
return
|
||||||
@ -350,8 +355,8 @@ class AjaxHandler(BaseHandler):
|
|||||||
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
||||||
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning("Invalid path in del_file ajax call ({})".format(file_path))
|
logger.warning(f"Invalid path in del_file ajax call ({file_path})")
|
||||||
console.warning("Invalid path in del_file ajax call ({})".format(file_path))
|
console.warning(f"Invalid path in del_file ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Delete the file
|
# Delete the file
|
||||||
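The delete handlers above keep their guard pattern: the requested path must resolve inside the server directory (or its backup directory) and must actually exist before anything is removed. The project does this through its own `helper.in_path` and `check_file_exists` helpers; the sketch below shows the same idea using only the standard library, so it is an approximation of the behaviour rather than the project's implementation:

    import os

    def is_inside(base_dir, candidate_path):
        """True if candidate_path resolves to a location inside base_dir (symlinks followed)."""
        base = os.path.realpath(base_dir)
        target = os.path.realpath(candidate_path)
        return os.path.commonpath([base, target]) == base

    def safe_to_delete(server_dir, backup_dir, file_path):
        # Refuse anything that escapes both allowed roots or that does not exist.
        if not (is_inside(server_dir, file_path) or is_inside(backup_dir, file_path)):
            return False
        return os.path.isfile(os.path.abspath(file_path))

    # Illustrative check; the directories and the traversal attempt are made-up paths
    print(safe_to_delete('/srv/minecraft/server1', '/srv/minecraft/backups',
                         '/srv/minecraft/server1/../../etc/passwd'))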
@ -372,7 +377,7 @@ class AjaxHandler(BaseHandler):
|
|||||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
console.warning("delete {} for server {}".format(file_path, server_id))
|
console.warning(f"Delete {file_path} for server {server_id}")
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'del_file'):
|
if not self.check_server_id(server_id, 'del_file'):
|
||||||
return
|
return
|
||||||
@ -382,8 +387,8 @@ class AjaxHandler(BaseHandler):
|
|||||||
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
if not (helper.in_path(helper.get_os_understandable_path(server_info['path']), file_path) \
|
||||||
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
or helper.in_path(helper.get_os_understandable_path(server_info['backup_path']), file_path)) \
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning("Invalid path in del_file ajax call ({})".format(file_path))
|
logger.warning(f"Invalid path in del_file ajax call ({file_path})")
|
||||||
console.warning("Invalid path in del_file ajax call ({})".format(file_path))
|
console.warning(f"Invalid path in del_file ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Delete the file
|
# Delete the file
|
||||||
@ -398,16 +403,18 @@ class AjaxHandler(BaseHandler):
|
|||||||
dir_path = helper.get_os_understandable_path(self.get_body_argument('dir_path', default=None, strip=True))
|
dir_path = helper.get_os_understandable_path(self.get_body_argument('dir_path', default=None, strip=True))
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
console.warning("delete {} for server {}".format(dir_path, server_id))
|
console.warning(f"Delete {dir_path} for server {server_id}")
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'del_dir'): return
|
if not self.check_server_id(server_id, 'del_dir'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
if not helper.in_path(helper.get_os_understandable_path(server_info['path']), dir_path) \
|
if not helper.in_path(helper.get_os_understandable_path(server_info['path']), dir_path) \
|
||||||
or not helper.check_path_exists(os.path.abspath(dir_path)):
|
or not helper.check_path_exists(os.path.abspath(dir_path)):
|
||||||
logger.warning("Invalid path in del_file ajax call ({})".format(dir_path))
|
logger.warning(f"Invalid path in del_file ajax call ({dir_path})")
|
||||||
console.warning("Invalid path in del_file ajax call ({})".format(dir_path))
|
console.warning(f"Invalid path in del_file ajax call ({dir_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Delete the directory
|
# Delete the directory
|
||||||
@ -421,14 +428,13 @@ class AjaxHandler(BaseHandler):
|
|||||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||||
return
|
return
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
logger.info(
|
logger.info(f"Removing server from panel for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
||||||
"Removing server from panel for server: {}".format(self.controller.servers.get_server_friendly_name(server_id)))
|
|
||||||
|
|
||||||
server_data = self.controller.get_server_data(server_id)
|
server_data = self.controller.get_server_data(server_id)
|
||||||
server_name = server_data['server_name']
|
server_name = server_data['server_name']
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Deleted server {} named {}".format(server_id, server_name),
|
f"Deleted server {server_id} named {server_name}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
|
|
||||||
@ -441,14 +447,13 @@ class AjaxHandler(BaseHandler):
|
|||||||
self.redirect("/panel/error?error=Unauthorized access to Config")
|
self.redirect("/panel/error?error=Unauthorized access to Config")
|
||||||
return
|
return
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
logger.info(
|
logger.info(f"Removing server and all associated files for server: {self.controller.servers.get_server_friendly_name(server_id)}")
|
||||||
"Removing server and all associated files for server: {}".format(self.controller.servers.get_server_friendly_name(server_id)))
|
|
||||||
|
|
||||||
server_data = self.controller.get_server_data(server_id)
|
server_data = self.controller.get_server_data(server_id)
|
||||||
server_name = server_data['server_name']
|
server_name = server_data['server_name']
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Deleted server {} named {}".format(server_id, server_name),
|
f"Deleted server {server_id} named {server_name}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
|
|
||||||
@ -483,17 +488,19 @@ class AjaxHandler(BaseHandler):
|
|||||||
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
file_path = helper.get_os_understandable_path(self.get_body_argument('file_path', default=None, strip=True))
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'save_file'): return
|
if not self.check_server_id(server_id, 'save_file'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), file_path)\
|
||||||
or not helper.check_file_exists(os.path.abspath(file_path)):
|
or not helper.check_file_exists(os.path.abspath(file_path)):
|
||||||
logger.warning("Invalid path in save_file ajax call ({})".format(file_path))
|
logger.warning(f"Invalid path in save_file ajax call ({file_path})")
|
||||||
console.warning("Invalid path in save_file ajax call ({})".format(file_path))
|
console.warning(f"Invalid path in save_file ajax call ({file_path})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Open the file in write mode and store the content in file_object
|
# Open the file in write mode and store the content in file_object
|
||||||
with open(file_path, 'w') as file_object:
|
with open(file_path, 'w', encoding='utf-8') as file_object:
|
||||||
file_object.write(file_contents)
|
file_object.write(file_contents)
|
||||||
|
|
||||||
elif page == "rename_item":
|
elif page == "rename_item":
|
||||||
@ -505,8 +512,10 @@ class AjaxHandler(BaseHandler):
|
|||||||
new_item_name = self.get_body_argument('new_item_name', default=None, strip=True)
|
new_item_name = self.get_body_argument('new_item_name', default=None, strip=True)
|
||||||
server_id = self.get_argument('id', None)
|
server_id = self.get_argument('id', None)
|
||||||
|
|
||||||
if not self.check_server_id(server_id, 'rename_item'): return
|
if not self.check_server_id(server_id, 'rename_item'):
|
||||||
else: server_id = bleach.clean(server_id)
|
return
|
||||||
|
else:
|
||||||
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
if item_path is None or new_item_name is None:
|
if item_path is None or new_item_name is None:
|
||||||
logger.warning("Invalid path(s) in rename_item ajax call")
|
logger.warning("Invalid path(s) in rename_item ajax call")
|
||||||
@ -515,16 +524,17 @@ class AjaxHandler(BaseHandler):
|
|||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), item_path) \
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), item_path) \
|
||||||
or not helper.check_path_exists(os.path.abspath(item_path)):
|
or not helper.check_path_exists(os.path.abspath(item_path)):
|
||||||
logger.warning("Invalid old name path in rename_item ajax call ({})".format(server_id))
|
logger.warning(f"Invalid old name path in rename_item ajax call ({server_id})")
|
||||||
console.warning("Invalid old name path in rename_item ajax call ({})".format(server_id))
|
console.warning(f"Invalid old name path in rename_item ajax call ({server_id})")
|
||||||
return
|
return
|
||||||
|
|
||||||
new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)
|
new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)
|
||||||
|
|
||||||
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']), new_item_path) \
|
if not helper.in_path(helper.get_os_understandable_path(self.controller.servers.get_server_data_by_id(server_id)['path']),
|
||||||
|
new_item_path) \
|
||||||
or helper.check_path_exists(os.path.abspath(new_item_path)):
|
or helper.check_path_exists(os.path.abspath(new_item_path)):
|
||||||
logger.warning("Invalid new name path in rename_item ajax call ({})".format(server_id))
|
logger.warning(f"Invalid new name path in rename_item ajax call ({server_id})")
|
||||||
console.warning("Invalid new name path in rename_item ajax call ({})".format(server_id))
|
console.warning(f"Invalid new name path in rename_item ajax call ({server_id})")
|
||||||
return
|
return
|
||||||
|
|
||||||
# RENAME
|
# RENAME
|
||||||
@ -532,15 +542,15 @@ class AjaxHandler(BaseHandler):
|
|||||||
|
|
||||||
def check_server_id(self, server_id, page_name):
|
def check_server_id(self, server_id, page_name):
|
||||||
if server_id is None:
|
if server_id is None:
|
||||||
logger.warning("Server ID not defined in {} ajax call ({})".format(page_name, server_id))
|
logger.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
||||||
console.warning("Server ID not defined in {} ajax call ({})".format(page_name, server_id))
|
console.warning(f"Server ID not defined in {page_name} ajax call ({server_id})")
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
server_id = bleach.clean(server_id)
|
server_id = bleach.clean(server_id)
|
||||||
|
|
||||||
# does this server id exist?
|
# does this server id exist?
|
||||||
if not self.controller.servers.server_id_exists(server_id):
|
if not self.controller.servers.server_id_exists(server_id):
|
||||||
logger.warning("Server ID not found in {} ajax call ({})".format(page_name, server_id))
|
logger.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
||||||
console.warning("Server ID not found in {} ajax call ({})".format(page_name, server_id))
|
console.warning(f"Server ID not found in {page_name} ajax call ({server_id})")
|
||||||
return
|
return
|
||||||
return True
|
return True
|
||||||
|
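Throughout the AJAX handler, one-line `if ...: return` / `else: ...` statements are expanded onto separate lines (pylint's multiple-statements check), and `check_server_id` above is the shared validation those call sites rely on. A compact sketch of the expanded call-site pattern, with a stand-in validator and `str.strip()` standing in for `bleach.clean`:

    def check_server_id(server_id, known_ids):
        """Stand-in for the handler method: confirm the id is present and known (sketch)."""
        if server_id is None:
            print("Server ID not defined in ajax call")
            return None
        server_id = server_id.strip()          # bleach.clean() in the real handler
        if server_id not in known_ids:
            print(f"Server ID not found in ajax call ({server_id})")
            return None
        return True

    def handle_request(server_id, known_ids):
        # Expanded guard clause: early return on failure, sanitize on success.
        if not check_server_id(server_id, known_ids):
            return
        else:
            server_id = server_id.strip()
        print(f"continuing with server {server_id}")

    handle_request(' 42 ', {'42'})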
@ -15,7 +15,8 @@ class ApiHandler(BaseHandler):
|
|||||||
self.write(data)
|
self.write(data)
|
||||||
|
|
||||||
def access_denied(self, user, reason=''):
|
def access_denied(self, user, reason=''):
|
||||||
if reason: reason = ' because ' + reason
|
if reason:
|
||||||
|
reason = ' because ' + reason
|
||||||
log.info("User %s from IP %s was denied access to the API route " + self.request.path + reason, user, self.get_remote_ip())
|
log.info("User %s from IP %s was denied access to the API route " + self.request.path + reason, user, self.get_remote_ip())
|
||||||
self.finish(self.return_response(403, {
|
self.finish(self.return_response(403, {
|
||||||
'error':'ACCESS_DENIED',
|
'error':'ACCESS_DENIED',
|
||||||
@ -36,7 +37,7 @@ class ApiHandler(BaseHandler):
|
|||||||
log.debug("Checking results")
|
log.debug("Checking results")
|
||||||
if user_data:
|
if user_data:
|
||||||
# Login successful! Check perms
|
# Login successful! Check perms
|
||||||
log.info("User {} has authenticated to API".format(user_data['username']))
|
log.info(f"User {user_data['username']} has authenticated to API")
|
||||||
# TODO: Role check
|
# TODO: Role check
|
||||||
|
|
||||||
return True # This is to set the "authenticated"
|
return True # This is to set the "authenticated"
|
||||||
@ -46,7 +47,10 @@ class ApiHandler(BaseHandler):
|
|||||||
return False
|
return False
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.warning("An error occured while authenticating an API user: %s", e)
|
log.warning("An error occured while authenticating an API user: %s", e)
|
||||||
self.access_denied("unknown"), "an error occured while authenticating the user"
|
self.finish(self.return_response(403, {
|
||||||
|
'error':'ACCESS_DENIED',
|
||||||
|
'info':'An error occurred while authenticating the user'
|
||||||
|
}))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
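The hunk above fixes a no-op: `self.access_denied("unknown"), "..."` built a tuple and discarded it, so the client never saw a response body when authentication itself raised an exception. The new code writes an explicit 403 JSON payload through `self.finish(self.return_response(...))`. A minimal sketch of that response shape, assuming a `return_response(status, data)` helper like the one this handler uses; the Tornado request plumbing is omitted:

    import json

    def return_response(status_code, data):
        """Stand-in for the handler helper: bundle a status code with a JSON body (sketch)."""
        return {'status': status_code, 'body': json.dumps(data)}

    def access_error_response():
        # What the handler now sends when authenticating the API user fails with an exception.
        return return_response(403, {
            'error': 'ACCESS_DENIED',
            'info': 'An error occurred while authenticating the user',
        })

    print(access_error_response())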
@ -54,7 +58,9 @@ class ServersStats(ApiHandler):
|
|||||||
def get(self):
|
def get(self):
|
||||||
"""Get details about all servers"""
|
"""Get details about all servers"""
|
||||||
authenticated = self.authenticate_user()
|
authenticated = self.authenticate_user()
|
||||||
if not authenticated: return
|
if not authenticated:
|
||||||
|
return
|
||||||
|
|
||||||
# Get server stats
|
# Get server stats
|
||||||
# TODO Check perms
|
# TODO Check perms
|
||||||
self.finish(self.write({"servers": self.controller.stats.get_servers_stats()}))
|
self.finish(self.write({"servers": self.controller.stats.get_servers_stats()}))
|
||||||
@ -64,7 +70,9 @@ class NodeStats(ApiHandler):
|
|||||||
def get(self):
|
def get(self):
|
||||||
"""Get stats for particular node"""
|
"""Get stats for particular node"""
|
||||||
authenticated = self.authenticate_user()
|
authenticated = self.authenticate_user()
|
||||||
if not authenticated: return
|
if not authenticated:
|
||||||
|
return
|
||||||
|
|
||||||
# Get node stats
|
# Get node stats
|
||||||
node_stats = self.controller.stats.get_node_stats()
|
node_stats = self.controller.stats.get_node_stats()
|
||||||
node_stats.pop("servers")
|
node_stats.pop("servers")
|
||||||
|
@ -1,14 +1,15 @@
|
|||||||
import logging
|
import logging
|
||||||
import tornado.web
|
|
||||||
import bleach
|
|
||||||
from typing import (
|
from typing import (
|
||||||
Union,
|
Union,
|
||||||
List,
|
List,
|
||||||
Optional, Tuple, Dict, Any
|
Optional, Tuple, Dict, Any
|
||||||
)
|
)
|
||||||
|
import tornado.web
|
||||||
|
import bleach
|
||||||
|
|
||||||
from app.classes.shared.authentication import authentication
|
from app.classes.shared.authentication import authentication
|
||||||
from app.classes.shared.main_controller import Controller
|
from app.classes.shared.main_controller import Controller
|
||||||
|
|
||||||
from app.classes.models.users import ApiKeys
|
from app.classes.models.users import ApiKeys
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -38,10 +39,10 @@ class BaseHandler(tornado.web.RequestHandler):
|
|||||||
def autobleach(self, name, text):
|
def autobleach(self, name, text):
|
||||||
for r in self.redactables:
|
for r in self.redactables:
|
||||||
if r in name:
|
if r in name:
|
||||||
logger.debug("Auto-bleaching {}: {}".format(name, "[**REDACTED**]"))
|
logger.debug(f"Auto-bleaching {name}: [**REDACTED**]")
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
logger.debug("Auto-bleaching {}: {}".format(name, text))
|
logger.debug(f"Auto-bleaching {name}: {text}")
|
||||||
if type(text) in self.nobleach:
|
if type(text) in self.nobleach:
|
||||||
logger.debug("Auto-bleaching - bypass type")
|
logger.debug("Auto-bleaching - bypass type")
|
||||||
return text
|
return text
|
||||||
|
@ -4,10 +4,10 @@ from app.classes.web.base_handler import BaseHandler
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DefaultHandler(BaseHandler):
|
class DefaultHandler(BaseHandler):
|
||||||
|
|
||||||
# Override prepare() instead of get() to cover all possible HTTP methods.
|
# Override prepare() instead of get() to cover all possible HTTP methods.
|
||||||
|
# pylint: disable=arguments-differ
|
||||||
def prepare(self, page=None):
|
def prepare(self, page=None):
|
||||||
if page is not None:
|
if page is not None:
|
||||||
self.set_status(404)
|
self.set_status(404)
|
||||||
@ -20,4 +20,3 @@ class DefaultHandler(BaseHandler):
|
|||||||
"/public/login",
|
"/public/login",
|
||||||
#translate=self.translator.translate,
|
#translate=self.translator.translate,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -1,26 +1,11 @@
|
|||||||
import sys
|
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.shared.main_models import Users, fn
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
|
||||||
import bleach
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPHandler(BaseHandler):
|
class HTTPHandler(BaseHandler):
|
||||||
def get(self):
|
def get(self):
|
||||||
url = str(self.request.host)
|
url = str(self.request.host)
|
||||||
@ -34,13 +19,13 @@ class HTTPHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
resp = requests.get(url + ":" + str(port))
|
resp = requests.get(url + ":" + str(port))
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
except Exception as err:
|
except Exception:
|
||||||
port = db_port
|
port = db_port
|
||||||
self.redirect(url+":"+str(port))
|
self.redirect(url+":"+str(port))
|
||||||
|
|
||||||
|
|
||||||
class HTTPHandlerPage(BaseHandler):
|
class HTTPHandlerPage(BaseHandler):
|
||||||
def get(self, page):
|
def get(self):
|
||||||
url = str(self.request.host)
|
url = str(self.request.host)
|
||||||
port = 443
|
port = 443
|
||||||
url_list = url.split(":")
|
url_list = url.split(":")
|
||||||
@ -52,6 +37,6 @@ class HTTPHandlerPage(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
resp = requests.get(url + ":" + str(port))
|
resp = requests.get(url + ":" + str(port))
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
except Exception as err:
|
except Exception:
|
||||||
port = db_port
|
port = db_port
|
||||||
self.redirect(url+":"+str(port))
|
self.redirect(url+":"+str(port))
|
@ -1,28 +1,12 @@
|
|||||||
import sys
|
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.shared.main_models import Users, fn
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
|
||||||
import bleach
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPHandlerPage(BaseHandler):
|
class HTTPHandlerPage(BaseHandler):
|
||||||
def get(self, page):
|
def get(self):
|
||||||
url = self.request.full_url
|
url = self.request.full_url
|
||||||
port = 443
|
port = 443
|
||||||
if url[len(url)-1] == '/':
|
if url[len(url)-1] == '/':
|
||||||
@ -30,18 +14,18 @@ class HTTPHandlerPage(BaseHandler):
|
|||||||
url_list = url.split('/')
|
url_list = url.split('/')
|
||||||
if url_list[0] != "":
|
if url_list[0] != "":
|
||||||
primary_url = url_list[0] + ":"+str(port)+"/"
|
primary_url = url_list[0] + ":"+str(port)+"/"
|
||||||
backup_url = url_list[0] + ":" +str(helper.get_setting["https_port"]) +"/"
|
backup_url = url_list[0] + ":" +str(helper.get_setting("https_port")) +"/"
|
||||||
for i in range(len(url_list)-1):
|
for i in range(len(url_list)-1):
|
||||||
primary_url += url_list[i+1]
|
primary_url += url_list[i+1]
|
||||||
backup_url += url_list[i+1]
|
backup_url += url_list[i+1]
|
||||||
else:
|
else:
|
||||||
primary_url = url + str(port)
|
primary_url = url + str(port)
|
||||||
backup_url = url + str(helper.get_setting['https_port'])
|
backup_url = url + str(helper.get_setting('https_port'))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = requests.get(primary_url)
|
resp = requests.get(primary_url)
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
url = primary_url
|
url = primary_url
|
||||||
except Exception as err:
|
except Exception:
|
||||||
url = backup_url
|
url = backup_url
|
||||||
self.redirect('https://'+url+':'+ str(port))
|
self.redirect('https://'+url+':'+ str(port))
|
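The hunk above also fixes a latent bug: `helper.get_setting` is a method, so subscripting it (`helper.get_setting["https_port"]`) would raise a TypeError at runtime; the new code calls it. The surrounding logic probes a primary URL and falls back to the configured HTTPS port when the request fails. A rough sketch of that probe-and-fallback, with `requests` and a dummy settings lookup standing in for the project's helper:

    import requests

    settings = {'https_port': 8443}  # illustrative stand-in for the real settings store

    def get_setting(key):
        return settings[key]

    def pick_url(primary_url, backup_host):
        backup_url = f"{backup_host}:{get_setting('https_port')}/"   # called, not subscripted
        try:
            resp = requests.get(primary_url, timeout=5)
            resp.raise_for_status()
            return primary_url
        except requests.RequestException:
            # Primary unreachable or returned an error status: fall back to the HTTPS port.
            return backup_url

    print(pick_url('https://example.com:443/', 'https://example.com'))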
@ -1,43 +1,34 @@
|
|||||||
from tempfile import tempdir
|
|
||||||
from typing import Dict, Optional, Any
|
|
||||||
|
|
||||||
from app.classes.shared.authentication import authentication
|
|
||||||
from app.classes.shared.translation import Translation
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
import bleach
|
|
||||||
import time
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
import shutil
|
from typing import Dict, Any, Tuple
|
||||||
import tempfile
|
import json
|
||||||
|
import logging
|
||||||
import threading
|
import threading
|
||||||
from cron_validator import CronValidator
|
import bleach
|
||||||
#TZLocal is set as a hidden import on win pipeline
|
|
||||||
from tzlocal import get_localzone
|
|
||||||
import libgravatar
|
import libgravatar
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from tornado import locale, iostream
|
import tornado.web
|
||||||
|
import tornado.escape
|
||||||
|
from tornado import iostream
|
||||||
from tornado.ioloop import IOLoop
|
from tornado.ioloop import IOLoop
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.shared.main_models import Users, installer
|
#TZLocal is set as a hidden import on win pipeline
|
||||||
|
from tzlocal import get_localzone
|
||||||
|
from cron_validator import CronValidator
|
||||||
|
|
||||||
|
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||||
|
from app.classes.models.crafty_permissions import Enum_Permissions_Crafty
|
||||||
|
from app.classes.models.management import management_helper
|
||||||
|
|
||||||
|
from app.classes.shared.authentication import authentication
|
||||||
|
from app.classes.shared.helpers import helper
|
||||||
|
|
||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
|
|
||||||
from app.classes.models.servers import Servers
|
|
||||||
from app.classes.models.server_permissions import Enum_Permissions_Server, Permissions_Servers
|
|
||||||
from app.classes.models.crafty_permissions import Enum_Permissions_Crafty, Permissions_Crafty
|
|
||||||
from app.classes.models.management import management_helper
|
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
|
||||||
from app.classes.web.websocket_helper import WebSocketHelper
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PanelHandler(BaseHandler):
|
class PanelHandler(BaseHandler):
|
||||||
|
|
||||||
def get_user_roles(self) -> Dict[str, list]:
|
def get_user_roles(self) -> Dict[str, list]:
|
||||||
@ -53,26 +44,26 @@ class PanelHandler(BaseHandler):
|
|||||||
for server in self.controller.list_defined_servers():
|
for server in self.controller.list_defined_servers():
|
||||||
argument = int(float(
|
argument = int(float(
|
||||||
bleach.clean(
|
bleach.clean(
|
||||||
self.get_argument('server_{}_access'.format(server['server_id']), '0')
|
self.get_argument(f"server_{server['server_id']}_access", '0')
|
||||||
)
|
)
|
||||||
))
|
))
|
||||||
if argument:
|
if argument:
|
||||||
servers.add(server['server_id'])
|
servers.add(server['server_id'])
|
||||||
return servers
|
return servers
|
||||||
|
|
||||||
def get_perms_quantity(self) -> (str, dict):
|
def get_perms_quantity(self) -> Tuple[str, dict]:
|
||||||
permissions_mask: str = "000"
|
permissions_mask: str = "000"
|
||||||
server_quantity: dict = {}
|
server_quantity: dict = {}
|
||||||
for permission in self.controller.crafty_perms.list_defined_crafty_permissions():
|
for permission in self.controller.crafty_perms.list_defined_crafty_permissions():
|
||||||
argument = int(float(bleach.clean(
|
argument = int(float(bleach.clean(
|
||||||
self.get_argument('permission_{}'.format(permission.name), '0')
|
self.get_argument(f'permission_{permission.name}', '0')
|
||||||
)))
|
)))
|
||||||
if argument:
|
if argument:
|
||||||
permissions_mask = self.controller.crafty_perms.set_permission(permissions_mask, permission, argument)
|
permissions_mask = self.controller.crafty_perms.set_permission(permissions_mask, permission, argument)
|
||||||
|
|
||||||
q_argument = int(float(
|
q_argument = int(float(
|
||||||
bleach.clean(
|
bleach.clean(
|
||||||
self.get_argument('quantity_{}'.format(permission.name), '0')
|
self.get_argument(f'quantity_{permission.name}', '0')
|
||||||
)
|
)
|
||||||
))
|
))
|
||||||
if q_argument:
|
if q_argument:
|
||||||
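`get_perms_quantity` above has its return annotation corrected from `-> (str, dict)` to `-> Tuple[str, dict]`; a bare parenthesised pair is just a tuple of type objects, not a valid annotation for type checkers, and pylint flags it. A tiny self-contained example of the corrected form, with made-up values:

    from typing import Tuple

    def get_perms_quantity_example() -> Tuple[str, dict]:
        """Return a permissions mask plus a per-permission quantity map (illustrative values)."""
        permissions_mask = "000"
        server_quantity = {'Server_Creation': 2}
        return permissions_mask, server_quantity

    mask, quantity = get_perms_quantity_example()
    print(mask, quantity)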
@ -84,7 +75,7 @@ class PanelHandler(BaseHandler):
|
|||||||
def get_perms(self) -> str:
|
def get_perms(self) -> str:
|
||||||
permissions_mask: str = "000"
|
permissions_mask: str = "000"
|
||||||
for permission in self.controller.crafty_perms.list_defined_crafty_permissions():
|
for permission in self.controller.crafty_perms.list_defined_crafty_permissions():
|
||||||
argument = self.get_argument('permission_{}'.format(permission.name), None)
|
argument = self.get_argument(f'permission_{permission.name}', None)
|
||||||
if argument is not None:
|
if argument is not None:
|
||||||
permissions_mask = self.controller.crafty_perms.set_permission(permissions_mask, permission,
|
permissions_mask = self.controller.crafty_perms.set_permission(permissions_mask, permission,
|
||||||
1 if argument == '1' else 0)
|
1 if argument == '1' else 0)
|
||||||
@ -93,7 +84,7 @@ class PanelHandler(BaseHandler):
|
|||||||
def get_perms_server(self) -> str:
|
def get_perms_server(self) -> str:
|
||||||
permissions_mask = "00000000"
|
permissions_mask = "00000000"
|
||||||
for permission in self.controller.server_perms.list_defined_permissions():
|
for permission in self.controller.server_perms.list_defined_permissions():
|
||||||
argument = self.get_argument('permission_{}'.format(permission.name), None)
|
argument = self.get_argument(f'permission_{permission.name}', None)
|
||||||
if argument is not None:
|
if argument is not None:
|
||||||
permissions_mask = self.controller.server_perms.set_permission(permissions_mask, permission,
|
permissions_mask = self.controller.server_perms.set_permission(permissions_mask, permission,
|
||||||
1 if argument == '1' else 0)
|
1 if argument == '1' else 0)
|
||||||
@ -102,13 +93,13 @@ class PanelHandler(BaseHandler):
|
|||||||
def get_user_role_memberships(self) -> set:
|
def get_user_role_memberships(self) -> set:
|
||||||
roles = set()
|
roles = set()
|
||||||
for role in self.controller.roles.get_all_roles():
|
for role in self.controller.roles.get_all_roles():
|
||||||
if self.get_argument('role_{}_membership'.format(role.role_id), None) == '1':
|
if self.get_argument(f'role_{role.role_id}_membership', None) == '1':
|
||||||
roles.add(role.role_id)
|
roles.add(role.role_id)
|
||||||
return roles
|
return roles
|
||||||
|
|
||||||
def download_file(self, name: str, file: str):
|
def download_file(self, name: str, file: str):
|
||||||
self.set_header('Content-Type', 'application/octet-stream')
|
self.set_header('Content-Type', 'application/octet-stream')
|
||||||
self.set_header('Content-Disposition', 'attachment; filename=' + name)
|
self.set_header('Content-Disposition', f'attachment; filename={name}')
|
||||||
chunk_size = 1024 * 1024 * 4 # 4 MiB
|
chunk_size = 1024 * 1024 * 4 # 4 MiB
|
||||||
|
|
||||||
with open(file, 'rb') as f:
|
with open(file, 'rb') as f:
|
||||||
@ -176,7 +167,7 @@ class PanelHandler(BaseHandler):
|
|||||||
data = json.loads(s['int_ping_results'])
|
data = json.loads(s['int_ping_results'])
|
||||||
s['int_ping_results'] = data
|
s['int_ping_results'] = data
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Failed server data for page with error: {} ".format(e))
|
logger.error(f"Failed server data for page with error: {e}")
|
||||||
|
|
||||||
return page_data
|
return page_data
|
||||||
|
|
||||||
@ -190,6 +181,7 @@ class PanelHandler(BaseHandler):
|
|||||||
now = time.time()
|
now = time.time()
|
||||||
formatted_time = str(datetime.datetime.fromtimestamp(now).strftime('%Y-%m-%d %H:%M:%S'))
|
formatted_time = str(datetime.datetime.fromtimestamp(now).strftime('%Y-%m-%d %H:%M:%S'))
|
||||||
|
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
@ -272,7 +264,7 @@ class PanelHandler(BaseHandler):
|
|||||||
template = "public/error.html"
|
template = "public/error.html"
|
||||||
|
|
||||||
elif page == 'credits':
|
elif page == 'credits':
|
||||||
with open(helper.credits_cache) as republic_credits_will_do:
|
with open(helper.credits_cache, encoding='utf-8') as republic_credits_will_do:
|
||||||
credits_dict: dict = json.load(republic_credits_will_do)
|
credits_dict: dict = json.load(republic_credits_will_do)
|
||||||
timestamp = credits_dict["lastUpdate"] / 1000.0
|
timestamp = credits_dict["lastUpdate"] / 1000.0
|
||||||
page_data["patrons"] = credits_dict["patrons"]
|
page_data["patrons"] = credits_dict["patrons"]
|
||||||
@ -308,7 +300,7 @@ class PanelHandler(BaseHandler):
|
|||||||
data['stats']['waiting_start'] = self.controller.servers.get_waiting_start(
|
data['stats']['waiting_start'] = self.controller.servers.get_waiting_start(
|
||||||
str(data['stats']['server_id']['server_id']))
|
str(data['stats']['server_id']['server_id']))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Failed to get server waiting to start: {} ".format(e))
|
logger.error(f"Failed to get server waiting to start: {e}")
|
||||||
data['stats']['waiting_start'] = False
|
data['stats']['waiting_start'] = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@ -324,14 +316,15 @@ class PanelHandler(BaseHandler):
|
|||||||
subpage = bleach.clean(self.get_argument('subpage', ""))
|
subpage = bleach.clean(self.get_argument('subpage', ""))
|
||||||
|
|
||||||
server_id = self.check_server_id()
|
server_id = self.check_server_id()
|
||||||
if server_id is None: return
|
if server_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
valid_subpages = ['term', 'logs', 'backup', 'config', 'files', 'admin_controls', 'tasks']
|
valid_subpages = ['term', 'logs', 'backup', 'config', 'files', 'admin_controls', 'tasks']
|
||||||
|
|
||||||
if subpage not in valid_subpages:
|
if subpage not in valid_subpages:
|
||||||
logger.debug('not a valid subpage')
|
logger.debug('not a valid subpage')
|
||||||
subpage = 'term'
|
subpage = 'term'
|
||||||
logger.debug('Subpage: "{}"'.format(subpage))
|
logger.debug(f'Subpage: "{subpage}"')
|
||||||
|
|
||||||
server = self.controller.get_server_obj(server_id)
|
server = self.controller.get_server_obj(server_id)
|
||||||
# server_data isn't needed since the server_stats also pulls server data
|
# server_data isn't needed since the server_stats also pulls server data
|
||||||
@ -340,7 +333,7 @@ class PanelHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
page_data['waiting_start'] = self.controller.servers.get_waiting_start(server_id)
|
page_data['waiting_start'] = self.controller.servers.get_waiting_start(server_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Failed to get server waiting to start: {} ".format(e))
|
logger.error(f"Failed to get server waiting to start: {e}")
|
||||||
page_data['waiting_start'] = False
|
page_data['waiting_start'] = False
|
||||||
page_data['get_players'] = lambda: self.controller.stats.get_server_players(server_id)
|
page_data['get_players'] = lambda: self.controller.stats.get_server_players(server_id)
|
||||||
page_data['active_link'] = subpage
|
page_data['active_link'] = subpage
|
||||||
@ -413,13 +406,13 @@ class PanelHandler(BaseHandler):
|
|||||||
"""
|
"""
|
||||||
html = ""
|
html = ""
|
||||||
for player in banned_players:
|
for player in banned_players:
|
||||||
html += """
|
html += f"""
|
||||||
<li class="playerItem banned">
|
<li class="playerItem banned">
|
||||||
<h3>{}</h3>
|
<h3>{player['name']}</h3>
|
||||||
<span>Banned by {} for reason: {}</span>
|
<span>Banned by {player['source']} for reason: {player['reason']}</span>
|
||||||
<button onclick="send_command_to_server('pardon {}')" type="button" class="btn btn-danger">Unban</button>
|
<button onclick="send_command_to_server('pardon {player['name']}')" type="button" class="btn btn-danger">Unban</button>
|
||||||
</li>
|
</li>
|
||||||
""".format(player['name'], player['source'], player['reason'], player['name'])
|
"""
|
||||||
|
|
||||||
return html
|
return html
|
||||||
if subpage == "admin_controls":
|
if subpage == "admin_controls":
|
||||||
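`get_banned_players_html` above moves from a triple-quoted template filled by `.format(...)`, where four positional arguments had to line up with anonymous placeholders, to an f-string, so each value sits next to the markup it renders. A short sketch of the same construction with made-up player data:

    banned_players = [
        {'name': 'Steve', 'source': 'console', 'reason': 'griefing'},
    ]

    html = ""
    for player in banned_players:
        # Each placeholder names the value it renders, instead of relying on argument order.
        html += f"""
        <li class="playerItem banned">
            <h3>{player['name']}</h3>
            <span>Banned by {player['source']} for reason: {player['reason']}</span>
            <button onclick="send_command_to_server('pardon {player['name']}')" type="button" class="btn btn-danger">Unban</button>
        </li>
        """

    print(html)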
@ -428,14 +421,14 @@ class PanelHandler(BaseHandler):
|
|||||||
self.redirect("/panel/error?error=Unauthorized access")
|
self.redirect("/panel/error?error=Unauthorized access")
|
||||||
page_data['banned_players'] = get_banned_players_html()
|
page_data['banned_players'] = get_banned_players_html()
|
||||||
|
|
||||||
# template = "panel/server_details.html"
|
template = f"panel/server_{subpage}.html"
|
||||||
template = "panel/server_{subpage}.html".format(subpage=subpage)
|
|
||||||
|
|
||||||
elif page == 'download_backup':
|
elif page == 'download_backup':
|
||||||
file = self.get_argument('file', "")
|
file = self.get_argument('file', "")
|
||||||
|
|
||||||
server_id = self.check_server_id()
|
server_id = self.check_server_id()
|
||||||
if server_id is None: return
|
if server_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
@ -447,16 +440,17 @@ class PanelHandler(BaseHandler):
|
|||||||
|
|
||||||
self.download_file(file, backup_file)
|
self.download_file(file, backup_file)
|
||||||
|
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=backup".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=backup")
|
||||||
|
|
||||||
elif page == 'backup_now':
|
elif page == 'backup_now':
|
||||||
server_id = self.check_server_id()
|
server_id = self.check_server_id()
|
||||||
if server_id is None: return
|
if server_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
server = self.controller.get_server_obj(server_id)
|
server = self.controller.get_server_obj(server_id)
|
||||||
|
|
||||||
server.backup_server()
|
server.backup_server()
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=backup".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=backup")
|
||||||
|
|
||||||
elif page == 'panel_config':
|
elif page == 'panel_config':
|
||||||
auth_servers = {}
|
auth_servers = {}
|
||||||
@ -607,7 +601,8 @@ class PanelHandler(BaseHandler):
|
|||||||
page_data['schedule']['server_id'] = server_id
|
page_data['schedule']['server_id'] = server_id
|
||||||
page_data['schedule']['schedule_id'] = schedule.schedule_id
|
page_data['schedule']['schedule_id'] = schedule.schedule_id
|
||||||
page_data['schedule']['action'] = schedule.action
|
page_data['schedule']['action'] = schedule.action
|
||||||
#we check here to see if the command is any of the default ones. We do not want a user changing to a custom command and seeing our command there.
|
# We check here to see if the command is any of the default ones.
|
||||||
|
# We do not want a user changing to a custom command and seeing our command there.
|
||||||
if schedule.action != 'start' or schedule.action != 'stop' or schedule.action != 'restart' or schedule.action != 'backup':
|
if schedule.action != 'start' or schedule.action != 'stop' or schedule.action != 'restart' or schedule.action != 'backup':
|
||||||
page_data['schedule']['command'] = schedule.command
|
page_data['schedule']['command'] = schedule.command
|
||||||
else:
|
else:
|
||||||
@ -624,7 +619,7 @@ class PanelHandler(BaseHandler):
|
|||||||
difficulty = 'advanced'
|
difficulty = 'advanced'
|
||||||
page_data['schedule']['difficulty'] = difficulty
|
page_data['schedule']['difficulty'] = difficulty
|
||||||
|
|
||||||
if sch_id == None or server_id == None:
|
if sch_id is None or server_id is None:
|
||||||
self.redirect("/panel/error?error=Invalid server ID or Schedule ID")
|
self.redirect("/panel/error?error=Invalid server ID or Schedule ID")
|
||||||
|
|
||||||
if not Enum_Permissions_Server.Schedule in page_data['user_permissions']:
|
if not Enum_Permissions_Server.Schedule in page_data['user_permissions']:
|
||||||
@ -803,7 +798,8 @@ class PanelHandler(BaseHandler):
|
|||||||
name = self.get_argument('name', "")
|
name = self.get_argument('name', "")
|
||||||
|
|
||||||
server_id = self.check_server_id()
|
server_id = self.check_server_id()
|
||||||
if server_id is None: return
|
if server_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
server_info = self.controller.servers.get_server_data_by_id(server_id)
|
||||||
|
|
||||||
@ -813,7 +809,7 @@ class PanelHandler(BaseHandler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
self.download_file(name, file)
|
self.download_file(name, file)
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=files".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=files")
|
||||||
|
|
||||||
elif page == 'download_support_package':
|
elif page == 'download_support_package':
|
||||||
tempZipStorage = exec_user['support_logs']
|
tempZipStorage = exec_user['support_logs']
|
||||||
@ -848,8 +844,11 @@ class PanelHandler(BaseHandler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
elif page == "support_logs":
|
elif page == "support_logs":
|
||||||
logger.info("Support logs requested. Packinging logs for user with ID: {}".format(exec_user["user_id"]))
|
logger.info(f"Support logs requested. Packinging logs for user with ID: {exec_user['user_id']}")
|
||||||
logs_thread = threading.Thread(target=self.controller.package_support_logs, daemon=True, args=(exec_user,), name='{}_logs_thread'.format(exec_user['user_id']))
|
logs_thread = threading.Thread(target=self.controller.package_support_logs,
|
||||||
|
daemon=True,
|
||||||
|
args=(exec_user,),
|
||||||
|
name=f"{exec_user['user_id']}_logs_thread")
|
||||||
logs_thread.start()
|
logs_thread.start()
|
||||||
self.redirect('/panel/dashboard')
|
self.redirect('/panel/dashboard')
|
||||||
return
|
return
|
||||||
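The daemon=True flag above matters for shutdown behaviour: daemon threads do not keep the interpreter alive, so a long log-packaging job cannot block Crafty from exiting. A standalone sketch of the same pattern (the function and values here are illustrative only):

    import threading

    def package_logs(user):
        ...  # long-running work

    logs_thread = threading.Thread(target=package_logs, args=({'user_id': 1},),
                                   daemon=True, name='1_logs_thread')
    logs_thread.start()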
@ -866,6 +865,7 @@ class PanelHandler(BaseHandler):
|
|||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def post(self, page):
|
def post(self, page):
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
@ -923,14 +923,13 @@ class PanelHandler(BaseHandler):
|
|||||||
auto_start = int(float(self.get_argument('auto_start', '0')))
|
auto_start = int(float(self.get_argument('auto_start', '0')))
|
||||||
crash_detection = int(float(self.get_argument('crash_detection', '0')))
|
crash_detection = int(float(self.get_argument('crash_detection', '0')))
|
||||||
logs_delete_after = int(float(self.get_argument('logs_delete_after', '0')))
|
logs_delete_after = int(float(self.get_argument('logs_delete_after', '0')))
|
||||||
# TODO: Add more modify options via the subpage parameter
|
|
||||||
# subpage = self.get_argument('subpage', None)
|
# subpage = self.get_argument('subpage', None)
|
||||||
|
|
||||||
server_id = self.check_server_id()
|
server_id = self.check_server_id()
|
||||||
if server_id is None: return
|
if server_id is None:
|
||||||
|
return
|
||||||
|
|
||||||
server_obj = self.controller.servers.get_server_obj(server_id)
|
server_obj = self.controller.servers.get_server_obj(server_id)
|
||||||
server_settings = self.controller.get_server_data(server_id)
|
|
||||||
stale_executable = server_obj.executable
|
stale_executable = server_obj.executable
|
||||||
#Compares old jar name to page data being passed. If they are different we replace the executable name in the
|
#Compares old jar name to page data being passed. If they are different we replace the executable name in the
|
||||||
if str(stale_executable) != str(executable):
|
if str(stale_executable) != str(executable):
|
||||||
@ -966,11 +965,11 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.refresh_server_settings(server_id)
|
self.controller.refresh_server_settings(server_id)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited server {} named {}".format(server_id, server_name),
|
f"Edited server {server_id} named {server_name}",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
|
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=config".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=config")
|
||||||
|
|
||||||
if page == "server_backup":
|
if page == "server_backup":
|
||||||
logger.debug(self.request.arguments)
|
logger.debug(self.request.arguments)
|
||||||
@ -1008,7 +1007,7 @@ class PanelHandler(BaseHandler):
|
|||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
self.tasks_manager.reload_schedule_from_db()
|
self.tasks_manager.reload_schedule_from_db()
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=backup".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=backup")
|
||||||
|
|
||||||
|
|
||||||
if page == "new_schedule":
|
if page == "new_schedule":
|
||||||
@ -1022,7 +1021,7 @@ class PanelHandler(BaseHandler):
|
|||||||
interval_type = bleach.clean(self.get_argument('interval_type', None))
|
interval_type = bleach.clean(self.get_argument('interval_type', None))
|
||||||
#only check for time if it's number of days
|
#only check for time if it's number of days
|
||||||
if interval_type == "days":
|
if interval_type == "days":
|
||||||
time = bleach.clean(self.get_argument('time', None))
|
sch_time = bleach.clean(self.get_argument('time', None))
|
||||||
if action == "command":
|
if action == "command":
|
||||||
command = bleach.clean(self.get_argument('command', None))
|
command = bleach.clean(self.get_argument('command', None))
|
||||||
elif action == "start":
|
elif action == "start":
|
||||||
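For context on why every get_argument() call above is wrapped in bleach.clean(): bleach escapes or strips markup from untrusted input before it is stored or echoed back. A rough illustration (the input string is made up):

    import bleach
    bleach.clean("<script>alert(1)</script>hello")
    # -> roughly '&lt;script&gt;alert(1)&lt;/script&gt;hello'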
@ -1039,7 +1038,7 @@ class PanelHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
CronValidator.parse(cron_string)
|
CronValidator.parse(cron_string)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.redirect("/panel/error?error=INVALID FORMAT: Invalid Cron Format. {}".format(e))
|
self.redirect(f"/panel/error?error=INVALID FORMAT: Invalid Cron Format. {e}")
|
||||||
return
|
return
|
||||||
action = bleach.clean(self.get_argument('action', None))
|
action = bleach.clean(self.get_argument('action', None))
|
||||||
if action == "command":
|
if action == "command":
|
||||||
@ -1061,7 +1060,8 @@ class PanelHandler(BaseHandler):
|
|||||||
else:
|
else:
|
||||||
one_time = False
|
one_time = False
|
||||||
|
|
||||||
if not superuser and not permissions['Backup'] in self.controller.server_perms.get_user_id_permissions_list(exec_user["user_id"], server_id):
|
if not superuser and not permissions['Backup'] in self.controller.server_perms.get_user_id_permissions_list(exec_user["user_id"],
|
||||||
|
server_id):
|
||||||
self.redirect("/panel/error?error=Unauthorized access: User not authorized")
|
self.redirect("/panel/error?error=Unauthorized access: User not authorized")
|
||||||
return
|
return
|
||||||
elif server_id is None:
|
elif server_id is None:
|
||||||
@ -1072,13 +1072,6 @@ class PanelHandler(BaseHandler):
|
|||||||
if not self.controller.servers.server_id_exists(server_id):
|
if not self.controller.servers.server_id_exists(server_id):
|
||||||
self.redirect("/panel/error?error=Invalid Server ID")
|
self.redirect("/panel/error?error=Invalid Server ID")
|
||||||
return
|
return
|
||||||
minute = datetime.datetime.now().minute
|
|
||||||
hour = datetime.datetime.now().hour
|
|
||||||
if minute < 10:
|
|
||||||
minute = '0' + str(minute)
|
|
||||||
if hour < 10:
|
|
||||||
hour = '0'+str(hour)
|
|
||||||
current_time = str(hour)+':'+str(minute)
|
|
||||||
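For reference, the hand-rolled zero-padding removed above can be expressed in a single standard-library call if it is ever needed again:

    import datetime
    current_time = datetime.datetime.now().strftime('%H:%M')  # e.g. '09:05'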
|
|
||||||
if interval_type == "days":
|
if interval_type == "days":
|
||||||
job_data = {
|
job_data = {
|
||||||
@ -1087,7 +1080,7 @@ class PanelHandler(BaseHandler):
|
|||||||
"interval_type": interval_type,
|
"interval_type": interval_type,
|
||||||
"interval": interval,
|
"interval": interval,
|
||||||
"command": command,
|
"command": command,
|
||||||
"start_time": time,
|
"start_time": sch_time,
|
||||||
"enabled": enabled,
|
"enabled": enabled,
|
||||||
"one_time": one_time,
|
"one_time": one_time,
|
||||||
"cron_string": ''
|
"cron_string": ''
|
||||||
@ -1098,7 +1091,6 @@ class PanelHandler(BaseHandler):
|
|||||||
"action": action,
|
"action": action,
|
||||||
"interval_type": '',
|
"interval_type": '',
|
||||||
"interval": '',
|
"interval": '',
|
||||||
"command": '',
|
|
||||||
#We'll base every interval off of a midnight start time.
|
#We'll base every interval off of a midnight start time.
|
||||||
"start_time": '',
|
"start_time": '',
|
||||||
"command": command,
|
"command": command,
|
||||||
@ -1123,11 +1115,11 @@ class PanelHandler(BaseHandler):
|
|||||||
self.tasks_manager.schedule_job(job_data)
|
self.tasks_manager.schedule_job(job_data)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited server {}: added scheduled job".format(server_id),
|
f"Edited server {server_id}: added scheduled job",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
self.tasks_manager.reload_schedule_from_db()
|
self.tasks_manager.reload_schedule_from_db()
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=tasks".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=tasks")
|
||||||
|
|
||||||
|
|
||||||
if page == "edit_schedule":
|
if page == "edit_schedule":
|
||||||
@ -1141,7 +1133,7 @@ class PanelHandler(BaseHandler):
|
|||||||
interval_type = bleach.clean(self.get_argument('interval_type', None))
|
interval_type = bleach.clean(self.get_argument('interval_type', None))
|
||||||
#only check for time if it's number of days
|
#only check for time if it's number of days
|
||||||
if interval_type == "days":
|
if interval_type == "days":
|
||||||
time = bleach.clean(self.get_argument('time', None))
|
sch_time = bleach.clean(self.get_argument('time', None))
|
||||||
if action == "command":
|
if action == "command":
|
||||||
command = bleach.clean(self.get_argument('command', None))
|
command = bleach.clean(self.get_argument('command', None))
|
||||||
elif action == "start":
|
elif action == "start":
|
||||||
@ -1159,7 +1151,7 @@ class PanelHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
CronValidator.parse(cron_string)
|
CronValidator.parse(cron_string)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.redirect("/panel/error?error=INVALID FORMAT: Invalid Cron Format. {}".format(e))
|
self.redirect(f"/panel/error?error=INVALID FORMAT: Invalid Cron Format. {e}")
|
||||||
return
|
return
|
||||||
action = bleach.clean(self.get_argument('action', None))
|
action = bleach.clean(self.get_argument('action', None))
|
||||||
if action == "command":
|
if action == "command":
|
||||||
@ -1181,7 +1173,8 @@ class PanelHandler(BaseHandler):
|
|||||||
else:
|
else:
|
||||||
one_time = False
|
one_time = False
|
||||||
|
|
||||||
if not superuser and not permissions['Backup'] in self.controller.server_perms.get_user_id_permissions_list(exec_user["user_id"], server_id):
|
if not superuser and not permissions['Backup'] in self.controller.server_perms.get_user_id_permissions_list(exec_user["user_id"],
|
||||||
|
server_id):
|
||||||
self.redirect("/panel/error?error=Unauthorized access: User not authorized")
|
self.redirect("/panel/error?error=Unauthorized access: User not authorized")
|
||||||
return
|
return
|
||||||
elif server_id is None:
|
elif server_id is None:
|
||||||
@ -1192,13 +1185,6 @@ class PanelHandler(BaseHandler):
|
|||||||
if not self.controller.servers.server_id_exists(server_id):
|
if not self.controller.servers.server_id_exists(server_id):
|
||||||
self.redirect("/panel/error?error=Invalid Server ID")
|
self.redirect("/panel/error?error=Invalid Server ID")
|
||||||
return
|
return
|
||||||
minute = datetime.datetime.now().minute
|
|
||||||
hour = datetime.datetime.now().hour
|
|
||||||
if minute < 10:
|
|
||||||
minute = '0' + str(minute)
|
|
||||||
if hour < 10:
|
|
||||||
hour = '0'+str(hour)
|
|
||||||
current_time = str(hour)+':'+str(minute)
|
|
||||||
|
|
||||||
if interval_type == "days":
|
if interval_type == "days":
|
||||||
job_data = {
|
job_data = {
|
||||||
@ -1207,7 +1193,7 @@ class PanelHandler(BaseHandler):
|
|||||||
"interval_type": interval_type,
|
"interval_type": interval_type,
|
||||||
"interval": interval,
|
"interval": interval,
|
||||||
"command": command,
|
"command": command,
|
||||||
"start_time": time,
|
"start_time": sch_time,
|
||||||
"enabled": enabled,
|
"enabled": enabled,
|
||||||
"one_time": one_time,
|
"one_time": one_time,
|
||||||
"cron_string": ''
|
"cron_string": ''
|
||||||
@ -1218,7 +1204,6 @@ class PanelHandler(BaseHandler):
|
|||||||
"action": action,
|
"action": action,
|
||||||
"interval_type": '',
|
"interval_type": '',
|
||||||
"interval": '',
|
"interval": '',
|
||||||
"command": '',
|
|
||||||
#We'll base every interval off of a midnight start time.
|
#We'll base every interval off of a midnight start time.
|
||||||
"start_time": '',
|
"start_time": '',
|
||||||
"command": command,
|
"command": command,
|
||||||
@ -1243,11 +1228,11 @@ class PanelHandler(BaseHandler):
|
|||||||
self.tasks_manager.update_job(sch_id, job_data)
|
self.tasks_manager.update_job(sch_id, job_data)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited server {}: updated schedule".format(server_id),
|
f"Edited server {server_id}: updated schedule",
|
||||||
server_id,
|
server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
self.tasks_manager.reload_schedule_from_db()
|
self.tasks_manager.reload_schedule_from_db()
|
||||||
self.redirect("/panel/server_detail?id={}&subpage=tasks".format(server_id))
|
self.redirect(f"/panel/server_detail?id={server_id}&subpage=tasks")
|
||||||
|
|
||||||
|
|
||||||
elif page == "edit_user":
|
elif page == "edit_user":
|
||||||
@ -1262,7 +1247,8 @@ class PanelHandler(BaseHandler):
|
|||||||
lang = bleach.clean(self.get_argument('language'), helper.get_setting('language'))
|
lang = bleach.clean(self.get_argument('language'), helper.get_setting('language'))
|
||||||
|
|
||||||
if superuser:
|
if superuser:
|
||||||
#Checks if user is trying to change super user status of self. We don't want that. Automatically make them stay super user since we know they are.
|
#Checks if user is trying to change super user status of self. We don't want that.
|
||||||
|
# Automatically make them stay super user since we know they are.
|
||||||
if str(exec_user['user_id']) != str(user_id):
|
if str(exec_user['user_id']) != str(user_id):
|
||||||
superuser = bleach.clean(self.get_argument('superuser', '0'))
|
superuser = bleach.clean(self.get_argument('superuser', '0'))
|
||||||
else:
|
else:
|
||||||
@ -1358,7 +1344,8 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.users.add_user_api_key(name, user_id, superuser, crafty_permissions_mask, server_permissions_mask)
|
self.controller.users.add_user_api_key(name, user_id, superuser, crafty_permissions_mask, server_permissions_mask)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
f"Added API key {name} with crafty permissions {crafty_permissions_mask} and {server_permissions_mask} for user with UID: {user_id}",
|
f"Added API key {name} with crafty permissions {crafty_permissions_mask}" +
|
||||||
|
f" and {server_permissions_mask} for user with UID: {user_id}",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.redirect(f"/panel/edit_user_apikeys?id={user_id}")
|
self.redirect(f"/panel/edit_user_apikeys?id={user_id}")
|
||||||
@ -1389,13 +1376,14 @@ class PanelHandler(BaseHandler):
|
|||||||
|
|
||||||
elif page == "add_user":
|
elif page == "add_user":
|
||||||
if bleach.clean(self.get_argument('username', None)).lower() == 'system':
|
if bleach.clean(self.get_argument('username', None)).lower() == 'system':
|
||||||
self.redirect("/panel/error?error=Unauthorized access: username system is reserved for the Crafty system. Please choose a different username.")
|
self.redirect("/panel/error?error=Unauthorized access: username system is reserved for the Crafty system." +
|
||||||
|
" Please choose a different username.")
|
||||||
return
|
return
|
||||||
username = bleach.clean(self.get_argument('username', None))
|
username = bleach.clean(self.get_argument('username', None))
|
||||||
password0 = bleach.clean(self.get_argument('password0', None))
|
password0 = bleach.clean(self.get_argument('password0', None))
|
||||||
password1 = bleach.clean(self.get_argument('password1', None))
|
password1 = bleach.clean(self.get_argument('password1', None))
|
||||||
email = bleach.clean(self.get_argument('email', "default@example.com"))
|
email = bleach.clean(self.get_argument('email', "default@example.com"))
|
||||||
enabled = int(float(self.get_argument('enabled', '0'))),
|
enabled = int(float(self.get_argument('enabled', '0')))
|
||||||
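The dropped trailing comma above matters: in Python a trailing comma turns the expression into a one-element tuple. A quick illustration:

    enabled = int(float('0')),   # -> (0,)  a tuple, truthy even when the value is 0
    enabled = int(float('0'))    # -> 0     the intended integer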
lang = bleach.clean(self.get_argument('lang', helper.get_setting('language')))
|
lang = bleach.clean(self.get_argument('lang', helper.get_setting('language')))
|
||||||
if superuser:
|
if superuser:
|
||||||
superuser = bleach.clean(self.get_argument('superuser', '0'))
|
superuser = bleach.clean(self.get_argument('superuser', '0'))
|
||||||
@ -1437,11 +1425,11 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.users.update_user(user_id, user_data=user_data, user_crafty_data=user_crafty_data)
|
self.controller.users.update_user(user_id, user_data=user_data, user_crafty_data=user_crafty_data)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Added user {} (UID:{})".format(username, user_id),
|
f"Added user {username} (UID:{user_id})",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited user {} (UID:{}) with roles {}".format(username, user_id, roles),
|
f"Edited user {username} (UID:{user_id}) with roles {roles}",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.redirect("/panel/panel_config")
|
self.redirect("/panel/panel_config")
|
||||||
@ -1475,7 +1463,7 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.roles.update_role(role_id, role_data=role_data, permissions_mask=permissions_mask)
|
self.controller.roles.update_role(role_id, role_data=role_data, permissions_mask=permissions_mask)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited role {} (RID:{}) with servers {}".format(role_name, role_id, servers),
|
f"Edited role {role_name} (RID:{role_id}) with servers {servers}",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.redirect("/panel/panel_config")
|
self.redirect("/panel/panel_config")
|
||||||
@ -1503,11 +1491,11 @@ class PanelHandler(BaseHandler):
|
|||||||
self.controller.roles.update_role(role_id, {"servers": servers}, permissions_mask)
|
self.controller.roles.update_role(role_id, {"servers": servers}, permissions_mask)
|
||||||
|
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Added role {} (RID:{})".format(role_name, role_id),
|
f"Added role {role_name} (RID:{role_id})",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"Edited role {} (RID:{}) with servers {}".format(role_name, role_id, servers),
|
f"Edited role {role_name} (RID:{role_id}) with servers {servers}",
|
||||||
server_id=0,
|
server_id=0,
|
||||||
source_ip=self.get_remote_ip())
|
source_ip=self.get_remote_ip())
|
||||||
self.redirect("/panel/panel_config")
|
self.redirect("/panel/panel_config")
|
||||||
@ -1523,6 +1511,7 @@ class PanelHandler(BaseHandler):
|
|||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def delete(self, page):
|
def delete(self, page):
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
|
@ -1,19 +1,13 @@
|
|||||||
from re import X
|
|
||||||
import sys
|
import sys
|
||||||
import json
|
|
||||||
import libgravatar
|
|
||||||
import logging
|
import logging
|
||||||
import requests
|
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
|
|
||||||
from app.classes.shared.authentication import authentication
|
from app.classes.shared.authentication import authentication
|
||||||
from app.classes.shared.helpers import Helpers, helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.base_handler import BaseHandler
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.shared.main_models import fn
|
from app.classes.shared.main_models import fn
|
||||||
|
|
||||||
from app.classes.models.users import Users
|
from app.classes.models.users import Users
|
||||||
|
from app.classes.web.base_handler import BaseHandler
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -21,11 +15,10 @@ try:
|
|||||||
import bleach
|
import bleach
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
class PublicHandler(BaseHandler):
|
class PublicHandler(BaseHandler):
|
||||||
|
|
||||||
def set_current_user(self, user_id: str = None):
|
def set_current_user(self, user_id: str = None):
|
||||||
@ -86,14 +79,16 @@ class PublicHandler(BaseHandler):
|
|||||||
entered_username = bleach.clean(self.get_argument('username'))
|
entered_username = bleach.clean(self.get_argument('username'))
|
||||||
entered_password = bleach.clean(self.get_argument('password'))
|
entered_password = bleach.clean(self.get_argument('password'))
|
||||||
|
|
||||||
|
# pylint: disable=no-member
|
||||||
user_data = Users.get_or_none(fn.Lower(Users.username) == entered_username.lower())
|
user_data = Users.get_or_none(fn.Lower(Users.username) == entered_username.lower())
|
||||||
|
|
||||||
|
|
||||||
# if we don't have a user
|
# if we don't have a user
|
||||||
if not user_data:
|
if not user_data:
|
||||||
error_msg = "Inncorrect username or password. Please try again."
|
error_msg = "Incorrect username or password. Please try again."
|
||||||
self.clear_cookie("user")
|
self.clear_cookie("user")
|
||||||
self.clear_cookie("user_data")
|
self.clear_cookie("user_data")
|
||||||
self.redirect('/public/login?error_msg={}'.format(error_msg))
|
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||||
return
|
return
|
||||||
|
|
||||||
# if they are disabled
|
# if they are disabled
|
||||||
@ -101,7 +96,7 @@ class PublicHandler(BaseHandler):
|
|||||||
error_msg = "User account disabled. Please contact your system administrator for more info."
|
error_msg = "User account disabled. Please contact your system administrator for more info."
|
||||||
self.clear_cookie("user")
|
self.clear_cookie("user")
|
||||||
self.clear_cookie("user_data")
|
self.clear_cookie("user_data")
|
||||||
self.redirect('/public/login?error_msg={}'.format(error_msg))
|
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||||
return
|
return
|
||||||
|
|
||||||
login_result = helper.verify_pass(entered_password, user_data.password)
|
login_result = helper.verify_pass(entered_password, user_data.password)
|
||||||
@ -109,7 +104,7 @@ class PublicHandler(BaseHandler):
|
|||||||
# Valid Login
|
# Valid Login
|
||||||
if login_result:
|
if login_result:
|
||||||
self.set_current_user(user_data.user_id)
|
self.set_current_user(user_data.user_id)
|
||||||
logger.info("User: {} Logged in from IP: {}".format(user_data, self.get_remote_ip()))
|
logger.info(f"User: {user_data} Logged in from IP: {self.get_remote_ip()}")
|
||||||
|
|
||||||
# record this login
|
# record this login
|
||||||
q = Users.select().where(Users.username == entered_username.lower()).get()
|
q = Users.select().where(Users.username == entered_username.lower()).get()
|
||||||
@ -128,7 +123,6 @@ class PublicHandler(BaseHandler):
|
|||||||
error_msg = "Inncorrect username or password. Please try again."
|
error_msg = "Inncorrect username or password. Please try again."
|
||||||
# log this failed login attempt
|
# log this failed login attempt
|
||||||
self.controller.management.add_to_audit_log(user_data.user_id, "Tried to log in", 0, self.get_remote_ip())
|
self.controller.management.add_to_audit_log(user_data.user_id, "Tried to log in", 0, self.get_remote_ip())
|
||||||
self.redirect('/public/login?error_msg={}'.format(error_msg))
|
self.redirect(f'/public/login?error_msg={error_msg}')
|
||||||
else:
|
else:
|
||||||
self.redirect("/public/login")
|
self.redirect("/public/login")
|
||||||
|
|
||||||
|
@ -3,14 +3,14 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
import libgravatar
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
from app.classes.models.crafty_permissions import Enum_Permissions_Crafty
|
from app.classes.models.crafty_permissions import Enum_Permissions_Crafty
|
||||||
from app.classes.minecraft.serverjars import server_jar_obj
|
from app.classes.minecraft.serverjars import server_jar_obj
|
||||||
from app.classes.shared.helpers import helper
|
|
||||||
import libgravatar
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -21,8 +21,8 @@ try:
|
|||||||
import bleach
|
import bleach
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@ -30,6 +30,7 @@ class ServerHandler(BaseHandler):
|
|||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def get(self, page):
|
def get(self, page):
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
@ -120,6 +121,7 @@ class ServerHandler(BaseHandler):
|
|||||||
|
|
||||||
@tornado.web.authenticated
|
@tornado.web.authenticated
|
||||||
def post(self, page):
|
def post(self, page):
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
@ -152,7 +154,7 @@ class ServerHandler(BaseHandler):
|
|||||||
name_counter = 1
|
name_counter = 1
|
||||||
while is_name_used(new_server_name):
|
while is_name_used(new_server_name):
|
||||||
name_counter += 1
|
name_counter += 1
|
||||||
new_server_name = server_data.get('server_name') + " (Copy {})".format(name_counter)
|
new_server_name = server_data.get('server_name') + f" (Copy {name_counter})"
|
||||||
|
|
||||||
new_server_uuid = helper.create_uuid()
|
new_server_uuid = helper.create_uuid()
|
||||||
while os.path.exists(os.path.join(helper.servers_dir, new_server_uuid)):
|
while os.path.exists(os.path.join(helper.servers_dir, new_server_uuid)):
|
||||||
@ -167,12 +169,17 @@ class ServerHandler(BaseHandler):
|
|||||||
new_server_command = str(server_data.get('execution_command')).replace(server_uuid, new_server_uuid)
|
new_server_command = str(server_data.get('execution_command')).replace(server_uuid, new_server_uuid)
|
||||||
new_executable = server_data.get('executable')
|
new_executable = server_data.get('executable')
|
||||||
new_server_log_file = str(helper.get_os_understandable_path(server_data.get('log_path'))).replace(server_uuid, new_server_uuid)
|
new_server_log_file = str(helper.get_os_understandable_path(server_data.get('log_path'))).replace(server_uuid, new_server_uuid)
|
||||||
auto_start = server_data.get('auto_start')
|
|
||||||
auto_start_delay = server_data.get('auto_start_delay')
|
|
||||||
crash_detection = server_data.get('crash_detection')
|
|
||||||
server_port = server_data.get('server_port')
|
server_port = server_data.get('server_port')
|
||||||
|
|
||||||
self.controller.servers.create_server(new_server_name, new_server_uuid, new_server_path, "", new_server_command, new_executable, new_server_log_file, stop_command, server_port)
|
self.controller.servers.create_server(new_server_name,
|
||||||
|
new_server_uuid,
|
||||||
|
new_server_path,
|
||||||
|
"",
|
||||||
|
new_server_command,
|
||||||
|
new_executable,
|
||||||
|
new_server_log_file,
|
||||||
|
stop_command,
|
||||||
|
server_port)
|
||||||
|
|
||||||
self.controller.init_all_servers()
|
self.controller.init_all_servers()
|
||||||
|
|
||||||
@ -213,7 +220,7 @@ class ServerHandler(BaseHandler):
|
|||||||
|
|
||||||
new_server_id = self.controller.import_jar_server(server_name, import_server_path,import_server_jar, min_mem, max_mem, port)
|
new_server_id = self.controller.import_jar_server(server_name, import_server_path,import_server_jar, min_mem, max_mem, port)
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"imported a jar server named \"{}\"".format(server_name), # Example: Admin imported a server named "old creative"
|
f"imported a jar server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||||
new_server_id,
|
new_server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
elif import_type == 'import_zip':
|
elif import_type == 'import_zip':
|
||||||
@ -226,10 +233,11 @@ class ServerHandler(BaseHandler):
|
|||||||
|
|
||||||
new_server_id = self.controller.import_zip_server(server_name, zip_path, import_server_jar, min_mem, max_mem, port)
|
new_server_id = self.controller.import_zip_server(server_name, zip_path, import_server_jar, min_mem, max_mem, port)
|
||||||
if new_server_id == "false":
|
if new_server_id == "false":
|
||||||
self.redirect("/panel/error?error=Zip file not accessible! You can fix this permissions issue with sudo chown -R crafty:crafty {} And sudo chmod 2775 -R {}".format(import_server_path, import_server_path))
|
self.redirect("/panel/error?error=Zip file not accessible! You can fix this permissions issue with" +
|
||||||
|
f"sudo chown -R crafty:crafty {import_server_path} And sudo chmod 2775 -R {import_server_path}")
|
||||||
return
|
return
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"imported a zip server named \"{}\"".format(server_name), # Example: Admin imported a server named "old creative"
|
f"imported a zip server named \"{server_name}\"", # Example: Admin imported a server named "old creative"
|
||||||
new_server_id,
|
new_server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
#deletes temp dir
|
#deletes temp dir
|
||||||
@ -243,7 +251,8 @@ class ServerHandler(BaseHandler):
|
|||||||
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
role_ids = self.controller.users.get_user_roles_id(exec_user["user_id"])
|
||||||
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)
|
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)
|
||||||
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
self.controller.management.add_to_audit_log(exec_user['user_id'],
|
||||||
"created a {} {} server named \"{}\"".format(server_version, str(server_type).capitalize(), server_name), # Example: Admin created a 1.16.5 Bukkit server named "survival"
|
f"created a {server_version} {str(server_type).capitalize()} server named \"{server_name}\"",
|
||||||
|
# Example: Admin created a 1.16.5 Bukkit server named "survival"
|
||||||
new_server_id,
|
new_server_id,
|
||||||
self.get_remote_ip())
|
self.get_remote_ip())
|
||||||
|
|
||||||
@ -251,7 +260,7 @@ class ServerHandler(BaseHandler):
|
|||||||
if len(captured_roles) == 0:
|
if len(captured_roles) == 0:
|
||||||
if not superuser:
|
if not superuser:
|
||||||
new_server_uuid = self.controller.servers.get_server_data_by_id(new_server_id).get("server_uuid")
|
new_server_uuid = self.controller.servers.get_server_data_by_id(new_server_id).get("server_uuid")
|
||||||
role_id = self.controller.roles.add_role("Creator of Server with uuid={}".format(new_server_uuid))
|
role_id = self.controller.roles.add_role(f"Creator of Server with uuid={new_server_uuid}")
|
||||||
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
self.controller.server_perms.add_role_server(new_server_id, role_id, "11111111")
|
||||||
self.controller.users.add_role_to_user(exec_user["user_id"], role_id)
|
self.controller.users.add_role_to_user(exec_user["user_id"], role_id)
|
||||||
self.controller.crafty_perms.add_server_creation(exec_user["user_id"])
|
self.controller.crafty_perms.add_server_creation(exec_user["user_id"])
|
||||||
@ -264,8 +273,11 @@ class ServerHandler(BaseHandler):
|
|||||||
self.controller.stats.record_stats()
|
self.controller.stats.record_stats()
|
||||||
self.redirect("/panel/dashboard")
|
self.redirect("/panel/dashboard")
|
||||||
|
|
||||||
|
try:
|
||||||
self.render(
|
self.render(
|
||||||
template,
|
template,
|
||||||
data=page_data,
|
data=page_data,
|
||||||
translate=self.translator.translate,
|
translate=self.translator.translate,
|
||||||
)
|
)
|
||||||
|
except RuntimeError:
|
||||||
|
self.redirect('/panel/dashboard')
|
||||||
|
@ -1,9 +1,5 @@
|
|||||||
|
from typing import ( Optional )
|
||||||
import tornado.web
|
import tornado.web
|
||||||
from typing import (
|
|
||||||
Optional
|
|
||||||
)
|
|
||||||
|
|
||||||
from app.classes.shared.console import console
|
|
||||||
|
|
||||||
class CustomStaticHandler(tornado.web.StaticFileHandler):
|
class CustomStaticHandler(tornado.web.StaticFileHandler):
|
||||||
def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
|
def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
|
||||||
|
@ -1,27 +1,10 @@
|
|||||||
from re import template
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import tornado.web
|
|
||||||
import tornado.escape
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.base_handler import BaseHandler
|
from app.classes.web.base_handler import BaseHandler
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.shared.main_models import fn
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
try:
|
|
||||||
import bleach
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e.name), exc_info=True)
|
|
||||||
console.critical("Import Error: Unable to load {} module".format(e.name))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class StatusHandler(BaseHandler):
|
class StatusHandler(BaseHandler):
|
||||||
def get(self):
|
def get(self):
|
||||||
page_data = {}
|
page_data = {}
|
||||||
|
@ -3,10 +3,6 @@ import sys
|
|||||||
import json
|
import json
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import threading
|
|
||||||
|
|
||||||
from app.classes.shared.console import console
|
|
||||||
from app.classes.shared.helpers import helper
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -26,18 +22,18 @@ try:
|
|||||||
from app.classes.web.api_handler import ServersStats, NodeStats
|
from app.classes.web.api_handler import ServersStats, NodeStats
|
||||||
from app.classes.web.websocket_handler import SocketHandler
|
from app.classes.web.websocket_handler import SocketHandler
|
||||||
from app.classes.web.static_handler import CustomStaticHandler
|
from app.classes.web.static_handler import CustomStaticHandler
|
||||||
from app.classes.shared.translation import translation
|
|
||||||
from app.classes.web.upload_handler import UploadHandler
|
from app.classes.web.upload_handler import UploadHandler
|
||||||
from app.classes.web.http_handler import HTTPHandler, HTTPHandlerPage
|
from app.classes.web.http_handler import HTTPHandler, HTTPHandlerPage
|
||||||
from app.classes.web.status_handler import StatusHandler
|
from app.classes.web.status_handler import StatusHandler
|
||||||
|
from app.classes.shared.translation import translation
|
||||||
|
from app.classes.shared.console import console
|
||||||
|
from app.classes.shared.helpers import helper
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e, e.name))
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e, e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class Webserver:
|
class Webserver:
|
||||||
|
|
||||||
def __init__(self, controller, tasks_manager):
|
def __init__(self, controller, tasks_manager):
|
||||||
@ -48,7 +44,6 @@ class Webserver:
|
|||||||
self.tasks_manager = tasks_manager
|
self.tasks_manager = tasks_manager
|
||||||
self._asyncio_patch()
|
self._asyncio_patch()
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def log_function(handler):
|
def log_function(handler):
|
||||||
|
|
||||||
@ -57,6 +52,7 @@ class Webserver:
|
|||||||
'Method': handler.request.method,
|
'Method': handler.request.method,
|
||||||
'URL': handler.request.uri,
|
'URL': handler.request.uri,
|
||||||
'Remote_IP': handler.request.remote_ip,
|
'Remote_IP': handler.request.remote_ip,
|
||||||
|
# pylint: disable=consider-using-f-string
|
||||||
'Elapsed_Time': '%.2fms' % (handler.request.request_time() * 1000)
|
'Elapsed_Time': '%.2fms' % (handler.request.request_time() * 1000)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -74,12 +70,12 @@ class Webserver:
|
|||||||
"""
|
"""
|
||||||
logger.debug("Checking if asyncio patch is required")
|
logger.debug("Checking if asyncio patch is required")
|
||||||
if sys.platform.startswith("win") and sys.version_info >= (3, 8):
|
if sys.platform.startswith("win") and sys.version_info >= (3, 8):
|
||||||
|
# pylint: disable=reimported,import-outside-toplevel,redefined-outer-name
|
||||||
import asyncio
|
import asyncio
|
||||||
try:
|
try:
|
||||||
from asyncio import WindowsSelectorEventLoopPolicy
|
from asyncio import WindowsSelectorEventLoopPolicy
|
||||||
except ImportError:
|
except ImportError:
|
||||||
logger.debug("asyncio patch isn't required")
|
logger.debug("asyncio patch isn't required") # Can't assign a policy which doesn't exist.
|
||||||
pass # Can't assign a policy which doesn't exist.
|
|
||||||
else:
|
else:
|
||||||
if not isinstance(asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
|
if not isinstance(asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy):
|
||||||
asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
|
asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
|
||||||
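Background on the patch above: on Windows, Python 3.8+ defaults to the Proactor event loop, which lacks the add_reader/add_writer support Tornado relies on, so the selector policy is forced instead. A standalone sketch of the same guard (standard library only):

    import asyncio
    import sys

    if sys.platform.startswith('win') and sys.version_info >= (3, 8):
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())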
@ -110,7 +106,7 @@ class Webserver:
|
|||||||
'keyfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.key.pem'),
|
'keyfile': os.path.join(helper.config_dir, 'web', 'certs', 'commander.key.pem'),
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info("Starting Web Server on ports http:{} https:{}".format(http_port, https_port))
|
logger.info(f"Starting Web Server on ports http:{http_port} https:{https_port}")
|
||||||
|
|
||||||
asyncio.set_event_loop(asyncio.new_event_loop())
|
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||||
|
|
||||||
@ -176,8 +172,8 @@ class Webserver:
|
|||||||
self.HTTPS_Server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
|
self.HTTPS_Server = tornado.httpserver.HTTPServer(app, ssl_options=cert_objects)
|
||||||
self.HTTPS_Server.listen(https_port)
|
self.HTTPS_Server.listen(https_port)
|
||||||
|
|
||||||
logger.info("https://{}:{} is up and ready for connections.".format(helper.get_local_ip(), https_port))
|
logger.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
|
||||||
console.info("https://{}:{} is up and ready for connections.".format(helper.get_local_ip(), https_port))
|
console.info(f"https://{helper.get_local_ip()}:{https_port} is up and ready for connections.")
|
||||||
|
|
||||||
console.info("Server Init Complete: Listening For Connections:")
|
console.info("Server Init Complete: Listening For Connections:")
|
||||||
|
|
@ -1,16 +1,20 @@
|
|||||||
from app.classes.shared.main_controller import Controller
|
import logging
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
import tornado.options
|
import tornado.options
|
||||||
import tornado.web
|
import tornado.web
|
||||||
import tornado.httpserver
|
import tornado.httpserver
|
||||||
from tornado.options import options
|
|
||||||
from app.classes.models.server_permissions import Enum_Permissions_Server
|
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
import logging
|
from app.classes.shared.main_controller import Controller
|
||||||
import os
|
|
||||||
import json
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
import time
|
from app.classes.web.base_handler import BaseHandler
|
||||||
|
|
||||||
|
from app.classes.models.server_permissions import Enum_Permissions_Server
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -18,7 +22,7 @@ logger = logging.getLogger(__name__)
|
|||||||
MAX_STREAMED_SIZE = 1024 * 1024 * 1024
|
MAX_STREAMED_SIZE = 1024 * 1024 * 1024
|
||||||
|
|
||||||
@tornado.web.stream_request_body
|
@tornado.web.stream_request_body
|
||||||
class UploadHandler(tornado.web.RequestHandler):
|
class UploadHandler(BaseHandler):
|
||||||
|
|
||||||
# noinspection PyAttributeOutsideInit
|
# noinspection PyAttributeOutsideInit
|
||||||
def initialize(self, controller: Controller=None, tasks_manager=None, translator=None):
|
def initialize(self, controller: Controller=None, tasks_manager=None, translator=None):
|
||||||
@ -28,19 +32,21 @@ class UploadHandler(tornado.web.RequestHandler):
|
|||||||
|
|
||||||
def prepare(self):
|
def prepare(self):
|
||||||
self.do_upload = True
|
self.do_upload = True
|
||||||
|
# pylint: disable=unused-variable
|
||||||
api_key, token_data, exec_user = self.current_user
|
api_key, token_data, exec_user = self.current_user
|
||||||
|
server_id = self.get_argument('server_id', None)
|
||||||
superuser = exec_user['superuser']
|
superuser = exec_user['superuser']
|
||||||
if api_key is not None:
|
if api_key is not None:
|
||||||
superuser = superuser and api_key.superuser
|
superuser = superuser and api_key.superuser
|
||||||
user_id = exec_user['user_id']
|
user_id = exec_user['user_id']
|
||||||
|
|
||||||
if superuser:
|
if superuser:
|
||||||
exec_user_crafty_permissions = self.controller.crafty_perms.list_defined_crafty_permissions()
|
exec_user_server_permissions = self.controller.server_perms.list_defined_permissions()
|
||||||
elif api_key is not None:
|
elif api_key is not None:
|
||||||
exec_user_crafty_permissions = self.controller.crafty_perms.get_api_key_permissions_list(api_key)
|
exec_user_server_permissions = self.controller.server_perms.get_api_key_permissions_list(api_key, server_id)
|
||||||
else:
|
else:
|
||||||
exec_user_crafty_permissions = self.controller.crafty_perms.get_crafty_permissions_list(
|
exec_user_server_permissions = self.controller.server_perms.get_user_id_permissions_list(
|
||||||
exec_user["user_id"])
|
exec_user["user_id"], server_id)
|
||||||
|
|
||||||
server_id = self.request.headers.get('X-ServerId', None)
|
server_id = self.request.headers.get('X-ServerId', None)
|
||||||
|
|
||||||
@ -54,7 +60,7 @@ class UploadHandler(tornado.web.RequestHandler):
|
|||||||
console.warning('Server ID not found in upload handler call')
|
console.warning('Server ID not found in upload handler call')
|
||||||
self.do_upload = False
|
self.do_upload = False
|
||||||
|
|
||||||
if Enum_Permissions_Server.Files not in exec_user_crafty_permissions:
|
if Enum_Permissions_Server.Files not in exec_user_server_permissions:
|
||||||
logger.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
logger.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
||||||
console.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
console.warning(f'User {user_id} tried to upload a file to {server_id} without permissions!')
|
||||||
self.do_upload = False
|
self.do_upload = False
|
||||||
@ -73,7 +79,7 @@ class UploadHandler(tornado.web.RequestHandler):
|
|||||||
try:
|
try:
|
||||||
self.f = open(full_path, "wb")
|
self.f = open(full_path, "wb")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Upload failed with error: {}".format(e))
|
logger.error(f"Upload failed with error: {e}")
|
||||||
self.do_upload = False
|
self.do_upload = False
|
||||||
# If max_body_size is not set, you cannot upload files > 100MB
|
# If max_body_size is not set, you cannot upload files > 100MB
|
||||||
self.request.connection.set_max_body_size(MAX_STREAMED_SIZE)
|
self.request.connection.set_max_body_size(MAX_STREAMED_SIZE)
|
||||||
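For context, the streaming-upload pattern the handler uses boils down to the following minimal sketch; the class name and output file here are hypothetical, not part of the codebase:

    import tornado.web

    @tornado.web.stream_request_body
    class ExampleUploadHandler(tornado.web.RequestHandler):
        def prepare(self):
            # Without raising max_body_size, Tornado rejects bodies larger than ~100MB.
            self.request.connection.set_max_body_size(1024 * 1024 * 1024)
            self.f = open('upload.bin', 'wb')

        def data_received(self, chunk):
            # Called repeatedly as body chunks arrive; write them straight to disk.
            self.f.write(chunk)

        def post(self):
            self.f.close()
            self.finish('ok')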
@ -94,6 +100,6 @@ class UploadHandler(tornado.web.RequestHandler):
|
|||||||
websocket_helper.broadcast('close_upload_box', 'error')
|
websocket_helper.broadcast('close_upload_box', 'error')
|
||||||
self.finish('error')
|
self.finish('error')
|
||||||
|
|
||||||
def data_received(self, data):
|
def data_received(self, chunk):
|
||||||
if self.do_upload:
|
if self.do_upload:
|
||||||
self.f.write(data)
|
self.f.write(chunk)
|
||||||
|
@ -2,13 +2,12 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import asyncio
|
import asyncio
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from urllib.parse import parse_qsl
|
from urllib.parse import parse_qsl
|
||||||
from app.classes.models.users import Users
|
|
||||||
from app.classes.shared.authentication import authentication
|
from app.classes.shared.authentication import authentication
|
||||||
from app.classes.shared.helpers import helper
|
from app.classes.shared.helpers import helper
|
||||||
from app.classes.web.websocket_helper import websocket_helper
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
|
from app.classes.web.websocket_helper import websocket_helper
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -16,8 +15,8 @@ try:
|
|||||||
import tornado.websocket
|
import tornado.websocket
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
except ModuleNotFoundError as e:
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e, e.name))
|
logger.critical(f"Import Error: Unable to load {e.name} module", exc_info=True)
|
||||||
console.critical("Import Error: Unable to load {} module".format(e, e.name))
|
console.critical(f"Import Error: Unable to load {e.name} module")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
@ -48,6 +47,7 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
|
|||||||
def check_auth(self):
|
def check_auth(self):
|
||||||
return authentication.check_bool(self.get_cookie('token'))
|
return authentication.check_bool(self.get_cookie('token'))
|
||||||
|
|
||||||
|
# pylint: disable=arguments-differ
|
||||||
def open(self):
|
def open(self):
|
||||||
logger.debug('Checking WebSocket authentication')
|
logger.debug('Checking WebSocket authentication')
|
||||||
if self.check_auth():
|
if self.check_auth():
|
||||||
@ -55,7 +55,10 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
|
|||||||
else:
|
else:
|
||||||
websocket_helper.send_message(self, 'notification', 'Not authenticated for WebSocket connection')
|
websocket_helper.send_message(self, 'notification', 'Not authenticated for WebSocket connection')
|
||||||
self.close()
|
self.close()
|
||||||
self.controller.management.add_to_audit_log_raw('unknown', 0, 0, 'Someone tried to connect via WebSocket without proper authentication', self.get_remote_ip())
|
self.controller.management.add_to_audit_log_raw('unknown',
|
||||||
|
0, 0,
|
||||||
|
'Someone tried to connect via WebSocket without proper authentication',
|
||||||
|
self.get_remote_ip())
|
||||||
websocket_helper.broadcast('notification', 'Someone tried to connect via WebSocket without proper authentication')
|
websocket_helper.broadcast('notification', 'Someone tried to connect via WebSocket without proper authentication')
|
||||||
logger.warning('Someone tried to connect via WebSocket without proper authentication')
|
logger.warning('Someone tried to connect via WebSocket without proper authentication')
|
||||||
|
|
||||||
@ -67,23 +70,21 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
|
|||||||
)))
|
)))
|
||||||
websocket_helper.add_client(self)
|
websocket_helper.add_client(self)
|
||||||
logger.debug('Opened WebSocket connection')
|
logger.debug('Opened WebSocket connection')
|
||||||
# websocket_helper.broadcast('notification', 'New client connected')
|
|
||||||
|
|
||||||
|
# pylint: disable=arguments-renamed
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def on_message(raw_message):
|
def on_message(raw_message):
|
||||||
|
|
||||||
logger.debug('Got message from WebSocket connection {}'.format(raw_message))
|
logger.debug(f'Got message from WebSocket connection {raw_message}')
|
||||||
message = json.loads(raw_message)
|
message = json.loads(raw_message)
|
||||||
logger.debug('Event Type: {}, Data: {}'.format(message['event'], message['data']))
|
logger.debug(f"Event Type: {message['event']}, Data: {message['data']}")
|
||||||
|
|
||||||
def on_close(self):
|
def on_close(self):
|
||||||
websocket_helper.remove_client(self)
|
websocket_helper.remove_client(self)
|
||||||
logger.debug('Closed WebSocket connection')
|
logger.debug('Closed WebSocket connection')
|
||||||
# websocket_helper.broadcast('notification', 'Client disconnected')
|
|
||||||
|
|
||||||
async def write_message_int(self, message):
|
async def write_message_int(self, message):
|
||||||
self.write_message(message)
|
self.write_message(message)
|
||||||
|
|
||||||
def write_message_helper(self, message):
|
def write_message_helper(self, message):
|
||||||
asyncio.run_coroutine_threadsafe(self.write_message_int(message), self.io_loop.asyncio_loop)
|
asyncio.run_coroutine_threadsafe(self.write_message_int(message), self.io_loop.asyncio_loop)
|
||||||
|
|
||||||
|
@ -1,20 +1,10 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import sys, threading, asyncio
|
|
||||||
|
|
||||||
from app.classes.shared.console import console
|
from app.classes.shared.console import console
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
import tornado.ioloop
|
|
||||||
|
|
||||||
except ModuleNotFoundError as e:
|
|
||||||
logger.critical("Import Error: Unable to load {} module".format(e, e.name))
|
|
||||||
console.critical("Import Error: Unable to load {} module".format(e, e.name))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
class WebSocketHelper:
|
class WebSocketHelper:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.clients = set()
|
self.clients = set()
|
||||||
@ -25,18 +15,19 @@ class WebSocketHelper:
|
|||||||
def remove_client(self, client):
|
def remove_client(self, client):
|
||||||
self.clients.remove(client)
|
self.clients.remove(client)
|
||||||
|
|
||||||
|
# pylint: disable=no-self-use
|
||||||
def send_message(self, client, event_type: str, data):
|
def send_message(self, client, event_type: str, data):
|
||||||
if client.check_auth():
|
if client.check_auth():
|
||||||
message = str(json.dumps({'event': event_type, 'data': data}))
|
message = str(json.dumps({'event': event_type, 'data': data}))
|
||||||
client.write_message_helper(message)
|
client.write_message_helper(message)
|
||||||
|
|
||||||
def broadcast(self, event_type: str, data):
|
def broadcast(self, event_type: str, data):
|
||||||
logger.debug('Sending to {} clients: {}'.format(len(self.clients), json.dumps({'event': event_type, 'data': data})))
|
logger.debug(f"Sending to {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
|
||||||
for client in self.clients:
|
for client in self.clients:
|
||||||
try:
|
try:
|
||||||
self.send_message(client, event_type, data)
|
self.send_message(client, event_type, data)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.exception('Error catched while sending WebSocket message to {}'.format(client.get_remote_ip()))
|
logger.exception(f'Error caught while sending WebSocket message to {client.get_remote_ip()} {e}')
|
||||||
|
|
||||||
def broadcast_page(self, page: str, event_type: str, data):
|
def broadcast_page(self, page: str, event_type: str, data):
|
||||||
def filter_fn(client):
|
def filter_fn(client):
|
||||||
@@ -87,13 +78,13 @@ class WebSocketHelper:
     def broadcast_with_fn(self, filter_fn, event_type: str, data):
         clients = list(filter(filter_fn, self.clients))
 
-        logger.debug('Sending to {} out of {} clients: {}'.format(len(clients), len(self.clients), json.dumps({'event': event_type, 'data': data})))
+        logger.debug(f"Sending to {len(clients)} out of {len(self.clients)} clients: {json.dumps({'event': event_type, 'data': data})}")
 
         for client in clients:
             try:
                 self.send_message(client, event_type, data)
             except Exception as e:
-                logger.exception('Error catched while sending WebSocket message to {}'.format(client.get_remote_ip()))
+                logger.exception(f'Error catched while sending WebSocket message to {client.get_remote_ip()} {e}')
 
     def disconnect_all(self):
         console.info('Disconnecting WebSocket clients')
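The body of filter_fn in broadcast_page is outside this hunk; presumably it keeps only the clients currently viewing the given page, roughly like the sketch below (the client attribute name is an assumption, not taken from the commit):

    def broadcast_page(self, page: str, event_type: str, data):
        def filter_fn(client):
            # assumed: each connected client remembers which page it is on
            return client.page == page
        self.broadcast_with_fn(filter_fn, event_type, data)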
@@ -2,5 +2,5 @@
     "major": 4,
     "minor": 0,
     "sub": 0,
-    "meta": "alpha.3"
+    "meta": "alpha.3.5"
 }
@@ -221,10 +221,10 @@
           </td>
           <td id="server_running_status_{{server['server_data']['server_id']}}">
             {% if server['stats']['running'] %}
-            <i class="fas fa-thumbs-up"></i> <span class="text-success">{{ translate('dashboard', 'online',
+            <span class="text-success"><i class="fas fa-signal"></i> {{ translate('dashboard', 'online',
             data['lang']) }}</span>
             {% else %}
-            <i class="fas fa-thumbs-down"></i> <span class="text-danger">{{ translate('dashboard', 'offline',
+            <span class="text-danger"><i class="fas fa-ban"></i> {{ translate('dashboard', 'offline',
             data['lang']) }}</span>
             {% end %}
           </td>
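The translate('dashboard', 'online', data['lang']) calls above resolve keys from the per-language JSON strings edited in the next hunk. The helper itself is not part of this commit; a lookup of that shape is roughly the sketch below, with the file path being an assumption:

    import json

    def translate(page: str, word: str, language: str) -> str:
        # illustrative only; the real helper and file layout live elsewhere in Crafty
        with open(f"app/translations/{language}.json", encoding='utf-8') as f:
            return json.load(f)[page][word]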
@@ -281,7 +281,9 @@
         "noDeleteFiles": "No, just remove from panel",
         "sendingDelete": "Deleting Server",
         "bePatientDelete": "Please be patient while we remove your server from the Crafty panel. This screen will close in a few moments.",
-        "bePatientDeleteFiles" : "Please be patient while we remove your server from the Crafty panel and delete all files. This screen will close in a few moments."
+        "bePatientDeleteFiles" : "Please be patient while we remove your server from the Crafty panel and delete all files. This screen will close in a few moments.",
+        "crashTime": "Crash Timeout",
+        "crashTimeDesc": "How long should we wait before we consider your server as crashed?"
     },
     "serverConfigHelp": {
         "title": "Server Config Area",
@@ -23,9 +23,11 @@ server {
     ssl_certificate <CERIFICATE_LOCATION>;
     ssl_certificate_key <KEYFILE_LOCATION>;
     location / {
+        #This is important for websockets
         proxy_http_version 1.1;
         proxy_redirect off;
 
+        #These are important for websockets. They are required for crafty to function properly.
         proxy_set_header Upgrade $http_upgrade;
         proxy_set_header Connection $http_connection;
         proxy_set_header X-Forwarded-Proto https;
45 main.py
@@ -1,4 +1,3 @@
-from cmd import Cmd
 import os
 import sys
 import json
@@ -6,19 +5,23 @@ import time
 import argparse
 import logging.config
 import signal
-import threading
-from app.classes.controllers.management_controller import Management_Controller
-
-""" Our custom classes / pip packages """
 from app.classes.shared.console import console
 from app.classes.shared.helpers import helper
+if helper.check_file_exists('/.dockerenv'):
+    console.cyan("Docker environment detected!")
+else:
+    if helper.checkRoot():
+        console.critical("Root detected. Root/Admin access denied. Run Crafty again with non-elevated permissions.")
+        time.sleep(5)
+        console.critical("Crafty shutting down. Root/Admin access denied.")
+        sys.exit(0)
+# pylint: disable=wrong-import-position
 from app.classes.shared.main_models import installer, database
 
 from app.classes.shared.tasks import TasksManager
 from app.classes.shared.main_controller import Controller
 from app.classes.shared.migration import MigrationManager
 
-from app.classes.shared.cmd import MainPrompt
+from app.classes.shared.command import MainPrompt
 
 
 def do_intro():
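The helper calls used for the Docker and root checks above are defined elsewhere in the code base and are not shown in this commit. A minimal sketch of what they plausibly do, matching the names but with assumed bodies:

    import os

    def check_file_exists(path: str) -> bool:
        # assumed behaviour: plain existence test, used here for the /.dockerenv probe
        return os.path.exists(path)

    def checkRoot() -> bool:
        # assumed behaviour: true when running as root/Administrator
        if os.name == 'nt':
            import ctypes
            return ctypes.windll.shell32.IsUserAnAdmin() != 0
        return os.geteuid() == 0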
@@ -47,7 +50,7 @@ def setup_logging(debug=True):
 
     if os.path.exists(logging_config_file):
         # open our logging config file
-        with open(logging_config_file, 'rt') as f:
+        with open(logging_config_file, 'rt', encoding='utf-8') as f:
             logging_config = json.load(f)
             if debug:
                 logging_config['loggers']['']['level'] = 'DEBUG'
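The rest of setup_logging falls outside this hunk; given the json.load and logging.config import above, the loaded dict is presumably handed to logging.config.dictConfig. A hedged sketch of that overall pattern, with an illustrative path:

    import json
    import logging
    import logging.config

    def setup_logging_sketch(config_path='app/config/logging.json', debug=False):
        # load the dict-style logging config, bump the root logger in debug mode,
        # then apply it; mirrors the flow shown in the hunk above
        with open(config_path, 'rt', encoding='utf-8') as f:
            logging_config = json.load(f)
        if debug:
            logging_config['loggers']['']['level'] = 'DEBUG'
        logging.config.dictConfig(logging_config)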
@@ -56,11 +59,11 @@ def setup_logging(debug=True):
 
     else:
         logging.basicConfig(level=logging.DEBUG)
-        logging.warning("Unable to read logging config from {}".format(logging_config_file))
-        console.critical("Unable to read logging config from {}".format(logging_config_file))
+        logging.warning(f"Unable to read logging config from {logging_config_file}")
+        console.critical(f"Unable to read logging config from {logging_config_file}")
 
 
-""" Our Main Starter """
+# Our Main Starter
 if __name__ == '__main__':
     parser = argparse.ArgumentParser("Crafty Controller - A Server Management System")
 
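The argument definitions themselves are outside this hunk; args.verbose is used further down, so the parser presumably defines a flag along these lines (the exact option names and help text are assumptions):

    import argparse

    parser = argparse.ArgumentParser("Crafty Controller - A Server Management System")
    # assumed from the later use of args.verbose; the real definition is elsewhere in main.py
    parser.add_argument('-v', '--verbose', action='store_true', help='enable debug logging')
    args = parser.parse_args()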
@@ -81,21 +84,11 @@ if __name__ == '__main__':
 
     args = parser.parse_args()
 
-    if helper.check_file_exists('/.dockerenv'):
-        console.cyan("Docker environment detected!")
-    else:
-        if helper.checkRoot():
-            console.critical("Root detected. Root/Admin access denied. Run Crafty again with non-elevated permissions.")
-            time.sleep(5)
-            console.critical("Crafty shutting down. Root/Admin access denied.")
-            sys.exit(0)
-    helper.ensure_logging_setup()
-
     setup_logging(debug=args.verbose)
 
     # setting up the logger object
     logger = logging.getLogger(__name__)
-    console.cyan("Logging set to: {} ".format(logger.level))
+    console.cyan(f"Logging set to: {logger.level}")
 
     # print our pretty start message
     do_intro()
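A small aside on the f-string above: logger.level is an integer, so the message prints a number such as 10 or 30 rather than a name; logging.getLevelName converts it if a readable form is ever wanted. A quick illustration, not part of the commit:

    import logging

    logger = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG)
    print(logger.getEffectiveLevel())                        # 10
    print(logging.getLevelName(logger.getEffectiveLevel()))  # 'DEBUG'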
@@ -112,7 +105,8 @@ if __name__ == '__main__':
 
     if fresh_install:
         console.debug("Fresh install detected")
-        console.warning("We have detected a fresh install. Please be sure to forward Crafty's port, {}, through your router/firewall if you would like to be able to access Crafty remotely.".format(helper.get_setting('https_port')))
+        console.warning("We have detected a fresh install. Please be sure to forward Crafty's port, " +
+            f"{helper.get_setting('https_port')}, through your router/firewall if you would like to be able to access Crafty remotely.")
         installer.default_settings()
     else:
         console.debug("Existing install detected")
@@ -144,7 +138,8 @@ if __name__ == '__main__':
     console.info("Checking Internet. This may take a minute.")
 
     if not helper.check_internet():
-        console.warning("We have detected the machine running Crafty has no connection to the internet. Client connections to the server may be limited.")
+        console.warning("We have detected the machine running Crafty has no connection to the internet. " +
+            "Client connections to the server may be limited.")
 
     if not controller.check_system_user():
         controller.add_system_user()
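helper.check_internet is not shown in this diff; a connectivity probe of this kind is commonly a short TCP connect with a timeout, roughly like the sketch below (the host, port, and timeout are assumptions, not Crafty's actual values):

    import socket

    def check_internet(host="8.8.8.8", port=53, timeout=3) -> bool:
        # plausible implementation only; the real helper may differ
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False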
@@ -154,7 +149,7 @@ if __name__ == '__main__':
     project_root = os.path.dirname(__file__)
     controller.set_project_root(project_root)
 
-    def sigterm_handler(signum, current_stack_frame):
+    def sigterm_handler():
         print() # for newline
         logger.info("Recieved SIGTERM, stopping Crafty")
         console.info("Recieved SIGTERM, stopping Crafty")
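sigterm_handler now takes no parameters, while Python invokes signal handlers as handler(signum, frame); if the function is registered directly with signal.signal it therefore needs a small adapter. How Crafty registers it is outside this hunk; an illustrative sketch of the wrapped registration:

    import signal
    import sys

    def sigterm_handler():
        print()  # for newline
        sys.exit(0)

    # signal handlers receive (signum, frame); wrap the zero-argument handler
    signal.signal(signal.SIGTERM, lambda signum, frame: sigterm_handler())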