Merge branch 'dev' into 'xithical'

# Conflicts:
#   app/classes/shared/server.py
This commit is contained in:
xithical 2021-08-18 23:30:45 +00:00
commit c20ffb522d
32 changed files with 1220 additions and 210 deletions

BIN
.DS_Store vendored

Binary file not shown.

71
DBCHANGES.md Normal file
View File

@ -0,0 +1,71 @@
# Database change guide for contributors
When updating the database schema, modify the schema in `app/classes/shared/models.py` and create a new migration with the `migrations add <name>` command (in Crafty's prompt).
A full list of helper functions can be found in `app/classes/shared/models.py`.
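The command writes a timestamped file (for example `20210818233045_<name>.py`) into `app/migrations`; the generated skeleton, taken from the migrator's `MIGRATE_TEMPLATE`, looks like this:
```py
# Generated by database migrator
def migrate(migrator, database, **kwargs):
    """
    Write your migrations here.
    """

def rollback(migrator, database, **kwargs):
    """
    Write your rollback migrations here.
    """
```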
## Example migration files
### Rename column/field
```py
def migrate(migrator, database, **kwargs):
    migrator.rename_column('my_table', 'old_name', 'new_name')  # First argument can be model class OR table name

def rollback(migrator, database, **kwargs):
    migrator.rename_column('my_table', 'new_name', 'old_name')  # First argument can be model class OR table name
```
### Rename table/model
```py
def migrate(migrator, database, **kwargs):
    migrator.rename_table('old_name', 'new_name')  # First argument can be model class OR table name

def rollback(migrator, database, **kwargs):
    migrator.rename_table('new_name', 'old_name')  # First argument can be model class OR table name
```
### Create table/model
```py
import peewee
def migrate(migrator, database, **kwargs):
    class NewTable(peewee.Model):
        my_id = peewee.IntegerField(unique=True, primary_key=True)

        class Meta:
            table_name = 'new_table'
            database = database

    migrator.create_table(NewTable)

def rollback(migrator, database, **kwargs):
    migrator.drop_table('new_table')  # Can be model class OR table name
```
### Add columns/fields
```py
import peewee
def migrate(migrator, database, **kwargs):
    migrator.add_columns('table_name', new_field_name=peewee.CharField(default=""))  # First argument can be model class OR table name

def rollback(migrator, database, **kwargs):
    migrator.drop_columns('table_name', ['new_field_name'])  # First argument can be model class OR table name
```
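Once a migration file exists it can be managed from the same Crafty prompt: `migrations up` applies every unapplied migration, `migrations down` rolls back the most recent one, and `migrations done` / `migrations todo` / `migrations diff` show what is recorded in the database, what is present on disk, and the difference between the two (see the `do_migrations` handler further down in this commit).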

18
README.md Normal file
View File

@ -0,0 +1,18 @@
# Crafty Controller 4.0.0-alpha.2
> Python-based Control Panel for your Minecraft Server
## What is Crafty Controller?
Crafty Controller is a Minecraft Server Control Panel / Launcher. The purpose
of Crafty Controller is to launch a Minecraft Server in the background and present
a web interface for the server administrators to interact with their servers. Crafty
is compatible with Docker, Linux, Windows 7, Windows 8 and Windows 10.
## Documentation
Temporary documentation is available on [GitLab](https://gitlab.com/crafty-controller/crafty-commander/wikis/home)
## Meta
Project Homepage - https://craftycontrol.com
Discord Server - https://discord.gg/9VJPhCE
Git Repository - https://gitlab.com/crafty-controller/crafty-web

View File

@ -71,6 +71,29 @@ class ServerJars:
data = self._read_cache()
return data.get('servers')
def get_serverjar_data_sorted(self):
data = self.get_serverjar_data()
def str_to_int(x, counter=0):
try:
return ord(x[0]) + str_to_int(x[1:], counter + 1) + len(x)
except IndexError:
return 0
def to_int(x):
try:
return int(x)
except ValueError:
temp = x.split('-')
return to_int(temp[0]) + str_to_int(temp[1]) / 100000
sort_key_fn = lambda x: [to_int(y) for y in x.split('.')]
for key in data.keys():
data[key] = sorted(data[key], key=sort_key_fn)
return data
def _check_api_alive(self):
logger.info("Checking serverjars.com API status")

View File

@ -22,9 +22,10 @@ except ModuleNotFoundError as e:
class MainPrompt(cmd.Cmd, object):
def __init__(self, tasks_manager):
def __init__(self, tasks_manager, migration_manager):
super().__init__()
self.tasks_manager = tasks_manager
self.migration_manager = migration_manager
# overrides the default Prompt
prompt = "Crafty Controller v{} > ".format(helper.get_version_string())
@ -47,6 +48,27 @@ class MainPrompt(cmd.Cmd, object):
def do_exit(self, line):
self.universal_exit()
def do_migrations(self, line):
if (line == 'up'):
self.migration_manager.up()
elif (line == 'down'):
self.migration_manager.down()
elif (line == 'done'):
console.info(self.migration_manager.done)
elif (line == 'todo'):
console.info(self.migration_manager.todo)
elif (line == 'diff'):
console.info(self.migration_manager.diff)
elif (line == 'info'):
console.info('Done: {}'.format(self.migration_manager.done))
console.info('FS: {}'.format(self.migration_manager.todo))
console.info('Todo: {}'.format(self.migration_manager.diff))
elif (line.startswith('add ')):
migration_name = line[len('add '):]
self.migration_manager.create(migration_name, False)
else:
console.info('Unknown migration command')
def universal_exit(self):
logger.info("Stopping all server daemons / threads")
console.info("Stopping all server daemons / threads - This may take a few seconds")
@ -62,3 +84,7 @@ class MainPrompt(cmd.Cmd, object):
@staticmethod
def help_exit():
console.help("Stops the server if running, Exits the program")
@staticmethod
def help_migrations():
console.help("Only for advanced users. Use with caution")

View File

@ -316,7 +316,7 @@ class Controller:
server_command = 'java -Xms{}M -Xmx{}M -jar {} nogui'.format(helper.float_to_string(min_mem),
helper.float_to_string(max_mem),
full_jar_path)
print('command: ' + server_command)
logger.debug('command: ' + server_command)
server_log_file = "{}/logs/latest.log".format(new_server_dir)
server_stop = "stop"

View File

@ -40,6 +40,7 @@ class Helpers:
self.webroot = os.path.join(self.root_dir, 'app', 'frontend')
self.servers_dir = os.path.join(self.root_dir, 'servers')
self.backup_path = os.path.join(self.root_dir, 'backups')
self.migration_dir = os.path.join(self.root_dir, 'app', 'migrations')
self.session_file = os.path.join(self.root_dir, 'app', 'config', 'session.lock')
self.settings_file = os.path.join(self.root_dir, 'app', 'config', 'config.json')
@ -158,7 +159,7 @@ class Helpers:
version = "{}.{}.{}-{}".format(version_data.get('major', '?'),
version_data.get('minor', '?'),
version_data.get('sub', '?'),
version_data.get('patch', '?'))
version_data.get('meta', '?'))
return str(version)
def do_exit(self):
@ -194,6 +195,9 @@ class Helpers:
(r'(\[.+?/ERROR\])', r'<span class="mc-log-error">\1</span>'),
(r'(\w+?\[/\d+?\.\d+?\.\d+?\.\d+?\:\d+?\])', r'<span class="mc-log-keyword">\1</span>'),
(r'\[(\d\d:\d\d:\d\d)\]', r'<span class="mc-log-time">[\1]</span>'),
(r'(\[.+? INFO\])', r'<span class="mc-log-info">\1</span>'),
(r'(\[.+? WARN\])', r'<span class="mc-log-warn">\1</span>'),
(r'(\[.+? ERROR\])', r'<span class="mc-log-error">\1</span>')
]
# highlight users keywords
@ -581,4 +585,10 @@ class Helpers:
return True
@staticmethod
def remove_prefix(text, prefix):
if text.startswith(prefix):
return text[len(prefix):]
return text
helper = Helpers()

View File

@ -0,0 +1,532 @@
from datetime import datetime
import logging
import typing as t
import sys
import os
import re
from importlib import import_module
from functools import wraps
try:
from functools import cached_property
except ImportError:
from cached_property import cached_property
from app.classes.shared.helpers import helper
from app.classes.shared.console import console
logger = logging.getLogger(__name__)
try:
import peewee
from playhouse.migrate import (
SchemaMigrator as ScM,
SqliteMigrator as SqM,
Operation, SQL, operation, SqliteDatabase,
make_index_name, Context
)
except ModuleNotFoundError as e:
logger.critical("Import Error: Unable to load {} module".format(
e.name), exc_info=True)
console.critical("Import Error: Unable to load {} module".format(e.name))
sys.exit(1)
class MigrateHistory(peewee.Model):
"""
Presents the migration history in a database.
"""
name = peewee.CharField(unique=True)
migrated_at = peewee.DateTimeField(default=datetime.utcnow)
def __unicode__(self) -> str:
"""
String representation of this migration
"""
return self.name
MIGRATE_TABLE = 'migratehistory'
MIGRATE_TEMPLATE = '''# Generated by database migrator
def migrate(migrator, database, **kwargs):
"""
Write your migrations here.
"""
{migrate}
def rollback(migrator, database, **kwargs):
"""
Write your rollback migrations here.
"""
{rollback}'''
VOID: t.Callable = lambda m, d: None
def get_model(method):
"""
Convert string to model class.
"""
@wraps(method)
def wrapper(migrator, model, *args, **kwargs):
if isinstance(model, str):
return method(migrator, migrator.orm[model], *args, **kwargs)
return method(migrator, model, *args, **kwargs)
return wrapper
class Migrator(object):
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
"""
Initializes the migrator
"""
if isinstance(database, peewee.Proxy):
database = database.obj
self.database: SqliteDatabase = database
self.orm: t.Dict[str, peewee.Model] = {}
self.operations: t.List[Operation] = []
self.migrator = SqliteMigrator(database)
def run(self):
"""
Runs operations.
"""
for op in self.operations:
if isinstance(op, Operation):
op.run()
else:
op()
self.clean()
def clean(self):
"""
Cleans the operations.
"""
self.operations = list()
def sql(self, sql: str, *params):
"""
Executes raw SQL.
"""
self.operations.append(self.migrator.sql(sql, *params))
def create_table(self, model: peewee.Model) -> peewee.Model:
"""
Creates model and table in database.
"""
self.orm[model._meta.table_name] = model
model._meta.database = self.database
self.operations.append(model.create_table)
return model
@get_model
def drop_table(self, model: peewee.Model):
"""
Drops model and table from database.
"""
del self.orm[model._meta.table_name]
self.operations.append(self.migrator.drop_table(model))
@get_model
def add_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model:
"""
Creates new fields.
"""
for name, field in fields.items():
model._meta.add_field(name, field)
self.operations.append(self.migrator.add_column(
model._meta.table_name, field.column_name, field))
if field.unique:
self.operations.append(self.migrator.add_index(
model._meta.table_name, (field.column_name,), unique=True))
return model
@get_model
def change_columns(self, model: peewee.Model, **fields: peewee.Field) -> peewee.Model:
"""
Changes fields.
"""
for name, field in fields.items():
old_field = model._meta.fields.get(name, field)
old_column_name = old_field and old_field.column_name
model._meta.add_field(name, field)
if isinstance(old_field, peewee.ForeignKeyField):
self.operations.append(self.migrator.drop_foreign_key_constraint(
model._meta.table_name, old_column_name))
if old_column_name != field.column_name:
self.operations.append(
self.migrator.rename_column(
model._meta.table_name, old_column_name, field.column_name))
if isinstance(field, peewee.ForeignKeyField):
on_delete = field.on_delete if field.on_delete else 'RESTRICT'
on_update = field.on_update if field.on_update else 'RESTRICT'
self.operations.append(self.migrator.add_foreign_key_constraint(
model._meta.table_name, field.column_name,
field.rel_model._meta.table_name, field.rel_field.name,
on_delete, on_update))
continue
self.operations.append(self.migrator.change_column(
model._meta.table_name, field.column_name, field))
if field.unique == old_field.unique:
continue
if field.unique:
index = (field.column_name,), field.unique
self.operations.append(self.migrator.add_index(
model._meta.table_name, *index))
model._meta.indexes.append(index)
else:
index = (field.column_name,), old_field.unique
self.operations.append(self.migrator.drop_index(
model._meta.table_name, *index))
model._meta.indexes.remove(index)
return model
@get_model
def drop_columns(self, model: peewee.Model, names: str, **kwargs) -> peewee.Model:
"""
Removes fields from model.
"""
fields = [field for field in model._meta.fields.values()
if field.name in names]
cascade = kwargs.pop('cascade', True)
for field in fields:
self.__del_field__(model, field)
if field.unique:
index_name = make_index_name(
model._meta.table_name, [field.column_name])
self.operations.append(self.migrator.drop_index(
model._meta.table_name, index_name))
self.operations.append(
self.migrator.drop_column(
model._meta.table_name, field.column_name, cascade=False))
return model
def __del_field__(self, model: peewee.Model, field: peewee.Field):
"""
Deletes field from model.
"""
model._meta.remove_field(field.name)
delattr(model, field.name)
if isinstance(field, peewee.ForeignKeyField):
obj_id_name = field.column_name
if field.column_name == field.name:
obj_id_name += '_id'
delattr(model, obj_id_name)
delattr(field.rel_model, field.backref)
@get_model
def rename_column(self, model: peewee.Model, old_name: str, new_name: str) -> peewee.Model:
"""
Renames field in model.
"""
field = model._meta.fields[old_name]
if isinstance(field, peewee.ForeignKeyField):
old_name = field.column_name
self.__del_field__(model, field)
field.name = field.column_name = new_name
model._meta.add_field(new_name, field)
if isinstance(field, peewee.ForeignKeyField):
field.column_name = new_name = field.column_name + '_id'
self.operations.append(self.migrator.rename_column(
model._meta.table_name, old_name, new_name))
return model
@get_model
def rename_table(self, model: peewee.Model, new_name: str) -> peewee.Model:
"""
Renames table in database.
"""
old_name = model._meta.table_name
del self.orm[model._meta.table_name]
model._meta.table_name = new_name
self.orm[model._meta.table_name] = model
self.operations.append(self.migrator.rename_table(old_name, new_name))
return model
@get_model
def add_index(self, model: peewee.Model, *columns: str, **kwargs) -> peewee.Model:
"""Create indexes."""
unique = kwargs.pop('unique', False)
model._meta.indexes.append((columns, unique))
columns_ = []
for col in columns:
field = model._meta.fields.get(col)
if len(columns) == 1:
field.unique = unique
field.index = not unique
if isinstance(field, peewee.ForeignKeyField):
col = col + '_id'
columns_.append(col)
self.operations.append(self.migrator.add_index(
model._meta.table_name, columns_, unique=unique))
return model
@get_model
def drop_index(self, model: peewee.Model, *columns: str) -> peewee.Model:
"""Drop indexes."""
columns_ = []
for col in columns:
field = model._meta.fields.get(col)
if not field:
continue
if len(columns) == 1:
field.unique = field.index = False
if isinstance(field, peewee.ForeignKeyField):
col = col + '_id'
columns_.append(col)
index_name = make_index_name(model._meta.table_name, columns_)
model._meta.indexes = [(cols, _) for (
cols, _) in model._meta.indexes if columns != cols]
self.operations.append(self.migrator.drop_index(
model._meta.table_name, index_name))
return model
@get_model
def add_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
"""Add not null."""
for name in names:
field = model._meta.fields[name]
field.null = False
self.operations.append(self.migrator.add_not_null(
model._meta.table_name, field.column_name))
return model
@get_model
def drop_not_null(self, model: peewee.Model, *names: str) -> peewee.Model:
"""Drop not null."""
for name in names:
field = model._meta.fields[name]
field.null = True
self.operations.append(self.migrator.drop_not_null(
model._meta.table_name, field.column_name))
return model
@get_model
def add_default(self, model: peewee.Model, name: str, default: t.Any) -> peewee.Model:
"""Add default."""
field = model._meta.fields[name]
model._meta.defaults[field] = field.default = default
self.operations.append(self.migrator.apply_default(
model._meta.table_name, name, field))
return model
class SqliteMigrator(SqM):
def drop_table(self, model):
return lambda: model.drop_table(cascade=False)
@operation
def change_column(self, table: str, column_name: str, field: peewee.Field):
operations = [self.alter_change_column(table, column_name, field)]
if not field.null:
operations.extend([self.add_not_null(table, column_name)])
return operations
def alter_change_column(self, table: str, column_name: str, field: peewee.Field) -> Operation:
return self._update_column(table, column_name, lambda x, y: y)
@operation
def sql(self, sql: str, *params) -> SQL:
"""
Executes raw SQL.
"""
return SQL(sql, *params)
def alter_add_column(
self, table: str, column_name: str, field: peewee.Field, **kwargs) -> Operation:
"""
Fixes field name for ForeignKeys.
"""
name = field.name
op = super().alter_add_column(
table, column_name, field, **kwargs)
if isinstance(field, peewee.ForeignKeyField):
field.name = name
return op
class MigrationManager(object):
filemask = re.compile(r"[\d]+_[^\.]+\.py$")
def __init__(self, database: t.Union[peewee.Database, peewee.Proxy]):
"""
Initializes the migration manager.
"""
if not isinstance(database, (peewee.Database, peewee.Proxy)):
raise RuntimeError('Invalid database: {}'.format(database))
self.database = database
@cached_property
def model(self) -> peewee.Model:
"""
Initialize and cache the MigrationHistory model.
"""
MigrateHistory._meta.database = self.database
MigrateHistory._meta.table_name = 'migratehistory'
MigrateHistory._meta.schema = None
MigrateHistory.create_table(True)
return MigrateHistory
@property
def done(self) -> t.List[str]:
"""
Scans migrations in the database.
"""
return [mm.name for mm in self.model.select().order_by(self.model.id)]
@property
def todo(self):
"""
Scans migrations in the file system.
"""
if not os.path.exists(helper.migration_dir):
logger.warning('Migration directory: {} does not exist.'.format(
helper.migration_dir))
os.makedirs(helper.migration_dir)
return sorted(f[:-3] for f in os.listdir(helper.migration_dir) if self.filemask.match(f))
@property
def diff(self) -> t.List[str]:
"""
Calculates difference between the filesystem and the database.
"""
done = set(self.done)
return [name for name in self.todo if name not in done]
@cached_property
def migrator(self) -> Migrator:
"""
Create the migrator and set it up with fake migrations.
"""
migrator = Migrator(self.database)
for name in self.done:
self.up_one(name, migrator)
return migrator
def compile(self, name, migrate='', rollback=''):
"""
Compiles a migration.
"""
name = datetime.utcnow().strftime('%Y%m%d%H%M%S') + '_' + name
filename = name + '.py'
path = os.path.join(helper.migration_dir, filename)
with open(path, 'w') as f:
f.write(MIGRATE_TEMPLATE.format(
migrate=migrate, rollback=rollback, name=filename))
return name
def create(self, name: str = 'auto', auto: bool = False) -> t.Optional[str]:
"""
Creates a migration.
"""
migrate = rollback = ''
if auto:
raise NotImplementedError
logger.info('Creating migration "{}"'.format(name))
name = self.compile(name, migrate, rollback)
logger.info('Migration has been created as "{}"'.format(name))
return name
def clear(self):
"""Clear migrations."""
self.model.delete().execute()
def up(self, name: t.Optional[str] = None):
"""
Runs all unapplied migrations.
"""
logger.info('Starting migrations')
console.info('Starting migrations')
done = []
diff = self.diff
if not diff:
logger.info('There is nothing to migrate')
console.info('There is nothing to migrate')
return done
migrator = self.migrator
for mname in diff:
done.append(self.up_one(mname, migrator))
if name and name == mname:
break
return done
def read(self, name: str):
"""
Reads a migration from a file.
"""
call_params = dict()
if os.name == 'nt' and sys.version_info >= (3, 0):
# if system is windows - force utf-8 encoding
call_params['encoding'] = 'utf-8'
with open(os.path.join(helper.migration_dir, name + '.py'), **call_params) as f:
code = f.read()
scope = {}
code = compile(code, '<string>', 'exec', dont_inherit=True)
exec(code, scope, None)
return scope.get('migrate', VOID), scope.get('rollback', VOID)
def up_one(self, name: str, migrator: Migrator,
rollback: bool = False) -> str:
"""
Runs a migration with a given name.
"""
try:
migrate_fn, rollback_fn = self.read(name)
with self.database.transaction():
if rollback:
logger.info('Rolling back "{}"'.format(name))
rollback_fn(migrator, self.database)
migrator.run()
self.model.delete().where(self.model.name == name).execute()
else:
logger.info('Migrate "{}"'.format(name))
migrate_fn(migrator, self.database)
migrator.run()
if name not in self.done:
self.model.create(name=name)
logger.info('Done "{}"'.format(name))
return name
except Exception:
self.database.rollback()
operation = 'Rollback' if rollback else 'Migration'
logger.exception('{} failed: {}'.format(operation, name))
raise
def down(self, name: t.Optional[str] = None):
"""
Rolls back migrations.
"""
if not self.done:
raise RuntimeError('No migrations are found.')
name = self.done[-1]
migrator = self.migrator
self.up_one(name, migrator, True)
logger.warning('Rolled back migration: {}'.format(name))

View File

@ -22,32 +22,12 @@ except ModuleNotFoundError as e:
console.critical("Import Error: Unable to load {} module".format(e.name))
sys.exit(1)
schema_version = (0, 1, 0) # major, minor, patch semver
database = SqliteDatabase(helper.db_path, pragmas={
'journal_mode': 'wal',
'cache_size': -1024 * 10})
class BaseModel(Model):
class Meta:
database = database
class SchemaVersion(BaseModel):
# DO NOT EVER CHANGE THE SCHEMA OF THIS TABLE
# (unless we have a REALLY good reason to)
# There will only ever be one row, and it allows the database loader to detect
# what it needs to do on major version upgrades so you don't have to wipe the DB
# every time you upgrade
schema_major = IntegerField()
schema_minor = IntegerField()
schema_patch = IntegerField()
class Meta:
table_name = 'schema_version'
primary_key = CompositeKey('schema_major', 'schema_minor', 'schema_patch')
class Users(BaseModel):
class Users(Model):
user_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
last_login = DateTimeField(default=datetime.datetime.now)
@ -61,9 +41,10 @@ class Users(BaseModel):
class Meta:
table_name = "users"
database = database
class Roles(BaseModel):
class Roles(Model):
role_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
last_update = DateTimeField(default=datetime.datetime.now)
@ -71,18 +52,20 @@ class Roles(BaseModel):
class Meta:
table_name = "roles"
database = database
class User_Roles(BaseModel):
class User_Roles(Model):
user_id = ForeignKeyField(Users, backref='user_role')
role_id = ForeignKeyField(Roles, backref='user_role')
class Meta:
table_name = 'user_roles'
primary_key = CompositeKey('user_id', 'role_id')
database = database
class Audit_Log(BaseModel):
class Audit_Log(Model):
audit_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
user_name = CharField(default="")
@ -91,8 +74,11 @@ class Audit_Log(BaseModel):
server_id = IntegerField(default=None, index=True) # When auditing global events, use server ID 0
log_msg = TextField(default='')
class Meta:
database = database
class Host_Stats(BaseModel):
class Host_Stats(Model):
time = DateTimeField(default=datetime.datetime.now, index=True)
boot_time = CharField(default="")
cpu_usage = FloatField(default=0)
@ -106,9 +92,10 @@ class Host_Stats(BaseModel):
class Meta:
table_name = "host_stats"
database = database
class Servers(BaseModel):
class Servers(Model):
server_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
server_uuid = CharField(default="", index=True)
@ -129,27 +116,30 @@ class Servers(BaseModel):
class Meta:
table_name = "servers"
database = database
class User_Servers(BaseModel):
class User_Servers(Model):
user_id = ForeignKeyField(Users, backref='user_server')
server_id = ForeignKeyField(Servers, backref='user_server')
class Meta:
table_name = 'user_servers'
primary_key = CompositeKey('user_id', 'server_id')
database = database
class Role_Servers(BaseModel):
class Role_Servers(Model):
role_id = ForeignKeyField(Roles, backref='role_server')
server_id = ForeignKeyField(Servers, backref='role_server')
class Meta:
table_name = 'role_servers'
primary_key = CompositeKey('role_id', 'server_id')
database = database
class Server_Stats(BaseModel):
class Server_Stats(Model):
stats_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
server_id = ForeignKeyField(Servers, backref='server', index=True)
@ -172,9 +162,10 @@ class Server_Stats(BaseModel):
class Meta:
table_name = "server_stats"
database = database
class Commands(BaseModel):
class Commands(Model):
command_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
server_id = ForeignKeyField(Servers, backref='server', index=True)
@ -185,9 +176,10 @@ class Commands(BaseModel):
class Meta:
table_name = "commands"
database = database
class Webhooks(BaseModel):
class Webhooks(Model):
id = AutoField()
name = CharField(max_length=64, unique=True, index=True)
method = CharField(default="POST")
@ -197,8 +189,10 @@ class Webhooks(BaseModel):
class Meta:
table_name = "webhooks"
database = database
class Schedules(BaseModel):
class Schedules(Model):
schedule_id = IntegerField(unique=True, primary_key=True)
server_id = ForeignKeyField(Servers, backref='schedule_server')
enabled = BooleanField()
@ -211,8 +205,10 @@ class Schedules(BaseModel):
class Meta:
table_name = 'schedules'
database = database
class Backups(BaseModel):
class Backups(Model):
directories = CharField(null=True)
max_backups = IntegerField()
server_id = ForeignKeyField(Servers, backref='backups_server')
@ -220,39 +216,15 @@ class Backups(BaseModel):
class Meta:
table_name = 'backups'
database = database
class db_builder:
@staticmethod
def create_tables():
with database:
database.create_tables([
Backups,
Users,
Roles,
User_Roles,
User_Servers,
Host_Stats,
Webhooks,
Servers,
Role_Servers,
Server_Stats,
Commands,
Audit_Log,
SchemaVersion,
Schedules
])
@staticmethod
def default_settings():
logger.info("Fresh Install Detected - Creating Default Settings")
console.info("Fresh Install Detected - Creating Default Settings")
SchemaVersion.insert({
SchemaVersion.schema_major: schema_version[0],
SchemaVersion.schema_minor: schema_version[1],
SchemaVersion.schema_patch: schema_version[2]
}).execute()
default_data = helper.find_default_password()
username = default_data.get("username", 'admin')
@ -279,39 +251,8 @@ class db_builder:
return True
pass
@staticmethod
def check_schema_version():
svs = SchemaVersion.select().execute()
if len(svs) != 1:
raise exceptions.SchemaError("Multiple or no schema versions detected - potentially a failed upgrade?")
sv = svs[0]
svt = (sv.schema_major, sv.schema_minor, sv.schema_patch)
logger.debug("Schema: found {}, expected {}".format(svt, schema_version))
console.debug("Schema: found {}, expected {}".format(svt, schema_version))
if sv.schema_major > schema_version[0]:
raise exceptions.SchemaError("Major version mismatch - possible code reversion")
elif sv.schema_major < schema_version[0]:
db_shortcuts.upgrade_schema()
if sv.schema_minor > schema_version[1]:
logger.warning("Schema minor mismatch detected: found {}, expected {}. Proceed with caution".format(svt, schema_version))
console.warning("Schema minor mismatch detected: found {}, expected {}. Proceed with caution".format(svt, schema_version))
elif sv.schema_minor < schema_version[1]:
db_shortcuts.upgrade_schema()
if sv.schema_patch > schema_version[2]:
logger.info("Schema patch mismatch detected: found {}, expected {}. Proceed with caution".format(svt, schema_version))
console.info("Schema patch mismatch detected: found {}, expected {}. Proceed with caution".format(svt, schema_version))
elif sv.schema_patch < schema_version[2]:
db_shortcuts.upgrade_schema()
logger.info("Schema validation successful! {}".format(schema_version))
class db_shortcuts:
@staticmethod
def upgrade_schema():
raise NotImplemented("I don't know who you are or how you reached this code, but this should NOT have happened. Please report it to the developer with due haste.")
@staticmethod
def return_rows(query):
rows = []

View File

@ -13,6 +13,7 @@ import zipfile
from threading import Thread
import shutil
import zlib
import html
from app.classes.shared.helpers import helper
@ -32,6 +33,50 @@ except ModuleNotFoundError as e:
sys.exit(1)
class ServerOutBuf:
lines = {}
def __init__(self, p, server_id):
self.p = p
self.server_id = str(server_id)
# Buffers text for virtual_terminal_lines config number of lines
self.max_lines = helper.get_setting('virtual_terminal_lines')
self.line_buffer = ''
ServerOutBuf.lines[self.server_id] = []
def check(self):
while self.p.isalive():
char = self.p.read(1)
if char == os.linesep:
ServerOutBuf.lines[self.server_id].append(self.line_buffer)
self.new_line_handler(self.line_buffer)
self.line_buffer = ''
# Limit list length to self.max_lines:
if len(ServerOutBuf.lines[self.server_id]) > self.max_lines:
ServerOutBuf.lines[self.server_id].pop(0)
else:
self.line_buffer += char
def new_line_handler(self, new_line):
new_line = re.sub('(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )', '', new_line)
new_line = re.sub('[A-z]{2}\b\b', '', new_line)
highlighted = helper.log_colors(html.escape(new_line))
logger.debug('Broadcasting new virtual terminal line')
# TODO: Do not send data to clients who do not have permission to view this server's console
websocket_helper.broadcast_page_params(
'/panel/server_detail',
{
'id': self.server_id
},
'vterm_new_line',
{
'line': highlighted + '<br />'
}
)
class Server:
def __init__(self, stats):
@ -90,7 +135,7 @@ class Server:
def run_threaded_server(self):
# start the server
self.server_thread = threading.Thread(target=self.start_server, daemon=True)
self.server_thread = threading.Thread(target=self.start_server, daemon=True, name='{}_server_thread'.format(self.server_id))
self.server_thread.start()
def setup_server_run_command(self):
@ -150,6 +195,13 @@ class Server:
return False
websocket_helper.broadcast('send_start_reload', {
})
self.process = pexpect.spawn(self.server_command, cwd=self.server_path, timeout=None, encoding='utf-8')
out_buf = ServerOutBuf(self.process, self.server_id)
logger.debug('Starting virtual terminal listener for server {}'.format(self.name))
threading.Thread(target=out_buf.check, daemon=True, name='{}_virtual_terminal'.format(self.server_id)).start()
self.is_crashed = False
self.start_time = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))

View File

@ -113,16 +113,6 @@ class TasksManager:
elif command == "update_executable":
svr.jar_update()
elif command == "delete_server":
logger.info(
"Removing server from panel for server: {}".format(c['server_id']['server_name']))
self.controller.remove_server(c['server_id']['server_id'], False)
elif command == "delete_server_files":
logger.info(
"Removing server and all associated files for server: {}".format(c['server_id']['server_name']))
self.controller.remove_server(c['server_id']['server_id'], True)
db_helper.mark_command_complete(c.get('command_id', None))
time.sleep(1)
@ -210,7 +200,7 @@ class TasksManager:
host_stats = db_helper.get_latest_hosts_stats()
if len(websocket_helper.clients) > 0:
# There are clients
websocket_helper.broadcast('update_host_stats', {
websocket_helper.broadcast_page('/panel/dashboard', 'update_host_stats', {
'cpu_usage': host_stats.get('cpu_usage'),
'cpu_cores': host_stats.get('cpu_cores'),
'cpu_cur_freq': host_stats.get('cpu_cur_freq'),
@ -218,13 +208,9 @@ class TasksManager:
'mem_percent': host_stats.get('mem_percent'),
'mem_usage': host_stats.get('mem_usage')
})
time.sleep(4)
else:
# Stats are same
time.sleep(8)
time.sleep(4)
def log_watcher(self):
console.debug('in log_watcher')
helper.check_for_old_logs(db_helper)
schedule.every(6).hours.do(lambda: helper.check_for_old_logs(db_helper)).tag('log-mgmt')

View File

@ -13,19 +13,19 @@ class Translation():
self.translations_path = os.path.join(helper.root_dir, 'app', 'translations')
self.cached_translation = None
self.cached_translation_lang = None
self.lang_file_exists = []
def translate(self, page, word):
translated_word = None
lang = helper.get_setting('language')
fallback_lang = 'en_EN'
lang_file_exists = helper.check_file_exists(
os.path.join(
self.translations_path, lang + '.json'
)
)
if lang not in self.lang_file_exists and \
helper.check_file_exists(os.path.join(self.translations_path, lang + '.json')):
self.lang_file_exists.append(lang)
translated_word = self.translate_inner(page, word, lang) \
if lang_file_exists else self.translate_inner(page, word, fallback_lang)
if lang in self.lang_file_exists else self.translate_inner(page, word, fallback_lang)
if translated_word:
if isinstance(translated_word, dict): return json.dumps(translated_word)

View File

@ -5,12 +5,15 @@ import tornado.escape
import bleach
import os
import shutil
import html
import re
from app.classes.shared.console import console
from app.classes.shared.models import Users, installer
from app.classes.web.base_handler import BaseHandler
from app.classes.shared.models import db_helper
from app.classes.shared.helpers import helper
from app.classes.shared.server import ServerOutBuf
logger = logging.getLogger(__name__)
@ -47,7 +50,7 @@ class AjaxHandler(BaseHandler):
if server_id is None:
logger.warning("Server ID not found in server_log ajax call")
self.redirect("/panel/error?error=Server ID Not Found")
return False
return
server_id = bleach.clean(server_id)
@ -55,20 +58,23 @@ class AjaxHandler(BaseHandler):
if not server_data:
logger.warning("Server Data not found in server_log ajax call")
self.redirect("/panel/error?error=Server ID Not Found")
return
if not server_data['log_path']:
logger.warning("Log path not found in server_log ajax call ({})".format(server_id))
if full_log:
log_lines = helper.get_setting('max_log_lines')
data = helper.tail_file(server_data['log_path'], log_lines)
else:
log_lines = helper.get_setting('virtual_terminal_lines')
data = ServerOutBuf.lines.get(server_id, [])
data = helper.tail_file(server_data['log_path'], log_lines)
for d in data:
try:
line = helper.log_colors(d)
d = re.sub('(\033\\[(0;)?[0-9]*[A-z]?(;[0-9])?m?)|(> )', '', d)
d = re.sub('[A-z]{2}\b\b', '', d)
line = helper.log_colors(html.escape(d))
self.write('{}<br />'.format(line))
# self.write(d.encode("utf-8"))
@ -85,14 +91,14 @@ class AjaxHandler(BaseHandler):
file_path = self.get_argument('file_path', None)
server_id = self.get_argument('id', None)
if not self.check_server_id(server_id, 'get_file'): return False
if not self.check_server_id(server_id, 'get_file'): return
else: server_id = bleach.clean(server_id)
if not helper.in_path(db_helper.get_server_data_by_id(server_id)['path'], file_path)\
or not helper.check_file_exists(os.path.abspath(file_path)):
logger.warning("Invalid path in get_file ajax call ({})".format(file_path))
console.warning("Invalid path in get_file ajax call ({})".format(file_path))
return False
return
error = None
@ -113,7 +119,7 @@ class AjaxHandler(BaseHandler):
elif page == "get_tree":
server_id = self.get_argument('id', None)
if not self.check_server_id(server_id, 'get_tree'): return False
if not self.check_server_id(server_id, 'get_tree'): return
else: server_id = bleach.clean(server_id)
self.write(db_helper.get_server_data_by_id(server_id)['path'] + '\n' +
@ -149,16 +155,15 @@ class AjaxHandler(BaseHandler):
file_name = self.get_body_argument('file_name', default=None, strip=True)
file_path = os.path.join(file_parent, file_name)
server_id = self.get_argument('id', None)
print(server_id)
if not self.check_server_id(server_id, 'create_file'): return False
if not self.check_server_id(server_id, 'create_file'): return
else: server_id = bleach.clean(server_id)
if not helper.in_path(db_helper.get_server_data_by_id(server_id)['path'], file_path) \
or helper.check_file_exists(os.path.abspath(file_path)):
logger.warning("Invalid path in create_file ajax call ({})".format(file_path))
console.warning("Invalid path in create_file ajax call ({})".format(file_path))
return False
return
# Create the file by opening it
with open(file_path, 'w') as file_object:
@ -169,16 +174,15 @@ class AjaxHandler(BaseHandler):
dir_name = self.get_body_argument('dir_name', default=None, strip=True)
dir_path = os.path.join(dir_parent, dir_name)
server_id = self.get_argument('id', None)
print(server_id)
if not self.check_server_id(server_id, 'create_dir'): return False
if not self.check_server_id(server_id, 'create_dir'): return
else: server_id = bleach.clean(server_id)
if not helper.in_path(db_helper.get_server_data_by_id(server_id)['path'], dir_path) \
or helper.check_path_exists(os.path.abspath(dir_path)):
logger.warning("Invalid path in create_dir ajax call ({})".format(dir_path))
console.warning("Invalid path in create_dir ajax call ({})".format(dir_path))
return False
return
# Create the directory
os.mkdir(dir_path)
@ -191,7 +195,7 @@ class AjaxHandler(BaseHandler):
console.warning("delete {} for server {}".format(file_path, server_id))
if not self.check_server_id(server_id, 'del_file'): return False
if not self.check_server_id(server_id, 'del_file'): return
else: server_id = bleach.clean(server_id)
server_info = db_helper.get_server_data_by_id(server_id)
@ -200,7 +204,7 @@ class AjaxHandler(BaseHandler):
or not helper.check_file_exists(os.path.abspath(file_path)):
logger.warning("Invalid path in del_file ajax call ({})".format(file_path))
console.warning("Invalid path in del_file ajax call ({})".format(file_path))
return False
return
# Delete the file
os.remove(file_path)
@ -208,11 +212,10 @@ class AjaxHandler(BaseHandler):
elif page == "del_dir":
dir_path = self.get_body_argument('dir_path', default=None, strip=True)
server_id = self.get_argument('id', None)
print(server_id)
console.warning("delete {} for server {}".format(dir_path, server_id))
if not self.check_server_id(server_id, 'del_dir'): return False
if not self.check_server_id(server_id, 'del_dir'): return
else: server_id = bleach.clean(server_id)
server_info = db_helper.get_server_data_by_id(server_id)
@ -220,30 +223,39 @@ class AjaxHandler(BaseHandler):
or not helper.check_path_exists(os.path.abspath(dir_path)):
logger.warning("Invalid path in del_file ajax call ({})".format(dir_path))
console.warning("Invalid path in del_file ajax call ({})".format(dir_path))
return False
return
# Delete the directory
# os.rmdir(dir_path) # Would only remove empty directories
shutil.rmtree(dir_path) # Removes also when there are contents
elif page == "delete_server":
server_id = self.get_argument('id', None)
logger.info(
"Removing server from panel for server: {}".format(db_helper.get_server_friendly_name(server_id)))
self.controller.remove_server(server_id, False)
elif page == "delete_server_files":
server_id = self.get_argument('id', None)
logger.info(
"Removing server and all associated files for server: {}".format(db_helper.get_server_friendly_name(server_id)))
self.controller.remove_server(server_id, True)
@tornado.web.authenticated
def put(self, page):
if page == "save_file":
file_contents = self.get_body_argument('file_contents', default=None, strip=True)
file_path = self.get_body_argument('file_path', default=None, strip=True)
server_id = self.get_argument('id', None)
print(file_contents)
print(file_path)
print(server_id)
if not self.check_server_id(server_id, 'save_file'): return False
if not self.check_server_id(server_id, 'save_file'): return
else: server_id = bleach.clean(server_id)
if not helper.in_path(db_helper.get_server_data_by_id(server_id)['path'], file_path)\
or not helper.check_file_exists(os.path.abspath(file_path)):
logger.warning("Invalid path in save_file ajax call ({})".format(file_path))
console.warning("Invalid path in save_file ajax call ({})".format(file_path))
return False
return
# Open the file in write mode and store the content in file_object
with open(file_path, 'w') as file_object:
@ -253,21 +265,20 @@ class AjaxHandler(BaseHandler):
item_path = self.get_body_argument('item_path', default=None, strip=True)
new_item_name = self.get_body_argument('new_item_name', default=None, strip=True)
server_id = self.get_argument('id', None)
print(server_id)
if not self.check_server_id(server_id, 'rename_item'): return False
if not self.check_server_id(server_id, 'rename_item'): return
else: server_id = bleach.clean(server_id)
if item_path is None or new_item_name is None:
logger.warning("Invalid path(s) in rename_item ajax call")
console.warning("Invalid path(s) in rename_item ajax call")
return False
return
if not helper.in_path(db_helper.get_server_data_by_id(server_id)['path'], item_path) \
or not helper.check_path_exists(os.path.abspath(item_path)):
logger.warning("Invalid old name path in rename_item ajax call ({})".format(server_id))
console.warning("Invalid old name path in rename_item ajax call ({})".format(server_id))
return False
return
new_item_path = os.path.join(os.path.split(item_path)[0], new_item_name)
@ -275,7 +286,7 @@ class AjaxHandler(BaseHandler):
or helper.check_path_exists(os.path.abspath(new_item_path)):
logger.warning("Invalid new name path in rename_item ajax call ({})".format(server_id))
console.warning("Invalid new name path in rename_item ajax call ({})".format(server_id))
return False
return
# RENAME
os.rename(item_path, new_item_path)
@ -283,7 +294,7 @@ class AjaxHandler(BaseHandler):
if server_id is None:
logger.warning("Server ID not defined in {} ajax call ({})".format(page_name, server_id))
console.warning("Server ID not defined in {} ajax call ({})".format(page_name, server_id))
return False
return
else:
server_id = bleach.clean(server_id)
@ -291,5 +302,5 @@ class AjaxHandler(BaseHandler):
if not db_helper.server_id_exists(server_id):
logger.warning("Server ID not found in {} ajax call ({})".format(page_name, server_id))
console.warning("Server ID not found in {} ajax call ({})".format(page_name, server_id))
return False
return
return True

View File

@ -41,11 +41,11 @@ class ApiHandler(BaseHandler):
else:
logging.debug("Auth unsuccessful")
self.access_denied("unknown", "the user provided an invalid token")
return False
return
except Exception as e:
log.warning("An error occured while authenticating an API user: %s", e)
self.access_denied("unknown"), "an error occured while authenticating the user"
return False
return
class ServersStats(ApiHandler):

View File

@ -201,7 +201,7 @@ class PanelHandler(BaseHandler):
#if not db_helper.server_id_authorized(server_id, exec_user_id):
if not db_helper.server_id_authorized_from_roles(int(server_id), exec_user_id):
self.redirect("/panel/error?error=Invalid Server ID")
return False
return
server_info = db_helper.get_server_data_by_id(server_id)
backup_file = os.path.abspath(os.path.join(server_info["backup_path"], file))
@ -250,7 +250,7 @@ class PanelHandler(BaseHandler):
#if not db_helper.server_id_authorized(server_id, exec_user_id):
if not db_helper.server_id_authorized_from_roles(int(server_id), exec_user_id):
self.redirect("/panel/error?error=Invalid Server ID")
return False
return
server = self.controller.get_server_obj(server_id).backup_server()
self.redirect("/panel/server_detail?id={}&subpage=backup".format(server_id))

View File

@ -37,9 +37,6 @@ class PublicHandler(BaseHandler):
def get(self, page=None):
self.clear_cookie("user")
self.clear_cookie("user_data")
error = bleach.clean(self.get_argument('error', "Invalid Login!"))
page_data = {
@ -59,9 +56,16 @@ class PublicHandler(BaseHandler):
elif page == "error":
template = "public/error.html"
elif page == "logout":
self.clear_cookie("user")
self.clear_cookie("user_data")
self.redirect('/public/login')
return
# if we have no page, let's go to login
else:
self.redirect('/public/login')
return
self.render(
template,
@ -82,14 +86,18 @@ class PublicHandler(BaseHandler):
# if we don't have a user
if not user_data:
next_page = "/public/error?error=Login Failed"
self.clear_cookie("user")
self.clear_cookie("user_data")
self.redirect(next_page)
return False
return
# if they are disabled
if not user_data.enabled:
next_page = "/public/error?error=Login Failed"
self.clear_cookie("user")
self.clear_cookie("user_data")
self.redirect(next_page)
return False
return
login_result = helper.verify_pass(entered_password, user_data.password)
@ -118,6 +126,8 @@ class PublicHandler(BaseHandler):
next_page = "/panel/dashboard"
self.redirect(next_page)
else:
self.clear_cookie("user")
self.clear_cookie("user_data")
# log this failed login attempt
db_helper.add_to_audit_log(user_data.user_id, "Tried to log in", 0, self.get_remote_ip())
self.redirect('/public/error?error=Login Failed')

View File

@ -61,7 +61,7 @@ class ServerHandler(BaseHandler):
if page == "step1":
page_data['server_types'] = server_jar_obj.get_serverjar_data()
page_data['server_types'] = server_jar_obj.get_serverjar_data_sorted()
template = "server/wizard.html"
self.render(
@ -94,7 +94,7 @@ class ServerHandler(BaseHandler):
for server in db_helper.get_all_defined_servers():
if server['server_name'] == name:
return True
return False
return
server_data = db_helper.get_server_data_by_id(server_id)
server_uuid = server_data.get('server_uuid')
@ -105,8 +105,6 @@ class ServerHandler(BaseHandler):
name_counter += 1
new_server_name = server_data.get('server_name') + " (Copy {})".format(name_counter)
console.debug('new_server_name: "{}"'.format(new_server_name))
new_server_uuid = helper.create_uuid()
while os.path.exists(os.path.join(helper.servers_dir, new_server_uuid)):
new_server_uuid = helper.create_uuid()
@ -143,7 +141,6 @@ class ServerHandler(BaseHandler):
}).execute()
self.controller.init_all_servers()
console.debug('initted all servers')
return
@ -163,14 +160,14 @@ class ServerHandler(BaseHandler):
if not server_name:
self.redirect("/panel/error?error=Server name cannot be empty!")
return False
return
if import_type == 'import_jar':
good_path = self.controller.verify_jar_server(import_server_path, import_server_jar)
if not good_path:
self.redirect("/panel/error?error=Server path or Server Jar not found!")
return False
return
new_server_id = self.controller.import_jar_server(server_name, import_server_path,import_server_jar, min_mem, max_mem, port)
db_helper.add_to_audit_log(exec_user_data['user_id'],
@ -182,12 +179,12 @@ class ServerHandler(BaseHandler):
good_path = self.controller.verify_zip_server(import_server_path)
if not good_path:
self.redirect("/panel/error?error=Zip file not found!")
return False
return
new_server_id = self.controller.import_zip_server(server_name, import_server_path,import_server_jar, min_mem, max_mem, port)
if new_server_id == "false":
self.redirect("/panel/error?error=Zip file not accessible! You can fix this permissions issue with sudo chown -R crafty:crafty {} And sudo chmod 2775 -R {}".format(import_server_path, import_server_path))
return False
return
db_helper.add_to_audit_log(exec_user_data['user_id'],
"imported a zip server named \"{}\"".format(server_name), # Example: Admin imported a server named "old creative"
new_server_id,
@ -195,7 +192,7 @@ class ServerHandler(BaseHandler):
else:
if len(server_parts) != 2:
self.redirect("/panel/error?error=Invalid server data")
return False
return
server_type, server_version = server_parts
# todo: add server type check here and call the correct server add functions if not a jar
new_server_id = self.controller.create_jar_server(server_type, server_version, server_name, min_mem, max_mem, port)

View File

@ -159,7 +159,7 @@ class Webserver:
console.info("Server Init Complete: Listening For Connections:")
self.ioloop = tornado.ioloop.IOLoop.instance()
self.ioloop = tornado.ioloop.IOLoop.current()
self.ioloop.start()
def stop_web_server(self):

View File

@ -1,13 +1,21 @@
import json
import logging
import asyncio
import tornado.websocket
from app.classes.shared.console import console
from urllib.parse import parse_qsl
from app.classes.shared.models import Users, db_helper
from app.classes.shared.helpers import helper
from app.classes.web.websocket_helper import websocket_helper
logger = logging.getLogger(__name__)
try:
import tornado.websocket
except ModuleNotFoundError as e:
logger.critical("Import Error: Unable to load {} module".format(e, e.name))
console.critical("Import Error: Unable to load {} module".format(e, e.name))
sys.exit(1)
class SocketHandler(tornado.websocket.WebSocketHandler):
@ -15,6 +23,7 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
self.controller = controller
self.tasks_manager = tasks_manager
self.translator = translator
self.io_loop = tornado.ioloop.IOLoop.current()
def get_remote_ip(self):
remote_ip = self.request.headers.get("X-Real-IP") or \
@ -35,6 +44,7 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
def open(self):
logger.debug('Checking WebSocket authentication')
if self.check_auth():
self.handle()
else:
@ -42,10 +52,15 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
self.close()
db_helper.add_to_audit_log_raw('unknown', 0, 0, 'Someone tried to connect via WebSocket without proper authentication', self.get_remote_ip())
websocket_helper.broadcast('notification', 'Someone tried to connect via WebSocket without proper authentication')
logger.warning('Someone tried to connect via WebSocket without proper authentication')
def handle(self):
websocket_helper.addClient(self)
self.page = self.get_query_argument('page')
self.page_query_params = dict(parse_qsl(helper.remove_prefix(
self.get_query_argument('page_query_params'),
'?'
)))
websocket_helper.add_client(self)
logger.debug('Opened WebSocket connection')
# websocket_helper.broadcast('notification', 'New client connected')
@ -56,7 +71,13 @@ class SocketHandler(tornado.websocket.WebSocketHandler):
logger.debug('Event Type: {}, Data: {}'.format(message['event'], message['data']))
def on_close(self):
websocket_helper.removeClient(self)
websocket_helper.remove_client(self)
logger.debug('Closed WebSocket connection')
# websocket_helper.broadcast('notification', 'Client disconnected')
async def write_message_int(self, message):
self.write_message(message)
def write_message_helper(self, message):
asyncio.run_coroutine_threadsafe(self.write_message_int(message), self.io_loop.asyncio_loop)

View File

@ -1,31 +1,75 @@
import json
import logging
import sys, threading, asyncio
from app.classes.shared.console import console
logger = logging.getLogger(__name__)
class WebSocketHelper:
clients = set()
def addClient(self, client):
try:
import tornado.ioloop
except ModuleNotFoundError as e:
logger.critical("Import Error: Unable to load {} module".format(e, e.name))
console.critical("Import Error: Unable to load {} module".format(e, e.name))
sys.exit(1)
class WebSocketHelper:
def __init__(self):
self.clients = set()
def add_client(self, client):
self.clients.add(client)
def removeClient(self, client):
self.clients.add(client)
def send_message(self, client, event_type, data):
def remove_client(self, client):
self.clients.remove(client)
def send_message(self, client, event_type: str, data):
if client.check_auth():
message = str(json.dumps({'event': event_type, 'data': data}))
client.write_message(message)
client.write_message_helper(message)
def broadcast(self, event_type, data):
logger.debug('Sending: ' + str(json.dumps({'event': event_type, 'data': data})))
def broadcast(self, event_type: str, data):
logger.debug('Sending to {} clients: {}'.format(len(self.clients), json.dumps({'event': event_type, 'data': data})))
for client in self.clients:
try:
self.send_message(client, event_type, data)
except:
pass
except Exception as e:
logger.exception('Error caught while sending WebSocket message to {}'.format(client.get_remote_ip()))
def broadcast_page(self, page: str, event_type: str, data):
def filter_fn(client):
return client.page == page
clients = list(filter(filter_fn, self.clients))
logger.debug('Sending to {} out of {} clients: {}'.format(len(clients), len(self.clients), json.dumps({'event': event_type, 'data': data})))
for client in clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
logger.exception('Error caught while sending WebSocket message to {}'.format(client.get_remote_ip()))
def broadcast_page_params(self, page: str, params: dict, event_type: str, data):
def filter_fn(client):
if client.page != page:
return False
for key, param in params.items():
if param != client.page_query_params.get(key, None):
return False
return True
clients = list(filter(filter_fn, self.clients))
logger.debug('Sending to {} out of {} clients: {}'.format(len(clients), len(self.clients), json.dumps({'event': event_type, 'data': data})))
for client in clients:
try:
self.send_message(client, event_type, data)
except Exception as e:
logger.exception('Error caught while sending WebSocket message to {}'.format(client.get_remote_ip()))
def disconnect_all(self):
console.info('Disconnecting WebSocket clients')

View File

@ -10,7 +10,7 @@
"stats_update_frequency": 30,
"delete_default_json": false,
"show_contribute_link": true,
"virtual_terminal_lines": 10,
"virtual_terminal_lines": 70,
"max_log_lines": 700,
"keywords": ["help", "chunk"]
}
}

View File

@ -8,7 +8,7 @@
"tornado_access": {
"format": "%(asctime)s - [Tornado] - [Access] - %(levelname)s - %(message)s"
},
"schedule": {
"schedule": {
"format": "%(asctime)s - [Schedules] - %(levelname)s - %(message)s"
}
},

View File

@ -173,8 +173,9 @@
let listenEvents = [];
try {
var wsInternal = new WebSocket('wss://' + location.host + '/ws');
pageQueryParams = 'page_query_params=' + encodeURIComponent(location.search)
page = 'page=' + encodeURIComponent(location.pathname)
var wsInternal = new WebSocket('wss://' + location.host + '/ws?' + page + '&' + pageQueryParams);
wsInternal.onopen = function() {
console.log('opened WebSocket connection:', wsInternal)
};

View File

@ -29,7 +29,7 @@
{% end %}
</div>
<a class="dropdown-item" href="/panel/activity_logs"><i class="dropdown-item-icon mdi mdi-calendar-check-outline text-primary"></i> Activity</a>
<a class="dropdown-item" href="/public/login"><i class="dropdown-item-icon mdi mdi-power text-primary"></i>Sign Out</a>
<a class="dropdown-item" href="/public/logout"><i class="dropdown-item-icon mdi mdi-power text-primary"></i>Sign Out</a>
</div>
</li>
</ul>

View File

@ -203,6 +203,35 @@
});
function deleteServerE(callback) {
var token = getCookie("_xsrf")
$.ajax({
type: "DELETE",
headers: {'X-XSRFToken': token},
url: '/ajax/delete_server?id={{ data['server_stats']['server_id']['server_id'] }}',
data: {
},
success: function(data){
console.log("got response:");
console.log(data);
},
});
}
function deleteServerFilesE(path, callback) {
var token = getCookie("_xsrf")
$.ajax({
type: "DELETE",
headers: {'X-XSRFToken': token},
url: '/ajax/delete_server_files?id={{ data['server_stats']['server_id']['server_id'] }}',
data: {
},
success: function(data){
console.log("got response:");
console.log(data);
},
});
}
let server_id = '{{ data['server_stats']['server_id']['server_id'] }}';
function send_command (server_id, command){
@ -249,7 +278,7 @@ let server_id = '{{ data['server_stats']['server_id']['server_id'] }}';
},
callback: function(result) {
if (!result){
send_command(server_id, 'delete_server');
deleteServerE()
setTimeout(function(){ window.location = '/panel/dashboard'; }, 5000);
bootbox.dialog({
backdrop: true,
@ -260,7 +289,7 @@ let server_id = '{{ data['server_stats']['server_id']['server_id'] }}';
return;}
else{
send_command(server_id, 'delete_server_files');
deleteServerFilesE();
setTimeout(function(){ window.location = '/panel/dashboard'; }, 5000);
bootbox.dialog({
backdrop: true,

View File

@ -166,8 +166,9 @@
margin-left: 10px;
}
/* Style the caret/arrow */
.tree-caret {
/* Style the items */
.tree-item,
.files-tree-title {
cursor: pointer;
user-select: none; /* Prevent text selection */
}

View File

@ -65,7 +65,7 @@
<li class="nav-item term-nav-item">
<label class="p-0 m-0">
<input type="checkbox" name="stop_scroll" id="stop_scroll" />
{{ translate('serverTerm', 'stopRefresh') }}
{{ translate('serverTerm', 'stopScroll') }}
</label>
</li>
</ul>
@ -169,7 +169,6 @@
let server_id = '{{ data['server_stats']['server_id']['server_id'] }}';
function get_server_log(){
if( !$("#stop_scroll").is(':checked')){
$.ajax({
type: 'GET',
url: '/ajax/server_log?id={{ data['server_stats']['server_id']['server_id'] }}',
@ -177,10 +176,16 @@
success: function (data) {
console.log('Got Log From Server')
$('#virt_console').html(data);
scroll();
},
scrollConsole();
},
});
}
}
function new_line_handler(data) {
$('#virt_console').append(data.line)
if (!$("#stop_scroll").is(':checked')) {
scrollConsole()
}
}
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
@ -193,9 +198,7 @@
console.log( "ready!" );
get_server_log()
setInterval(function(){
get_server_log() // this will run after every 5 seconds
}, 1500);
webSocket.on('vterm_new_line', new_line_handler)
});
$('#server_command').on('keydown', function (e) {
@ -218,7 +221,7 @@
});
function scroll(){
function scrollConsole(){
var logview = $('#virt_console');
if(logview.length)
logview.scrollTop(logview[0].scrollHeight - logview.height());

View File

@ -0,0 +1,215 @@
import peewee
import datetime
def migrate(migrator, database, **kwargs):
db = database
class Users(peewee.Model):
user_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
last_login = peewee.DateTimeField(default=datetime.datetime.now)
last_update = peewee.DateTimeField(default=datetime.datetime.now)
last_ip = peewee.CharField(default="")
username = peewee.CharField(default="", unique=True, index=True)
password = peewee.CharField(default="")
enabled = peewee.BooleanField(default=True)
superuser = peewee.BooleanField(default=False)
# we may need to revisit this
api_token = peewee.CharField(default="", unique=True, index=True)
class Meta:
table_name = "users"
database = db
class Roles(peewee.Model):
role_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
last_update = peewee.DateTimeField(default=datetime.datetime.now)
role_name = peewee.CharField(default="", unique=True, index=True)
class Meta:
table_name = "roles"
database = db
class User_Roles(peewee.Model):
user_id = peewee.ForeignKeyField(Users, backref='user_role')
role_id = peewee.ForeignKeyField(Roles, backref='user_role')
class Meta:
table_name = 'user_roles'
primary_key = peewee.CompositeKey('user_id', 'role_id')
database = db
class Audit_Log(peewee.Model):
audit_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
user_name = peewee.CharField(default="")
user_id = peewee.IntegerField(default=0, index=True)
source_ip = peewee.CharField(default='127.0.0.1')
# When auditing global events, use server ID 0
server_id = peewee.IntegerField(default=None, index=True)
log_msg = peewee.TextField(default='')
class Meta:
database = db
class Host_Stats(peewee.Model):
time = peewee.DateTimeField(default=datetime.datetime.now, index=True)
boot_time = peewee.CharField(default="")
cpu_usage = peewee.FloatField(default=0)
cpu_cores = peewee.IntegerField(default=0)
cpu_cur_freq = peewee.FloatField(default=0)
cpu_max_freq = peewee.FloatField(default=0)
mem_percent = peewee.FloatField(default=0)
mem_usage = peewee.CharField(default="")
mem_total = peewee.CharField(default="")
disk_json = peewee.TextField(default="")
class Meta:
table_name = "host_stats"
database = db
class Servers(peewee.Model):
server_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
server_uuid = peewee.CharField(default="", index=True)
server_name = peewee.CharField(default="Server", index=True)
path = peewee.CharField(default="")
backup_path = peewee.CharField(default="")
executable = peewee.CharField(default="")
log_path = peewee.CharField(default="")
execution_command = peewee.CharField(default="")
auto_start = peewee.BooleanField(default=0)
auto_start_delay = peewee.IntegerField(default=10)
crash_detection = peewee.BooleanField(default=0)
stop_command = peewee.CharField(default="stop")
executable_update_url = peewee.CharField(default="")
server_ip = peewee.CharField(default="127.0.0.1")
server_port = peewee.IntegerField(default=25565)
logs_delete_after = peewee.IntegerField(default=0)
class Meta:
table_name = "servers"
database = db
class User_Servers(peewee.Model):
user_id = peewee.ForeignKeyField(Users, backref='user_server')
server_id = peewee.ForeignKeyField(Servers, backref='user_server')
class Meta:
table_name = 'user_servers'
primary_key = peewee.CompositeKey('user_id', 'server_id')
database = db
class Role_Servers(peewee.Model):
role_id = peewee.ForeignKeyField(Roles, backref='role_server')
server_id = peewee.ForeignKeyField(Servers, backref='role_server')
class Meta:
table_name = 'role_servers'
primary_key = peewee.CompositeKey('role_id', 'server_id')
database = db
class Server_Stats(peewee.Model):
stats_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
server_id = peewee.ForeignKeyField(Servers, backref='server', index=True)
started = peewee.CharField(default="")
running = peewee.BooleanField(default=False)
cpu = peewee.FloatField(default=0)
mem = peewee.FloatField(default=0)
mem_percent = peewee.FloatField(default=0)
world_name = peewee.CharField(default="")
world_size = peewee.CharField(default="")
server_port = peewee.IntegerField(default=25565)
int_ping_results = peewee.CharField(default="")
online = peewee.IntegerField(default=0)
max = peewee.IntegerField(default=0)
players = peewee.CharField(default="")
desc = peewee.CharField(default="Unable to Connect")
version = peewee.CharField(default="")
updating = peewee.BooleanField(default=False)
class Meta:
table_name = "server_stats"
database = db
class Commands(peewee.Model):
command_id = peewee.AutoField()
created = peewee.DateTimeField(default=datetime.datetime.now)
server_id = peewee.ForeignKeyField(Servers, backref='server', index=True)
user = peewee.ForeignKeyField(Users, backref='user', index=True)
source_ip = peewee.CharField(default='127.0.0.1')
command = peewee.CharField(default='')
executed = peewee.BooleanField(default=False)
class Meta:
table_name = "commands"
database = db
class Webhooks(peewee.Model):
id = peewee.AutoField()
name = peewee.CharField(max_length=64, unique=True, index=True)
method = peewee.CharField(default="POST")
url = peewee.CharField(unique=True)
event = peewee.CharField(default="")
send_data = peewee.BooleanField(default=True)
class Meta:
table_name = "webhooks"
database = db
class Schedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
server_id = peewee.ForeignKeyField(Servers, backref='schedule_server')
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
interval_type = peewee.CharField()
start_time = peewee.CharField(null=True)
command = peewee.CharField(null=True)
comment = peewee.CharField()
class Meta:
table_name = 'schedules'
database = db
class Backups(peewee.Model):
directories = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
server_id = peewee.ForeignKeyField(Servers, backref='backups_server')
schedule_id = peewee.ForeignKeyField(Schedules, backref='backups_schedule')
class Meta:
table_name = 'backups'
database = db
migrator.create_table(Backups)
migrator.create_table(Users)
migrator.create_table(Roles)
migrator.create_table(User_Roles)
migrator.create_table(User_Servers)
migrator.create_table(Host_Stats)
migrator.create_table(Webhooks)
migrator.create_table(Servers)
migrator.create_table(Role_Servers)
migrator.create_table(Server_Stats)
migrator.create_table(Commands)
migrator.create_table(Audit_Log)
migrator.create_table(Schedules)
def rollback(migrator, database, **kwargs):
migrator.drop_table('users')
migrator.drop_table('roles')
migrator.drop_table('user_roles')
migrator.drop_table('audit_log') # peewee defaults to the lowercased class name, so Audit_Log (no table_name in Meta) maps to 'audit_log'
migrator.drop_table('host_stats')
migrator.drop_table('servers')
migrator.drop_table('user_servers')
migrator.drop_table('role_servers')
migrator.drop_table('server_stats')
migrator.drop_table('commands')
migrator.drop_table('webhooks')
migrator.drop_table('schedules')
migrator.drop_table('backups')
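
After this baseline migration has run, a quick sanity check is to ask peewee which tables actually exist and compare them against the thirteen models created above. The database filename below is an assumption for illustration, not necessarily the path Crafty uses.

```py
import peewee

# Point this at the SQLite file Crafty uses; the path here is only an example.
database = peewee.SqliteDatabase("crafty.sqlite")
database.connect()

expected = {
    "users", "roles", "user_roles", "audit_log", "host_stats", "servers",
    "user_servers", "role_servers", "server_stats", "commands", "webhooks",
    "schedules", "backups",
}
present = set(database.get_tables())
print("missing tables:", expected - present)
database.close()
```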

View File

@ -120,7 +120,7 @@
"playerControls": "Player Management"
},
"serverTerm": {
"stopRefresh": "Stop Refresh",
"stopScroll": "Stop Auto Scrollling",
"commandInput": "Enter your command",
"sendCommand": "Send command",
"start": "Start",

View File

@ -120,7 +120,7 @@
"playerControls": "Pelaajahallinta"
},
"serverTerm": {
"stopRefresh": "Lopeta päivitys",
"stopScroll": "Lopeta automaattinen vieritys",
"commandInput": "Kirjoita komento",
"sendCommand": "Lähetä komento",
"start": "Käynnistä",

32
main.py
View File

@ -4,14 +4,16 @@ import json
import time
import argparse
import logging.config
import signal
""" Our custom classes / pip packages """
from app.classes.shared.console import console
from app.classes.shared.helpers import helper
from app.classes.shared.models import installer
from app.classes.shared.models import installer, database
from app.classes.shared.tasks import TasksManager
from app.classes.shared.controller import Controller
from app.classes.shared.migration import MigrationManager
from app.classes.shared.cmd import MainPrompt
@ -90,16 +92,18 @@ if __name__ == '__main__':
# our session file, helps prevent multiple controller agents on the same machine.
helper.create_session_file(ignore=args.ignore)
migration_manager = MigrationManager(database)
migration_manager.up() # Automatically runs migrations
# do our installer stuff
fresh_install = installer.is_fresh_install()
if fresh_install:
console.debug("Fresh install detected")
installer.create_tables()
installer.default_settings()
else:
console.debug("Existing install detected")
installer.check_schema_version()
# now the tables are created, we can load the tasks_manger and server controller
controller = Controller()
@ -127,9 +131,24 @@ if __name__ == '__main__':
# this should always be last
tasks_manager.start_main_kill_switch_watcher()
Crafty = MainPrompt(tasks_manager)
Crafty = MainPrompt(tasks_manager, migration_manager)
def sigterm_handler(signum, current_stack_frame):
print() # for newline
logger.info("Recieved SIGTERM, stopping Crafty")
console.info("Recieved SIGTERM, stopping Crafty")
Crafty.universal_exit()
signal.signal(signal.SIGTERM, sigterm_handler)
if not args.daemon:
Crafty.cmdloop()
try:
Crafty.cmdloop()
except KeyboardInterrupt:
print() # for newline
logger.info("Recieved SIGINT, stopping Crafty")
console.info("Recieved SIGINT, stopping Crafty")
Crafty.universal_exit()
else:
print("Crafty started in daemon mode, no shell will be printed")
while True:
@ -139,6 +158,7 @@ if __name__ == '__main__':
time.sleep(1)
except KeyboardInterrupt:
logger.info("Recieved SIGINT, stopping Crafty")
console.info("Recieved SIGINT, stopping Crafty")
break
Crafty.universal_exit()
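
The `main.py` hunks above wire a SIGTERM handler and a `KeyboardInterrupt` guard around the command loop so that both signals funnel into `universal_exit()`. Stripped of Crafty's own classes, the pattern looks roughly like this; `Prompt` and `universal_exit` below are placeholders for `MainPrompt` and the real cleanup, not the project's actual code.

```py
import cmd
import signal
import sys


class Prompt(cmd.Cmd):
    # Stand-in for Crafty's MainPrompt.
    prompt = "Crafty Controller v4 > "

    def do_exit(self, _line):
        return True


def universal_exit():
    # Placeholder for the real cleanup (stopping servers, tasks, etc.).
    print("shutting down cleanly")


def sigterm_handler(signum, current_stack_frame):
    print()  # for newline
    print("Received SIGTERM, stopping")
    universal_exit()
    sys.exit(0)


if __name__ == "__main__":
    signal.signal(signal.SIGTERM, sigterm_handler)
    try:
        Prompt().cmdloop()
    except KeyboardInterrupt:
        print()  # for newline
        print("Received SIGINT, stopping")
        universal_exit()
```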

View File

@ -23,4 +23,3 @@ termcolor==1.1.0
tornado==6.0.4
urllib3==1.25.10
webencodings==0.5.1
peewee_migrate==1.4.6