Compare commits

...

338 Commits

Author SHA1 Message Date
Iain Powrie
2b14d514e0 Merge branch 'dev' into 'master'
v4.4.3

See merge request crafty-controller/crafty-4!792
2024-08-08 19:59:08 +00:00
Iain Powrie
149a51b4e6 Merge branch 'bugfix/schedule-creation' into 'dev'
Fix schedules creation fail due to missing action ID

See merge request crafty-controller/crafty-4!791
2024-08-08 19:41:47 +00:00
Zedifus
3da6d6f740 Close changelog 4.4.3 2024-08-08 20:37:30 +01:00
Zedifus
3ec43e45ba Update changelog !791 2024-08-08 20:36:26 +01:00
Andrew
a1c3e3386f Fix schedules creation fail due to missing action ID 2024-08-07 20:28:13 -04:00
Zedifus
0d19f81a29 Prepare 4.4.3 release base 2024-08-07 02:39:41 +01:00
Iain Powrie
94a1fdd215 Merge branch 'dev' into 'master'
v4.4.2

See merge request crafty-controller/crafty-4!790
2024-08-07 00:33:38 +00:00
Iain Powrie
589b3b5cc8 Merge branch 'bugfix/upload-flood' into 'dev'
Upload chunks in batches

See merge request crafty-controller/crafty-4!788
2024-08-07 00:26:26 +00:00
Zedifus
adc781f8f9 Close changelog 4.4.2 2024-08-07 01:21:24 +01:00
Zedifus
a4f97b446e Update changelog !788 2024-08-07 01:20:16 +01:00
Zedifus
4155f359cc Merge branch 'dev' into bugfix/upload-flood 2024-08-07 01:17:33 +01:00
Iain Powrie
1f832ae85e Merge branch 'bugfix/fileNotFound-exceptoin' into 'dev'
Fix exception message on file not found for backups

See merge request crafty-controller/crafty-4!789
2024-08-06 23:08:20 +00:00
Zedifus
885b629cc7 Update changelog !789 2024-08-06 23:54:16 +01:00
amcmanu3
849242ecea Fix exception message on file not found for backups 2024-08-06 18:36:21 -04:00
amcmanu3
6ef93908ae Tweak upload js function to upload in batches 2024-08-06 17:48:17 -04:00
Zedifus
fc6f85a16b Prepare 4.4.2 release base 2024-08-06 21:31:41 +01:00
Zedifus
fcdebbe3e5 Merge branch 'dev' into rerelease-4.4.1 2024-08-06 20:47:25 +01:00
Zedifus
45c3f73eca Revert "Revert "Merge branch 'dev' into 'master'""
This reverts the 4.4.1 release revert commit 29ce7a2cdeb59b4d769f4b107a24cece44b7a214.
2024-08-06 20:45:00 +01:00
Iain Powrie
73ed9156b7 Merge branch 'lang/new-lang-russian' into 'dev'
Add ru_RU Translation

See merge request crafty-controller/crafty-4!779
2024-08-06 19:14:34 +00:00
Zedifus
ce972ec728 Merge branch 'lang/new-lang-russian' of gitlab.com:crafty-controller/crafty-4 into lang/new-lang-russian 2024-08-06 20:10:06 +01:00
Zedifus
6139a6426f Close changelog 4.4.1 2024-08-06 20:09:21 +01:00
Zedifus
7f27c018de Merge branch 'dev' into lang/new-lang-russian 2024-08-06 20:08:30 +01:00
Iain Powrie
fe33b84e10 Merge branch 'bugfix/backup-migration' into 'dev'
Orphan Backup Migration Fix

See merge request crafty-controller/crafty-4!785
2024-08-06 19:05:17 +00:00
amcmanu3
7c16737fab Appease sonar return nothing instead of false 2024-08-06 14:59:10 -04:00
Zedifus
7b739863b3 Update changelog !785 2024-08-06 19:51:39 +01:00
Zedifus
037e13d243 Merge branch 'dev' into bugfix/backup-migration 2024-08-06 19:46:18 +01:00
amcmanu3
9f7f588e85 Tweak default backup configs to account for bad clone in 4.2.x 2024-07-30 14:30:30 -04:00
amcmanu3
4a58759183 Add function for removing old malformed backups 2024-07-30 16:45:02 +01:00
Analicia Abernathy
f8b1070162 added norole and nonerole translations 2024-07-30 07:32:10 -05:00
Zedifus
44b5f2a809 Reopen changelog 4.4.1 2024-07-30 00:49:33 +01:00
amcmanu3
ff9abe4359 Fix directory moves on backup migration 2024-07-29 18:02:00 -04:00
Iain Powrie
d3f965d127 Merge branch 'revert-501b490c' into 'master'
Revert "Merge branch 'dev' into 'master'"

See merge request crafty-controller/crafty-4!784
2024-07-29 21:53:44 +00:00
Iain Powrie
29ce7a2cde Revert "Merge branch 'dev' into 'master'"
This reverts merge request !783
2024-07-29 21:49:44 +00:00
Iain Powrie
501b490cbd Merge branch 'dev' into 'master'
v4.4.1

See merge request crafty-controller/crafty-4!783
2024-07-29 21:34:20 +00:00
Zedifus
627adcdc95 Close changelog 4.4.1 2024-07-29 21:56:47 +01:00
Iain Powrie
4e345652dc Merge branch 'bugfix/dropdown-width' into 'dev'
Remove unused and problematic "dropdown-menu" ident from css

See merge request crafty-controller/crafty-4!782
2024-07-29 20:52:07 +00:00
Zedifus
3775268883 Update changelog !782 2024-07-29 21:48:27 +01:00
amcmanu3
245f473693 Remove unused and problematic "dropdown-menu" ident from css 2024-07-29 16:41:25 -04:00
Iain Powrie
928d266ee6 Merge branch 'lang/th_TH-updates' into 'dev'
Add th_TH translations for !772

See merge request crafty-controller/crafty-4!781
2024-07-29 20:30:34 +00:00
Zedifus
c920719503 Update changelog !781 2024-07-29 21:26:58 +01:00
Zedifus
ef7fc68a24 Merge branch 'dev' into lang/th_TH-updates 2024-07-29 21:24:43 +01:00
Zedifus
d7f00ecb27 Update changelog !779 2024-07-29 21:13:51 +01:00
Zedifus
65ae3f08b9 Merge branch 'dev' into lang/new-lang-russian 2024-07-29 21:12:27 +01:00
Iain Powrie
b5e30bea58 Merge branch 'lang/fix-frFR-syntax' into 'dev'
Fix fr_FR syntax issues

See merge request crafty-controller/crafty-4!780
2024-07-29 20:07:40 +00:00
Zedifus
fbf1dd5395 Update chngelog !780 2024-07-29 20:53:58 +01:00
Analicia Abernathy
bcdc464edf Update th_TH.json 2024-07-29 14:47:53 -05:00
Analicia Abernathy
d0aeb019d8 adding missing translations from merge 772 2024-07-29 14:46:15 -05:00
Iain Powrie
568641dce3 Merge branch 'lang/fix_fr' into 'lang/fix-frFR-syntax'
Lang/fix fr

See merge request crafty-controller/crafty-4!778
2024-07-29 19:40:10 +00:00
Analicia Abernathy
e60b86c238 Adding new language ru_RU 2024-07-29 14:33:28 -05:00
Analicia Abernathy
d4af117421 added ru_RU to the humanized json 2024-07-29 14:33:14 -05:00
Iain Powrie
3751d47b82 Merge branch 'tweak/server-create-roles' into 'dev'
Change server creation to include searchable roles

See merge request crafty-controller/crafty-4!772
2024-07-29 19:20:43 +00:00
Zedifus
c8425578f0 Update changelog !772 2024-07-29 20:14:06 +01:00
Zedifus
83813e7566 Mark th_TH incomplete 2024-07-29 20:11:32 +01:00
Ludo-code
af3c08d886 Update file fr_FR.json 2024-07-26 06:44:27 +00:00
Ludo-code
70ef395936 fix fr lang 2024-07-24 16:33:22 +00:00
Analicia Abernathy
76215556d2 translations for server creation changes 2024-07-23 21:38:18 -05:00
amcmanu3
f7a4c9505f Ingnore humanized 2024-07-23 22:06:24 -04:00
Andrew
0ed296adc6 Merge branch 'dev' into tweak/server-create-roles 2024-07-21 11:17:16 -04:00
Iain Powrie
b71b7cb1c4 Merge branch 'refactor/upload-api' into 'dev'
Chunked Uploads | Fix Upload Authentication

See merge request crafty-controller/crafty-4!762
2024-07-09 02:32:51 +00:00
Zedifus
b36740e0f7 Update changelog !762 2024-07-09 03:29:06 +01:00
Iain Powrie
eaeda3e746 Merge branch 'refactor/backups' into 'dev'
Refactor Backups | Allow multiple backup configurations

See merge request crafty-controller/crafty-4!711
2024-07-09 02:11:30 +00:00
amcmanu3
1ac63fae0d Merge branch 'refactor/backups' into refactor/upload-api 2024-07-08 22:06:49 -04:00
amcmanu3
a3e210c0d3 Check for traversal on backup delete 2024-07-08 22:06:29 -04:00
amcmanu3
d9b9f00e9a Merge branch 'refactor/backups' into refactor/upload-api 2024-07-08 21:57:59 -04:00
amcmanu3
9186d9b02c Include backup_id in delete call 2024-07-08 21:51:27 -04:00
Zedifus
90e11ef73e Update changelog !711 2024-07-09 02:33:02 +01:00
Zedifus
c9b2b49e5f Merge branch 'dev' into refactor/backups 2024-07-09 02:31:30 +01:00
Iain Powrie
b4f721cd8c Merge branch 'tweak/server-name' into 'dev'
Do not allow slashes in server names

See merge request crafty-controller/crafty-4!767
2024-07-09 01:28:32 +00:00
Iain Powrie
974571ea7a Merge branch 'tweak/cpu-attribute-error' into 'dev'
Workaround cpu_freq call catching on obscure cpu architectures

See merge request crafty-controller/crafty-4!776
2024-07-09 01:21:29 +00:00
Zedifus
31abf80ffc Update changelog !776 2024-07-09 02:18:12 +01:00
Iain Powrie
b0afc80b73 Merge branch 'Username404-59-dev-patch-44249' into 'tweak/cpu-attribute-error'
Fix cpu_freq call catching on obscure systems

See merge request crafty-controller/crafty-4!754
2024-07-09 01:12:11 +00:00
Zedifus
65396b7f27 Merge branch 'dev' into tweak/server-name 2024-07-09 02:07:00 +01:00
Iain Powrie
1c7ffcdda7 Merge branch 'bugfix/user-tz-login' into 'dev'
Use UTC for tokens_valid_from in user config

See merge request crafty-controller/crafty-4!765
2024-07-09 01:00:46 +00:00
Zedifus
ba2d3c92a6 Update changelog !765 2024-07-09 01:55:01 +01:00
Zedifus
735cdb238a Merge branch 'dev' into bugfix/user-tz-login 2024-07-09 01:53:01 +01:00
Iain Powrie
4346f58ff3 Merge branch 'bugfix/user-creation' into 'dev'
User Creation Fixes

See merge request crafty-controller/crafty-4!763
2024-07-09 00:48:25 +00:00
amcmanu3
0519a36fe1 Fix sonar issue 2024-07-08 20:39:21 -04:00
Zedifus
5b4bf46f61 Mark he_IL incomplete 2024-07-09 01:20:09 +01:00
Zedifus
b63081b03c Update changelog !763 2024-07-09 01:13:43 +01:00
Zedifus
594f030545 Merge branch 'dev' into bugfix/user-creation 2024-07-09 01:07:19 +01:00
amcmanu3
f26606a0cd Except filenotfound 2024-07-08 20:05:42 -04:00
Iain Powrie
a710c80bb2 Merge branch 'lang/lang-names' into 'dev'
Add language header translations

See merge request crafty-controller/crafty-4!773
2024-07-08 23:56:04 +00:00
Zedifus
6366463d12 Update lang sort ci util to exclude humanized_index 2024-07-09 00:52:01 +01:00
Zedifus
66359ff561 Update changelog !773 2024-07-09 00:45:18 +01:00
Zedifus
c2c95c047a Add lolcatz humanized listing 2024-07-09 00:44:57 +01:00
Zedifus
7859c33cbd Merge branch 'dev' into lang/lang-names 2024-07-09 00:40:20 +01:00
Iain Powrie
b9625ab113 Merge branch 'bugfix/key-delete' into 'dev'
Fix typing issue causing IDs to not be ==

See merge request crafty-controller/crafty-4!775
2024-07-08 23:38:04 +00:00
Zedifus
920f1bcada Update changelog !775 2024-07-09 00:25:38 +01:00
Zedifus
319ea9510c Merge branch 'dev' into bugfix/key-delete 2024-07-09 00:23:51 +01:00
amcmanu3
eefbc81538 Fix sonar 2024-07-08 19:23:46 -04:00
Iain Powrie
a6b582b328 Merge branch 'sec/bump-tornado' into 'dev'
Bump tornado & requests for sec advisories

See merge request crafty-controller/crafty-4!774
2024-07-08 23:22:01 +00:00
Iain Powrie
fd86533b47 Merge branch 'dev' into 'sec/bump-tornado'
# Conflicts:
#   CHANGELOG.md
2024-07-08 23:19:19 +00:00
Zedifus
0edd6005c6 Update changelog !774 2024-07-09 00:18:13 +01:00
amcmanu3
87c559ae66 Delete temp dir files every 12 hours 2024-07-08 19:16:23 -04:00
amcmanu3
b0a38d1249 Remove overall file hash checking for now as it interferes with large files 2024-07-08 16:27:06 -04:00
amcmanu3
539c07be9d Fix typing issue causing IDs to not be == 2024-07-08 10:53:38 -04:00
Analicia Abernathy
ec4045b754 password lenth validation translation 2024-07-07 18:59:15 -05:00
Iain Powrie
df62da858f Merge branch 'dev' into 'tweak/server-name'
# Conflicts:
#   CHANGELOG.md
2024-07-07 23:32:44 +00:00
amcmanu3
8cf855488c Include single file for humanized translations 2024-07-07 19:22:54 -04:00
amcmanu3
b2afe3d361 Merge branch 'dev' into bugfix/user-tz-login 2024-07-07 18:41:27 -04:00
amcmanu3
d6e00edf4a Merge branch 'refactor/backups' into refactor/upload-api 2024-06-25 13:48:35 -04:00
amcmanu3
86ecfc35b6 Fix general user backup path 2024-06-25 13:48:17 -04:00
amcmanu3
1ba0520c0e Fix dir exclusions 2024-06-25 13:33:52 -04:00
amcmanu3
4fafb5cae9 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-25 12:42:51 -04:00
amcmanu3
7b1afec9cb Merge branch 'dev' into refactor/backups 2024-06-24 12:52:47 -04:00
amcmanu3
c504f3d83f Merge branch 'dev' into tweak/server-create-roles 2024-06-23 21:00:04 -04:00
amcmanu3
f269ff0f53 Appease sonar 2024-06-23 20:54:30 -04:00
amcmanu3
07e094d11e Fix sonar 2024-06-23 20:41:06 -04:00
amcmanu3
5a6229d282 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-23 20:28:53 -04:00
amcmanu3
aaadd0cc88 Remove deprectaed width 2024-06-23 20:28:03 -04:00
amcmanu3
20ae8b3a0b Remove role tags 2024-06-23 20:26:24 -04:00
amcmanu3
8b00f9f282 Remove duplicate CSS entry 2024-06-23 20:21:23 -04:00
amcmanu3
54cc2faa45 Update input labeling 2024-06-23 20:17:34 -04:00
amcmanu3
0156988d23 Remove deprecated width from table 2024-06-23 19:17:46 -04:00
amcmanu3
af11737b48 Refactor repeated auth errors 2024-06-23 19:13:44 -04:00
amcmanu3
f8884ab93e Refactor api handler to use constant id error 2024-06-23 19:10:42 -04:00
amcmanu3
16eca86a10 Some sonar fixes 2024-06-23 19:05:55 -04:00
Iain Powrie
522223a36f Merge branch 'bugfix/docker-audit-log' into 'dev'
Ensure audit log exists

See merge request crafty-controller/crafty-4!771
2024-06-23 15:36:35 +00:00
Zedifus
2c311499ac Update changelog !771 2024-06-23 16:32:12 +01:00
Zedifus
8b80d7e853 Merge branch 'dev' into bugfix/docker-audit-log 2024-06-23 16:30:44 +01:00
Iain Powrie
5df46ecd1c Merge branch 'tweak/status-typo' into 'dev'
Remove text from status page and use symbols

See merge request crafty-controller/crafty-4!770
2024-06-23 15:29:54 +00:00
Zedifus
cb41b613bf Update changelog !770 2024-06-23 16:26:26 +01:00
Zedifus
2fa23b14d1 Merge branch 'dev' into tweak/status-typo 2024-06-23 16:25:15 +01:00
Zedifus
a3ee37a8ff Bump requests to 2.32.0 for CVE-2024-35195 2024-06-23 16:18:25 +01:00
Zedifus
4ea9c75c41 Bump tornado to 6.4.1 for GHSA-753j-mpmx-qq6g & GHSA-w235-7p84-xx57 2024-06-23 16:17:55 +01:00
Iain Powrie
75f149849a Merge branch 'tweak/support-logs-threads' into 'dev'
Add a thread dump to support logs

See merge request crafty-controller/crafty-4!769
2024-06-23 15:08:51 +00:00
Zedifus
6ed23f09dd Update changelog !769 2024-06-23 16:03:07 +01:00
Zedifus
d05b986fa9 Merge branch 'dev' into tweak/support-logs-threads 2024-06-23 16:01:51 +01:00
Iain Powrie
c8d5bfb960 Merge branch 'bugfix/api-permissions' into 'dev'
Fix bug where full access gives minimal access

See merge request crafty-controller/crafty-4!768
2024-06-23 14:58:46 +00:00
Zedifus
b17347bd99 Update changelog !768 2024-06-23 15:49:29 +01:00
Zedifus
9331ad179e Merge branch 'dev' into bugfix/api-permissions 2024-06-23 15:48:05 +01:00
Zedifus
e0f2659135 Update changelog !767 2024-06-23 15:42:11 +01:00
Zedifus
40b7b2f2a1 Merge branch 'dev' into tweak/server-name 2024-06-23 15:40:42 +01:00
Iain Powrie
2253f1e812 Merge branch 'tweak/nav-label' into 'dev'
Remove navigation label from sidebar

See merge request crafty-controller/crafty-4!766
2024-06-23 14:36:02 +00:00
Zedifus
2438906be2 Update changelog !766 2024-06-23 15:32:00 +01:00
Zedifus
1a9ee2cbc1 Merge branch 'dev' into tweak/nav-label 2024-06-23 15:30:28 +01:00
Iain Powrie
2e9b7525be Merge branch 'bugfix/zip-server-creation' into 'dev'
Fix Zip Root Dir Selection

See merge request crafty-controller/crafty-4!764
2024-06-23 14:26:49 +00:00
Zedifus
ff56d8fe04 Update changelog !764 2024-06-23 15:23:19 +01:00
Zedifus
636ade7a9e Merge branch 'dev' into bugfix/zip-server-creation 2024-06-23 15:22:07 +01:00
Iain Powrie
b7830ec00b Merge branch 'tweak/default-creds' into 'dev'
Add info note to default creds file

See merge request crafty-controller/crafty-4!760
2024-06-23 14:15:12 +00:00
Zedifus
12d516a48d Update changelog !760 2024-06-23 14:55:33 +01:00
Zedifus
da97268cb3 Merge branch 'dev' into tweak/default-creds 2024-06-23 14:53:58 +01:00
Iain Powrie
5b02021478 Merge branch 'lang/additional-lang-for-4.4.0' into 'dev'
Lang/additional lang for 4.4.0

See merge request crafty-controller/crafty-4!761
2024-06-23 13:51:01 +00:00
Zedifus
af5c7aab10 Update changelog !761 2024-06-23 14:30:40 +01:00
amcmanu3
e33cf5451c Add language header translations 2024-06-22 18:44:02 -04:00
amcmanu3
ab267cfcc7 Remove outdated function 2024-06-21 14:28:03 -04:00
amcmanu3
bd73e1892a Don't allow list modifier in role name 2024-06-21 14:00:06 -04:00
amcmanu3
76c64b8ac4 Change server creation to include searchable roles 2024-06-21 13:25:07 -04:00
amcmanu3
b904430069 Ensure audit log exists 2024-06-20 17:29:35 -04:00
amcmanu3
43bd2d0444 Remove formatting 2024-06-19 13:54:32 -04:00
amcmanu3
481691aec5 Remove text from status page and use symbols 2024-06-19 13:48:48 -04:00
amcmanu3
95437c3e64 Remove function from troubleshooting 2024-06-19 13:31:36 -04:00
amcmanu3
6bbb3bf399 Fix box close after upload complete 2024-06-19 13:28:39 -04:00
Andrew
c4b183466e Close upload box if no files 2024-06-17 13:31:34 -04:00
Andrew
4a0dfddde5 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-17 09:45:57 -04:00
Andrew
f808f32444 Fix backup downloads 2024-06-16 16:02:26 -04:00
Andrew
ad734c526c Fix query statement for updates 2024-06-15 16:09:01 -04:00
Andrew
0b0b63d650 Fix migrations errors on initial run 2024-06-15 14:32:18 -04:00
Andrew
4cc2d81044 Merge branch 'dev' into refactor/backups 2024-06-15 12:11:16 -04:00
amcmanu3
59d835bd9d Merge branch 'refactor/backups' into refactor/upload-api 2024-06-14 19:31:48 -04:00
amcmanu3
3030712587 Refactor restore to persist backups 2024-06-14 19:31:26 -04:00
amcmanu3
e5d18e5f2c Add type hints. Return backup ID on creation 2024-06-14 19:30:43 -04:00
amcmanu3
60bf9a7914 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-14 18:30:05 -04:00
amcmanu3
f0afcfbc23 Fix check for directory existing 2024-06-14 18:29:36 -04:00
amcmanu3
6cf093031c Merge branch 'refactor/backups' into refactor/upload-api 2024-06-14 18:18:15 -04:00
amcmanu3
3c4b513a44 Use json.loads in the update function 2024-06-14 18:17:50 -04:00
Analicia Abernathy
ff8b5e0c03 Merge branch 'refactor/backups' of gitlab.com:crafty-controller/crafty-4 into refactor/backups 2024-06-14 14:17:11 -05:00
Analicia Abernathy
6e7b250292 Merge branch 'refactor/backups' of gitlab.com:crafty-controller/crafty-4 into refactor/backups 2024-06-14 14:17:00 -05:00
Analicia Abernathy
ff7a392119 translations for backup changes 2024-06-14 14:15:50 -05:00
amcmanu3
f5874d701a Add password error to not display password in plain text 2024-06-13 17:09:00 -04:00
amcmanu3
038a280ae1 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-13 16:17:49 -04:00
amcmanu3
d4503f1887 Use backup_location variable 2024-06-13 16:06:04 -04:00
amcmanu3
908ddc2d21 Remove migrate history 2024-06-13 15:54:54 -04:00
amcmanu3
6ff561f40b Fix default backup config 2024-06-13 15:54:47 -04:00
amcmanu3
5488f3880a Bump tornado to 6.4.1 for security checks 2024-06-11 15:59:20 -04:00
Andrew
4aad365811 Add a thread dump to support logs 2024-06-08 12:24:04 -04:00
Andrew
44ab4b8b75 Merge branch 'refactor/backups' into refactor/upload-api 2024-06-07 14:08:23 -04:00
Andrew
8dd82f4111 Set backups to live in a directory with the backup id 2024-06-07 14:08:05 -04:00
amcmanu3
682999f7f6 Standardize header naming 2024-06-02 13:02:40 -04:00
amcmanu3
10091a38ee Merge branch 'refactor/backups' into refactor/upload-api 2024-06-02 10:47:45 -04:00
amcmanu3
a69d569e23 Fix issue with backup UUID on migration 2024-06-02 10:47:13 -04:00
amcmanu3
bb6b516647 Fix bug where full access gives minimal access 2024-06-01 22:16:46 -04:00
amcmanu3
f28f689841 Do not allow slashes in server names 2024-05-31 15:31:42 -04:00
amcmanu3
c3560acbd1 Account for str valid_tokens_from 2024-05-31 12:45:29 -04:00
amcmanu3
7d418355ba Remove navigation label from sidebar 2024-05-31 12:37:21 -04:00
Andrew
251df92528 Appease the linter 2024-05-29 19:32:27 -04:00
Andrew
2898917b64 Remove print statements 2024-05-29 19:30:07 -04:00
Andrew
9ec0044458 Use UTC for tokens_valid_from in user config 2024-05-29 13:21:50 -10:00
Andrew
c8b607c08a Remove unnecessary imports 2024-05-28 21:49:41 -04:00
Andrew
5b8fcfe290 Merge branch 'refactor/backups' into refactor/upload-api 2024-05-28 19:48:29 -04:00
Andrew
e2132c2130 Fix migration 2024-05-28 19:48:11 -04:00
Andrew
f991b79782 Fix backup migration 2024-05-28 19:42:02 -04:00
Andrew
5340670ceb Fix bedrock zip imports 2024-05-28 19:39:13 -04:00
Andrew
a9856a8a2c Fix java server zip creation 2024-05-28 19:30:12 -04:00
Andrew
d8ad8f5e09 Fix role selection on user creation.
Security improvements
2024-05-28 17:14:05 -04:00
Andrew
0aae82448b Add comments 2024-05-28 15:39:44 -04:00
amcmanu3
407ca4c0bb Add logging 2024-05-27 19:48:35 -04:00
amcmanu3
3fd763eebd Check hashes 2024-05-27 19:12:39 -04:00
amcmanu3
d7bee5a7b8 Refactor uploads to same JS file 2024-05-27 19:12:31 -04:00
amcmanu3
9b7ddbfe1e Check for server dir on server_upload 2024-05-27 15:19:23 -04:00
amcmanu3
3b7a463184 Remove stream_size_GB option 2024-05-26 23:42:25 -04:00
amcmanu3
a9c9598ad0 Fix error on upload 2024-05-26 23:38:59 -04:00
amcmanu3
95ef72d809 All file uploads using API 2024-05-26 23:31:06 -04:00
amcmanu3
96f0ee62ac Fix upload imports 2024-05-26 23:18:12 -04:00
amcmanu3
45aacb97c8 Refactor upload route
Import uploads broke
2024-05-26 23:08:43 -04:00
amcmanu3
7c8781e09e Merge branch 'refactor/backups' into refactor/upload-api 2024-05-26 21:01:00 -04:00
amcmanu3
9ebaf38553 Fix files methods 2024-05-26 20:59:58 -04:00
amcmanu3
c30d17cbf8 Chunked uploads 2024-05-26 20:54:06 -04:00
amcmanu3
96b766cef7 Add no backup message 2024-05-26 18:45:15 -04:00
amcmanu3
b8681c0fce Security check when posting task config 2024-05-26 18:37:20 -04:00
amcmanu3
caf35a6de8 Front end tweaks 2024-05-26 18:21:38 -04:00
amcmanu3
772910dc6f Set backup schedules 2024-05-26 18:03:57 -04:00
amcmanu3
ab83b846d6 Refactor serverId 2024-05-26 17:17:58 -04:00
amcmanu3
217435283b Restore backup/clone 2024-05-26 17:10:20 -04:00
amcmanu3
a46129f20c Lint 2024-05-26 15:41:01 -04:00
amcmanu3
d24f56f4a5 Use self as first argument 2024-05-26 15:39:52 -04:00
amcmanu3
79adf29c62 Remove failed status after success 2024-05-26 15:38:38 -04:00
amcmanu3
daa511ef2a Remove print statements 2024-05-26 15:20:18 -04:00
amcmanu3
0421a490e2 Use translations for status 2024-05-26 15:19:08 -04:00
amcmanu3
708f57537c View backup error 2024-05-26 15:10:03 -04:00
amcmanu3
7c50b7cfa5 Add percentage to both pages 2024-05-26 13:59:08 -04:00
amcmanu3
1b073a2401 Backup statuses
Default backups
2024-05-26 13:45:13 -04:00
amcmanu3
bf196b68c0 Fix more lint 2024-05-25 22:25:29 -04:00
amcmanu3
74ef9e0a13 Fix backup file downloads 2024-05-25 22:19:14 -04:00
amcmanu3
5d82d79afd Add backup configs/remove backup configs 2024-05-25 22:10:35 -04:00
amcmanu3
3cf4ebf073 Backups are editable! 2024-05-25 16:33:28 -04:00
amcmanu3
b061ebf5e5 Use zip note 2024-05-25 15:12:55 -04:00
amcmanu3
334d4b69c8 Allow three arguments on server actions 2024-05-25 15:12:46 -04:00
amcmanu3
97de58f31d Add action ID to tasks 2024-05-25 15:12:28 -04:00
amcmanu3
41147266ad Fix issue with backup compression 2024-05-25 15:11:58 -04:00
amcmanu3
c037f1d1af Lint 2024-05-25 15:11:46 -04:00
amcmanu3
d55e7c9e64 Remove repeated code 2024-05-25 14:40:21 -04:00
amcmanu3
60d3ee1aa8 Working default backup 2024-05-25 14:40:14 -04:00
amcmanu3
f8626633cf Add action ID option to schedules 2024-05-25 13:51:40 -04:00
Zedifus
94707be975 Merge branch 'dev' into lang/additional-lang-for-4.4.0 2024-05-24 00:33:05 +01:00
Analicia Abernathy
fdd1d2fca3 added big bucket translations 2024-05-24 00:15:43 +01:00
Analicia Abernathy
895ba2d2f5 updates for the big bucket release 2024-05-24 00:15:43 +01:00
Zedifus
2a6c0ca751 Revert pinned sonarq version,
They've moved to a rootless image and cache was retaining files with root permissions, solution is to clear cache
2024-05-23 23:40:03 +01:00
amcmanu3
c2dd307369 Merge branch 'dev' into refactor/backups 2024-05-23 17:19:04 -04:00
Zedifus
5d53a1fa46 Pin sonar-scanner-cli to 5.0.1
Latest Broken
2024-05-23 21:54:35 +01:00
amcmanu3
f845f54654 Add info note to default creds file 2024-05-18 20:32:27 -04:00
Andrew
64116a6cf3 Remove backup enabled column 2024-05-15 21:37:56 -04:00
--unset
6ca396854d Merge branch 'dev' into refactor/backups 2024-05-15 21:01:16 -04:00
Zedifus
860b9aa5d1 Prepare 4.4.1 release base 2024-05-12 00:45:06 +01:00
Iain Powrie
e4f96f9118 Merge branch 'dev' into 'master'
v4.4.0

See merge request crafty-controller/crafty-4!758
2024-05-11 23:33:52 +00:00
Zedifus
938a7794bd Sort unsorted es_ES lang 2024-05-12 00:14:16 +01:00
Zedifus
ac4719158b Remove incorrectly placed translations 2024-05-11 23:56:06 +01:00
Zedifus
9de2d63d18 Merge branch 'lang/additional-lang-for-4.4.0' into dev 2024-05-11 23:44:14 +01:00
Iain Powrie
9f4e48deec Merge branch 'bugfix/authentication-return' into 'dev'
Fix API authentication stack

See merge request crafty-controller/crafty-4!759
2024-05-11 22:41:58 +00:00
Zedifus
c5e7b4a3b5 Update changelog !759 2024-05-11 23:32:20 +01:00
Analicia Abernathy
f0d934a7f9 updated language for merge 755 2024-05-11 11:14:58 -05:00
--unset
b1a7142dc0 Fix role permissions 2024-05-10 22:56:50 -04:00
--unset
88fd46282d Add expected return variable 2024-05-10 19:31:20 -04:00
Zedifus
738e145375 Close changelog v4.4.0 2024-05-10 21:57:25 +01:00
Zedifus
2a7146d079 Close changelog v4.4.0 2024-05-10 21:56:06 +01:00
Iain Powrie
64082092b4 Merge branch 'refactor/big-bucket' into 'dev'
Refactor SBuilder to use Big Bucket.

See merge request crafty-controller/crafty-4!755
2024-05-10 20:46:05 +00:00
Analicia Abernathy
1503d0f346 Update en_EN.json 2024-05-10 12:52:07 -05:00
Analicia Abernathy
3f3ca8b791 translations updates for merge 755 2024-05-10 12:51:10 -05:00
--unset
06bd04f41b Remove un-needed images 2024-05-09 17:49:35 -04:00
--unset
030ae69327 Merge branch 'refactor/big-bucket' of gitlab.com:crafty-controller/crafty-4 into refactor/big-bucket 2024-05-09 17:36:59 -04:00
--unset
47a1e33030 Add startup message 2024-05-09 17:36:56 -04:00
Zedifus
db6a0c4b62 Update changelog !755 2024-05-09 21:24:18 +01:00
Zedifus
718e76a01d Merge branch 'dev' into refactor/big-bucket 2024-05-09 21:23:07 +01:00
Iain Powrie
5bc1d74d13 Merge branch 'bugfix/childschedule' into 'dev'
Fix child schedule failing to load after del parent

See merge request crafty-controller/crafty-4!753
2024-05-09 20:16:59 +00:00
Zedifus
8f16b57b48 Update changelog !753 2024-05-09 21:11:14 +01:00
Zedifus
4227fc8339 Merge branch 'dev' into bugfix/childschedule 2024-05-09 21:09:51 +01:00
Iain Powrie
28ac3d9915 Merge branch 'tweak/json-audit-log' into 'dev'
Set audit logging to logfile instead of DB

See merge request crafty-controller/crafty-4!751
2024-05-09 20:07:54 +00:00
Zedifus
367e77765c Update changelog !751 2024-05-09 21:04:34 +01:00
Zedifus
39a98f2b5e Merge branch 'dev' into tweak/json-audit-log 2024-05-09 21:03:26 +01:00
Iain Powrie
4842d5c505 Merge branch 'bugfix/login-query' into 'dev'
Reset query arguments on login if ?next is not available

See merge request crafty-controller/crafty-4!750
2024-05-09 20:01:28 +00:00
Zedifus
4cd3bf17ae Update changelog !750 2024-05-09 20:58:26 +01:00
Zedifus
6257ea9c43 Merge branch 'dev' into bugfix/login-query 2024-05-09 20:56:19 +01:00
Iain Powrie
4799aad38c Merge branch 'lang/czech-updates' into 'dev'
Update to Czech translations

See merge request crafty-controller/crafty-4!749
2024-05-09 19:54:47 +00:00
Zedifus
100872966d Update changelog !749 2024-05-09 20:51:19 +01:00
Zedifus
31f7d88c3a Merge branch 'dev' into lang/czech-updates 2024-05-09 20:48:42 +01:00
Iain Powrie
8496ac91ef Merge branch 'tweak/error-return' into 'dev'
Add link to go back to dashboard on error page

See merge request crafty-controller/crafty-4!743
2024-05-09 19:47:20 +00:00
Zedifus
27eeb2fa0f Update changelog !743 2024-05-09 20:43:56 +01:00
Zedifus
c8c843a551 Merge branch 'dev' into tweak/error-return 2024-05-09 20:42:25 +01:00
Iain Powrie
5c9f2c25d7 Merge branch 'refactor/api-key-su' into 'dev'
Refactor API keys "super user" to "full access"

See merge request crafty-controller/crafty-4!731
2024-05-09 19:40:57 +00:00
Zedifus
6bcdb21bb8 Update changelog !731 2024-05-09 20:30:38 +01:00
Zedifus
4a466fbf6c Merge branch 'dev' into refactor/api-key-su 2024-05-09 20:28:38 +01:00
Zedifus
7c016d337f Merge branch 'dev' into refactor/big-bucket 2024-05-09 20:26:54 +01:00
Iain Powrie
2d73382c1e Merge branch 'devops/re-tag-test-jobs-gl17' into 'dev'
Re-tag test jobs to new shared runner standard (GL17)

See merge request crafty-controller/crafty-4!756
2024-05-09 19:22:01 +00:00
Zedifus
1655a16ba0 Re-tag test jobs to new shared runner standard (GL17) 2024-05-09 20:11:46 +01:00
--unset
29fc1f6f1f Refactor for new schema 2024-05-07 20:57:19 -04:00
--unset
71bd26a572 Change frontend for new manifest 2024-05-07 19:34:44 -04:00
--unset
54bac87b8a Add comment 2024-05-05 10:13:24 -04:00
--unset
5e778e9fd7 Allow self-hosted repo for server builder jars 2024-05-05 10:12:10 -04:00
--unset
88cee9903d Remove console log statements.
Add translations for big bucket
2024-05-05 10:11:43 -04:00
--unset
5b58af2226 Refactor add category back. 2024-05-05 09:44:17 -04:00
amcmanu3
76424aba7f Setup forge installs for big bucket 2024-05-04 20:29:11 -04:00
amcmanu3
33f2693bee Refactor/remove paper references 2024-05-04 20:01:13 -04:00
amcmanu3
3c0df76c4a Reset front end values on server select change 2024-05-04 20:00:34 -04:00
--unset
ef39564c0a Refactor server creation for big bucket 2024-05-04 15:55:52 -04:00
--unset
7967962e3a Refactor backend for big bucket 2024-05-04 15:15:43 -04:00
Username404-59
46f27a35b6 Fix cpu_freq call on obscure systems 2024-05-02 13:17:28 +00:00
amcmanu3
eebf68a376 Front end loading of backup edit page 2024-04-21 11:26:16 -04:00
amcmanu3
f2e00040bd Make backups list page load 2024-04-20 18:15:06 -04:00
amcmanu3
3bba043cf0 Get backup configs 2024-04-20 18:14:45 -04:00
amcmanu3
b898595371 Refactor backup config methods to add/update 2024-04-20 17:30:37 -04:00
amcmanu3
d998d82de0 Fix code quaity issues 2024-04-20 17:04:30 -04:00
amcmanu3
2134c4226d Fix audit log raw 2024-04-20 16:54:33 -04:00
amcmanu3
3fac1a39a8 Setup drop table migrations 2024-04-20 16:21:06 -04:00
amcmanu3
038f275388 Restructure translation json 2024-04-20 16:08:16 -04:00
amcmanu3
37373733d9 Add loading splash screen 2024-04-20 16:07:58 -04:00
amcmanu3
be2e6f5e6c Update acttivity log page to use new format 2024-04-20 15:47:50 -04:00
amcmanu3
92dfd18dbd Refactor API for new activity log format 2024-04-20 15:47:34 -04:00
amcmanu3
5fe696e269 Merge branch 'dev' into refactor/backups 2024-04-19 16:09:31 -04:00
amcmanu3
a76a2722db Fix child schedule failing to load after del parent 2024-04-19 16:05:35 -04:00
--unset
227d642546 Set audit logging to logfile instead of DB 2024-04-17 18:03:10 -04:00
--unset
89b552a880 Redirect to /login if garbage is in query args 2024-04-16 10:26:32 -04:00
--unset
92bd7b80c8 Remove console log on login 2024-04-16 10:26:09 -04:00
Analicia Abernathy
6c28ef6327 Update cs_CS.json 2024-04-15 17:48:53 -05:00
Analicia Abernathy
2a601c9f31 update translations for refactor/api-key-su 2024-04-11 09:16:03 -05:00
Analicia Abernathy
ab18daebe7 Merge branch 'refactor/api-key-su' of gitlab.com:crafty-controller/crafty-4 into refactor/api-key-su 2024-04-10 18:19:11 -05:00
Analicia Abernathy
ed37387fbf updated translations for branch refactor/api-key-su 2024-04-10 18:18:44 -05:00
amcmanu3
400f42ba26 Merge branch 'dev' into refactor/api-key-su 2024-04-08 13:42:16 -04:00
Analicia Abernathy
ba39480c51 Merge branch 'tweak/error-return' of gitlab.com:crafty-controller/crafty-4 into tweak/error-return 2024-04-07 18:25:15 -05:00
Analicia Abernathy
36f2256273 Update languages for merge 743 2024-04-07 18:25:05 -05:00
Zedifus
0fbf14063c Fix CL 4.3.3
Duplicate head
2024-04-07 02:33:34 +01:00
Zedifus
8106966146 Prepare 4.3.3 release base 2024-04-07 01:58:29 +01:00
amcmanu3
5f00f9d6fa Add full access translation 2024-04-06 13:53:38 -04:00
amcmanu3
9bed7092d6 Check for API permission as well as user permission 2024-04-06 13:42:41 -04:00
amcmanu3
46970e1283 Make dash button more noticable 2024-04-06 11:39:06 -04:00
Zedifus
0c1b81faca Merge branch 'dev' into tweak/error-return 2024-04-03 00:56:50 +01:00
amcmanu3
39cfd40fad Add link to go back to dashboard on error page 2024-04-01 19:09:26 -04:00
amcmanu3
089b49c85e Disable crafty perms user does not have access to 2024-03-25 13:04:09 -04:00
amcmanu3
c13ac6a53c Merge branch 'dev' into refactor/api-key-su 2024-03-25 12:36:43 -04:00
amcmanu3
0a572fba92 Refactor API keys "super user" to "full access" 2024-03-17 12:50:11 -04:00
amcmanu3
a4da773d25 Remove print statements 2024-03-09 12:49:59 -05:00
amcmanu3
1381cf77ef Fix most translations 2024-03-09 12:49:51 -05:00
amcmanu3
99ccaa925f Add backups to page 2024-03-08 23:23:36 -05:00
amcmanu3
eec9432118 Update migration
Add backup return function
2024-03-08 23:22:54 -05:00
amcmanu3
1a8d351fbd Remove print statements 2024-03-08 22:45:08 -05:00
amcmanu3
548a439f14 Initial commit for backup migration.
Kind of broken :/
2024-03-08 22:06:33 -05:00
amcmanu3
5c8cca6fda Merge branch 'dev' into refactor/backups 2024-03-08 21:56:03 -05:00
amcmanu3
0c517868b4 Merge branch 'dev' into refactor/backups 2024-03-08 16:23:45 -05:00
amcmanu3
e59b624025 Move backup path from servers to backup
Add uuid field to backups
2024-02-08 20:25:32 -05:00
117 changed files with 6093 additions and 2979 deletions

View File

@@ -5,7 +5,7 @@ yamllint:
stage: lint
image: registry.gitlab.com/pipeline-components/yamllint:latest
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -18,7 +18,7 @@ jsonlint:
stage: lint
image: registry.gitlab.com/pipeline-components/jsonlint:latest
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -33,7 +33,7 @@ black:
stage: lint
image: registry.gitlab.com/pipeline-components/black:latest
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -46,7 +46,7 @@ pylint:
stage: lint
image: registry.gitlab.com/pipeline-components/pylint:latest
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never
@@ -69,7 +69,7 @@ sonarcloud-check:
name: sonarsource/sonar-scanner-cli:latest
entrypoint: [""]
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$SONAR_TOKEN == null"
when: never
@@ -91,7 +91,7 @@ lang-check:
stage: lint
image: alpine:latest
tags:
- docker
- saas-linux-medium-amd64
rules:
- if: "$CODE_QUALITY_DISABLED"
when: never

View File

@@ -56,8 +56,8 @@ get_keys "${DIR}/en_EN.json" | sort > "${ref_keys}"
# Iterate over each .json file in the directory
for file in "${DIR}"/*.json; do
# Check if file is a regular file and not en_EN.json, and does not contain "_incomplete" in its name
if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && ! "${file}" =~ _incomplete ]]; then
# Check if file is a regular file and not en_EN.json, humanized index and does not contain "_incomplete" in its name
if [[ -f "${file}" && "${file}" != "${DIR}/en_EN.json" && "${file}" != "${DIR}/humanized_index.json" && ! "${file}" =~ _incomplete ]]; then
# Get keys and subkeys from the current file
current_keys=$(mktemp)

View File

@@ -44,6 +44,7 @@ def main():
if (
"_incomplete" not in file
and file != "en_EN.json"
and file != "humanized_index.json"
and file.endswith(".json")
):
file_path = os.path.join(root, file)

View File

@@ -1,4 +1,74 @@
# Changelog
## --- [4.4.3] - 2024/08/08
### Bug fixes
- Fix schedules creation fail due to missing action ID ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/791))
<br><br>
## --- [4.4.2] - 2024/08/07
### Bug fixes
- Migrations | Fix exception message on file not found for backups migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/789))
- UploadAPI | Upload chunks in batches to avoid overloading browser cache ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/788))
<br><br>
## --- [4.4.1] - 2024/08/06
### Patch Fixes
- Migrations | Fix orphan backup configurations crashing migration operation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
- Migrations | Fix missing default configuration if no server backup config exists during the migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
- Migrations | Fix extended runtime on move procedure during migration ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/785))
**-----------------------------------------------------------------------------**
**Initial release was reverted for patching (See Merge Request: [!784](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/784))** *2024/07/28*
**-----------------------------------------------------------------------------**
### Refactor
- Backups | Allow multiple backup configurations ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/711))
- UploadAPI | Use Crafty's JWT authentication for file uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
- UploadAPI | Splice files on the frontend to allow chunked uploads as well as bulk uploads ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
- UploadAPI | Enhance upload progress feedback on all upload pages ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
- UploadAPI | Consolidate and improve speed on uploads, supporting 100mb+ uploads through Cloudflare(Free) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/762))
### Bug fixes
- Fix zip imports so the root dir selection is functional ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/764))
- Fix bug where full access gives minimal access ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/768))
- Bump tornado & requests for sec advisories ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/774))
- Ensure audit.log exists or create it on Crafty startup ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/771))
- Fix typing issue on ID comparison causing general users to not be able to delete their own API keys ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/775))
- Fix user creation bug where it would fail when a role was selected ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
- Security improvements for general user creations on roles page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
- Security improvements for general user creations on user page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
- Use UTC for tokens_valid_from in user config, to resolve token invalidation on instance TZ change ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/765))
- Remove unused and problematic "dropdown-menu" ident from [!722](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) CSS ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/782))
### Tweaks
- Add info note to default creds file ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/760))
- Remove navigation label from sidebar ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/766))
- Do not allow slashes in server names ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/767))
- Add a thread dump to support logs ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/769))
- Remove text from status page and use symbols ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/770))
- Add better feedback on when errors appear on user creation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
- Workaround cpu_freq call catching on obscure cpu architectures ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/776))
- Change Role selector in server wizard to be a filter list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772))
### Lang
- Show natural language name instead of country code in User Config Lang select list ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/773))
- Add remaining `he_IL`, `th_TH` translations from **4.4.0** Release ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/761) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/763))
- Fix `fr_FR` syntax issues ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/780) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/778))
- Add ru_RU Translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/779))
- Add `th_TH` translations for [!772](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/772) ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/781))
<br><br>
## --- [4.4.0] - 2024/05/11
### Refactor
- Refactor API keys "super user" to "full access" ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/731) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/759))
- Refactor SBuilder to use Big Bucket Svc ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/755))
### Bug fixes
- Reset query arguments on login if `?next` is not available ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/750))
- Fix child schedule failing to load after del parent ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/753))
### Tweaks
- Add link to go back to dashboard on error page ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/743))
- Set audit logging to logfile instead of DB ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/751))
### Lang
- Changes of phrase in `cs_CS` translation ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/749))
<br><br>
## --- [4.3.2] - 2024/04/07
### Refactor
- Refactor ServerJars caching and move to api.serverjars.com ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/744) | [Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/746))

View File

@@ -1,5 +1,5 @@
[![Crafty Logo](app/frontend/static/assets/images/logo_long.svg)](https://craftycontrol.com)
# Crafty Controller 4.3.2
# Crafty Controller 4.4.3
> Python based Control Panel for your Minecraft Server
## What is Crafty Controller?

View File

@@ -5,6 +5,7 @@ from prometheus_client import CollectorRegistry, Gauge
from app.classes.models.management import HelpersManagement, HelpersWebhooks
from app.classes.models.servers import HelperServers
from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@@ -75,7 +76,7 @@ class ManagementController:
# Commands Methods
# **********************************************************************************
def send_command(self, user_id, server_id, remote_ip, command):
def send_command(self, user_id, server_id, remote_ip, command, action_id=None):
server_name = HelperServers.get_server_friendly_name(server_id)
# Example: Admin issued command start_server for server Survival
@@ -86,7 +87,12 @@ class ManagementController:
remote_ip,
)
self.queue_command(
{"server_id": server_id, "user_id": user_id, "command": command}
{
"server_id": server_id,
"user_id": user_id,
"command": command,
"action_id": action_id,
}
)
def queue_command(self, command_data):
@@ -95,9 +101,6 @@ class ManagementController:
# **********************************************************************************
# Audit_Log Methods
# **********************************************************************************
@staticmethod
def get_activity_log():
return HelpersManagement.get_activity_log()
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
return self.management_helper.add_to_audit_log(
@@ -126,6 +129,7 @@ class ManagementController:
cron_string="* * * * *",
parent=None,
delay=0,
action_id=None,
):
return HelpersManagement.create_scheduled_task(
server_id,
@@ -140,6 +144,7 @@ class ManagementController:
cron_string,
parent,
delay,
action_id,
)
@staticmethod
@@ -178,34 +183,47 @@ class ManagementController:
# Backups Methods
# **********************************************************************************
@staticmethod
def get_backup_config(server_id):
return HelpersManagement.get_backup_config(server_id)
def get_backup_config(backup_id):
return HelpersManagement.get_backup_config(backup_id)
def set_backup_config(
self,
server_id: int,
backup_path: str = None,
max_backups: int = None,
excluded_dirs: list = None,
compress: bool = False,
shutdown: bool = False,
before: str = "",
after: str = "",
):
return self.management_helper.set_backup_config(
server_id,
backup_path,
max_backups,
excluded_dirs,
compress,
shutdown,
before,
after,
@staticmethod
def get_backups_by_server(server_id, model=False):
return HelpersManagement.get_backups_by_server(server_id, model)
@staticmethod
def delete_backup_config(backup_id):
HelpersManagement.remove_backup_config(backup_id)
@staticmethod
def update_backup_config(backup_id, updates):
if "backup_location" in updates:
updates["backup_location"] = Helpers.wtol_path(updates["backup_location"])
return HelpersManagement.update_backup_config(backup_id, updates)
def add_backup_config(self, data) -> str:
if "backup_location" in data:
data["backup_location"] = Helpers.wtol_path(data["backup_location"])
return self.management_helper.add_backup_config(data)
def add_default_backup_config(self, server_id, backup_path):
return self.management_helper.add_backup_config(
{
"backup_name": "Default Backup",
"backup_location": Helpers.wtol_path(backup_path),
"max_backups": 0,
"before": "",
"after": "",
"compress": False,
"shutdown": False,
"server_id": server_id,
"excluded_dirs": [],
"default": True,
}
)
@staticmethod
def get_excluded_backup_dirs(server_id: int):
return HelpersManagement.get_excluded_backup_dirs(server_id)
def get_excluded_backup_dirs(backup_id: int):
return HelpersManagement.get_excluded_backup_dirs(backup_id)
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
self.management_helper.add_excluded_backup_dir(server_id, dir_to_add)
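For readers skimming the hunks above: the backups refactor (!711) replaces the single per-server backup settings with standalone backup-configuration records addressed by backup_id. A minimal sketch of what such a record looks like, inferred from the add_default_backup_config call above (field names come from the diff; the values are illustrative only):

```python
# Illustrative only: a backup-configuration record shaped like the one built
# by add_default_backup_config in the hunk above. All values are examples.
example_backup_config = {
    "backup_name": "Default Backup",
    "backup_location": "/crafty/backups/my-server",  # hypothetical path
    "max_backups": 0,             # as in the default config above
    "before": "",                 # empty in the default config above
    "after": "",                  # empty in the default config above
    "compress": False,
    "shutdown": False,
    "server_id": "example-server-id",  # hypothetical id
    "excluded_dirs": [],
    "default": True,
}
```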

View File

@@ -17,6 +17,10 @@ class ServerPermsController:
def get_server_user_list(server_id):
return PermissionsServers.get_server_user_list(server_id)
@staticmethod
def get_permissions(permissions_mask):
return PermissionsServers.get_permissions(permissions_mask)
@staticmethod
def list_defined_permissions():
permissions_list = PermissionsServers.get_permissions_list()
@@ -61,6 +65,22 @@ class ServerPermsController:
def get_permissions_mask(role_id, server_id):
return PermissionsServers.get_permissions_mask(role_id, server_id)
@staticmethod
def get_lowest_api_perm_mask(user_server_permissions_mask, api_key_permssions_mask):
mask = ""
# If this isn't an API key we'll know the request came from basic
# authentication and ignore the API key permissions mask.
if not api_key_permssions_mask:
return user_server_permissions_mask
for _index, (user_perm, api_perm) in enumerate(
zip(user_server_permissions_mask, api_key_permssions_mask)
):
if user_perm == "1" and api_perm == "1":
mask += "1"
else:
mask += "0"
return mask
@staticmethod
def set_permission(
permission_mask, permission_tested: EnumPermissionsServer, value
@@ -82,6 +102,11 @@ class ServerPermsController:
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
return PermissionsServers.get_api_key_permissions_list(key, server_id)
@staticmethod
def get_user_permissions_mask(user_id: str, server_id: str):
user = HelperUsers.get_user_model(user_id)
return PermissionsServers.get_user_permissions_mask(user, server_id)
@staticmethod
def get_authorized_servers_stats_from_roles(user_id):
user_roles = HelperUsers.get_user_roles_id(user_id)
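The get_lowest_api_perm_mask hunk above encodes the rule that an API request is only granted a server permission when both the user's permission mask and the API key's permission mask grant it, while requests without an API key fall back to the user mask alone. A standalone sketch of that rule, re-implemented here purely for illustration (not Crafty's API):

```python
from typing import Optional


def lowest_perm_mask(user_mask: str, api_key_mask: Optional[str]) -> str:
    """Illustrative re-implementation of the mask-intersection rule above."""
    if not api_key_mask:
        # No API-key mask: the request used basic auth, so the user mask applies as-is.
        return user_mask
    # A permission survives only if both masks grant it ("1" in both positions).
    return "".join(
        "1" if user_bit == "1" and key_bit == "1" else "0"
        for user_bit, key_bit in zip(user_mask, api_key_mask)
    )


# Example: a key narrower than the user's role yields the intersection.
assert lowest_perm_mask("1101", "1011") == "1001"
assert lowest_perm_mask("1101", None) == "1101"
```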

View File

@@ -48,7 +48,6 @@ class ServersController(metaclass=Singleton):
name: str,
server_uuid: str,
server_dir: str,
backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@@ -83,7 +82,6 @@ class ServersController(metaclass=Singleton):
server_uuid,
name,
server_dir,
backup_path,
server_command,
server_file,
server_log_file,
@@ -148,8 +146,7 @@ class ServersController(metaclass=Singleton):
PermissionsServers.delete_roles_permissions(role_id, role_data["servers"])
# Remove roles from server
PermissionsServers.remove_roles_of_server(server_id)
# Remove backup configs tied to server
self.management_helper.remove_backup_config(server_id)
self.management_helper.remove_all_server_backups(server_id)
# Finally remove server
self.servers_helper.remove_server(server_id)

View File

@@ -55,6 +55,7 @@ class UsersController:
"minLength": self.helper.minimum_password_length,
"examples": ["crafty"],
"title": "Password",
"error": "passLength",
},
"email": {
"type": "string",

View File

@@ -0,0 +1,53 @@
import logging
import logging.config
import json
from datetime import datetime
class JsonEncoderStrFallback(json.JSONEncoder):
def default(self, o):
try:
return super().default(o)
except TypeError as exc:
if "not JSON serializable" in str(exc):
return str(o)
raise
class JsonEncoderDatetime(JsonEncoderStrFallback):
def default(self, o):
if isinstance(o, datetime):
return o.strftime("%Y-%m-%dT%H:%M:%S%z")
return super().default(o)
class JsonFormatter(logging.Formatter):
def formatTime(self, record, datefmt=None):
"""
Override formatTime to customize the time format.
"""
timestamp = datetime.fromtimestamp(record.created)
if datefmt:
# Use the specified date format
return timestamp.strftime(datefmt)
# Default date format: YYYY-MM-DD HH:MM:SS,mmm
secs = int(record.msecs)
return f"{timestamp.strftime('%Y-%m-%d %H:%M:%S')},{secs:03d}"
def format(self, record):
log_data = {
"level": record.levelname,
"time": self.formatTime(record),
"log_msg": record.getMessage(),
}
# Filter out standard log record attributes and include only custom ones
custom_attrs = ["user_name", "user_id", "server_id", "source_ip"]
extra_attrs = {
key: value for key, value in record.__dict__.items() if key in custom_attrs
}
# Merge extra attributes with log data
log_data.update(extra_attrs)
return json.dumps(log_data)
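For orientation, the new formatter above emits one JSON object per log record and copies a fixed set of custom attributes (user_name, user_id, server_id, source_ip) from the record when they are supplied through logging's `extra` mechanism. A minimal usage sketch, assuming the JsonFormatter class defined above is in scope (handler target and logger name here are illustrative):

```python
import logging

# Assumes the JsonFormatter class from the new file above is in scope,
# e.g. this snippet is appended below that class for a quick test.
audit_logger = logging.getLogger("audit_demo")
handler = logging.StreamHandler()  # or a FileHandler("audit.log")
handler.setFormatter(JsonFormatter())
audit_logger.addHandler(handler)
audit_logger.setLevel(logging.INFO)

# Attributes passed via `extra` that match the formatter's custom_attrs list
# (user_name, user_id, server_id, source_ip) become top-level JSON keys.
audit_logger.info(
    "Backup started",
    extra={"user_name": "admin", "user_id": 1, "server_id": "abc123", "source_ip": "127.0.0.1"},
)
# Example output line:
# {"level": "INFO", "time": "2024-08-08 20:00:00,000", "log_msg": "Backup started",
#  "user_name": "admin", "user_id": 1, "server_id": "abc123", "source_ip": "127.0.0.1"}
```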

View File

@@ -0,0 +1,236 @@
import os
import json
import threading
import time
import logging
from datetime import datetime
import requests
from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
# Temp type var until sjars restores generic fetchTypes0
class BigBucket:
def __init__(self, helper):
self.helper = helper
# remove any trailing slash from config.json
# url since we add it on all the calls
self.base_url = str(
self.helper.get_setting("big_bucket_repo", "https://jars.arcadiatech.org")
).rstrip("/")
def _read_cache(self) -> dict:
cache_file = self.helper.big_bucket_cache
cache = {}
try:
with open(cache_file, "r", encoding="utf-8") as f:
cache = json.load(f)
except Exception as e:
logger.error(f"Unable to read big_bucket cache file: {e}")
return cache
def get_bucket_data(self):
data = self._read_cache()
return data.get("categories")
def _check_bucket_alive(self) -> bool:
logger.info("Checking Big Bucket status")
check_url = f"{self.base_url}/healthcheck"
try:
response = requests.get(check_url, timeout=2)
response_json = response.json()
if (
response.status_code in [200, 201]
and response_json.get("status") == "ok"
):
logger.info("Big bucket is alive and responding as expected")
return True
except Exception as e:
logger.error(f"Unable to connect to big bucket due to error: {e}")
return False
logger.error(
"Big bucket manifest is not available as expected or unable to contact"
)
return False
def _get_big_bucket(self) -> dict:
logger.debug("Calling for big bucket manifest.")
try:
response = requests.get(f"{self.base_url}/manifest.json", timeout=5)
if response.status_code in [200, 201]:
data = response.json()
del data["manifest_version"]
return data
return {}
except TimeoutError as e:
logger.error(f"Unable to get jars from remote with error {e}")
return {}
def _refresh_cache(self):
"""
Contains the shared logic for refreshing the cache.
This method is called by both manual_refresh_cache and refresh_cache methods.
"""
if not self._check_bucket_alive():
logger.error("big bucket API is not available.")
return False
cache_data = {
"last_refreshed": datetime.now().strftime("%m/%d/%Y, %H:%M:%S"),
"categories": self._get_big_bucket(),
}
try:
with open(
self.helper.big_bucket_cache, "w", encoding="utf-8"
) as cache_file:
json.dump(cache_data, cache_file, indent=4)
logger.info("Cache file successfully refreshed manually.")
except Exception as e:
logger.error(f"Failed to update cache file manually: {e}")
def manual_refresh_cache(self):
"""
Manually triggers the cache refresh process.
"""
logger.info("Manual bucket cache refresh initiated.")
self._refresh_cache()
logger.info("Manual refresh completed.")
def refresh_cache(self):
"""
Automatically trigger the cache refresh process based on cache age.
This method checks if the cache file is older than a specified number of days
before deciding to refresh.
"""
cache_file_path = self.helper.big_bucket_cache
# Determine if the cache is old and needs refreshing
cache_old = self.helper.is_file_older_than_x_days(cache_file_path)
# debug override
# cache_old = True
if not self._check_bucket_alive():
logger.error("big bucket API is not available.")
return False
if not cache_old:
logger.info("Cache file is not old enough to require automatic refresh.")
return False
logger.info("Automatic cache refresh initiated due to old cache.")
self._refresh_cache()
def get_fetch_url(self, jar, server, version) -> str:
"""
Constructs the URL for downloading a server JAR file based on the server type.
Parameters:
jar (str): The category of the JAR file to download.
server (str): Server software name (e.g., "paper").
version (str): Server version.
Returns:
str or None: URL for downloading the JAR file, or None if URL cannot be
constructed or an error occurs.
"""
try:
# Read cache file for URL that is in a list of one item
return self.get_bucket_data()[jar]["types"][server]["versions"][version][
"url"
][0]
except Exception as e:
logger.error(f"An error occurred while constructing fetch URL: {e}")
return None
def download_jar(self, jar, server, version, path, server_id):
update_thread = threading.Thread(
name=f"server_download-{server_id}-{server}-{version}",
target=self.a_download_jar,
daemon=True,
args=(jar, server, version, path, server_id),
)
update_thread.start()
def a_download_jar(self, jar, server, version, path, server_id):
"""
Downloads a server JAR file and performs post-download actions including
notifying users and setting import status.
This method waits for the server registration to complete, retrieves the
download URL for the specified server JAR file.
Upon successful download, it either runs the installer for
Forge servers or simply finishes the import process for other types. It
notifies server users about the completion of the download.
Parameters:
- jar (str): The category of the JAR file to download.
- server (str): The type of server software (e.g., 'forge', 'paper').
- version (str): The version of the server software.
- path (str): The local filesystem path where the JAR file will be saved.
- server_id (str): The unique identifier for the server being updated or
imported, used for notifying users and setting the import status.
Returns:
- bool: True if the JAR file was successfully downloaded and saved;
False otherwise.
The method ensures that the server is properly registered before proceeding
with the download and handles exceptions by logging errors and reverting
the import status if necessary.
"""
# delaying download for server register to finish
time.sleep(3)
fetch_url = self.get_fetch_url(jar, server, version)
if not fetch_url:
return False
server_users = PermissionsServers.get_server_user_list(server_id)
# Make sure the server is registered before updating its stats
while True:
try:
ServersController.set_import(server_id)
for user in server_users:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
break
except Exception as ex:
logger.debug(f"Server not registered yet. Delaying download - {ex}")
# Initiate Download
jar_dir = os.path.dirname(path)
jar_name = os.path.basename(path)
logger.info(fetch_url)
success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
# Post-download actions
if success:
if server == "forge-installer":
# If this is the newer Forge version, run the installer
ServersController.finish_import(server_id, True)
else:
ServersController.finish_import(server_id)
# Notify users
for user in server_users:
WebSocketManager().broadcast_user(
user, "notification", "Executable download finished"
)
time.sleep(3) # Delay for user notification
WebSocketManager().broadcast_user(user, "send_start_reload", {})
else:
logger.error(f"Unable to save jar to {path} due to download failure.")
ServersController.finish_import(server_id)
return success
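To make the cached-manifest lookups above easier to follow: get_fetch_url simply walks the cached Big Bucket manifest down to a one-element URL list. A sketch of the nesting it expects, inferred from that lookup (the category, type, version, and URL values below are invented for illustration):

```python
# Hypothetical slice of the cache file, shaped to match the lookup
# get_bucket_data()[jar]["types"][server]["versions"][version]["url"][0]
bucket_cache = {
    "last_refreshed": "08/08/2024, 20:00:00",
    "categories": {
        "servers": {                       # `jar` (category) - example name
            "types": {
                "paper": {                 # `server` (software) - example name
                    "versions": {
                        "1.21": {          # `version` - example name
                            "url": ["https://example.invalid/paper-1.21.jar"],
                        }
                    }
                }
            }
        }
    },
}

categories = bucket_cache["categories"]
fetch_url = categories["servers"]["types"]["paper"]["versions"]["1.21"]["url"][0]
print(fetch_url)  # https://example.invalid/paper-1.21.jar
```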

View File

@@ -1,395 +0,0 @@
import os
import json
import threading
import time
import logging
from datetime import datetime
import requests
from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
# Temp type var until sjars restores the generic fetchTypes endpoint
SERVERJARS_TYPES = ["modded", "proxies", "servers", "vanilla"]
PAPERJARS = ["paper", "folia"]
class ServerJars:
def __init__(self, helper):
self.helper = helper
self.base_url = "https://api.serverjars.com"
self.paper_base = "https://api.papermc.io"
@staticmethod
def get_paper_jars():
return PAPERJARS
def get_paper_versions(self, project):
"""
Retrieves a list of versions for a specified project from the PaperMC API.
Parameters:
project (str): The project name to query for available versions.
Returns:
list: A list of version strings available for the project. Returns an empty
list if the API call fails or if no versions are found.
This function makes a GET request to the PaperMC API to fetch available project
versions. The versions are returned in reverse order, with the most recent
version first.
"""
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/", timeout=2
)
response.raise_for_status()
api_data = response.json()
except Exception as e:
logger.error(f"Error loading project versions for {project}: {e}")
return []
versions = api_data.get("versions", [])
versions.reverse() # Ensure the most recent version comes first
return versions
def get_paper_build(self, project, version):
"""
Fetches the latest build for a specified project and version from PaperMC API.
Parameters:
project (str): Project name, typically a server software like 'paper'.
version (str): Project version to fetch the build number for.
Returns:
int or None: Latest build number if successful, None if not or on error.
This method attempts to query the PaperMC API for the latest build and
handles exceptions by logging errors and returning None.
"""
try:
response = requests.get(
f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
timeout=2,
)
response.raise_for_status()
api_data = response.json()
except Exception as e:
logger.error(f"Error fetching build for {project} {version}: {e}")
return None
builds = api_data.get("builds", [])
return builds[-1] if builds else None
def _read_cache(self):
cache_file = self.helper.serverjar_cache
cache = {}
try:
with open(cache_file, "r", encoding="utf-8") as f:
cache = json.load(f)
except Exception as e:
logger.error(f"Unable to read serverjars.com cache file: {e}")
return cache
def get_serverjar_data(self):
data = self._read_cache()
return data.get("types")
def _check_sjars_api_alive(self):
logger.info("Checking serverjars.com API status")
check_url = f"{self.base_url}"
try:
response = requests.get(check_url, timeout=2)
response_json = response.json()
if (
response.status_code in [200, 201]
and response_json.get("status") == "success"
and response_json.get("response", {}).get("status") == "ok"
):
logger.info("Serverjars.com API is alive and responding as expected")
return True
except Exception as e:
logger.error(f"Unable to connect to serverjar.com API due to error: {e}")
return False
logger.error(
"Serverjars.com API is not responding as expected or unable to contact"
)
return False
def _fetch_projects_for_type(self, server_type):
"""
Fetches projects for a given server type from the ServerJars API.
"""
try:
response = requests.get(
f"{self.base_url}/api/fetchTypes/{server_type}", timeout=5
)
response.raise_for_status() # Ensure HTTP errors are caught
data = response.json()
if data.get("status") == "success":
return data["response"].get("servers", [])
except requests.RequestException as e:
print(f"Error fetching projects for type {server_type}: {e}")
return []
def _get_server_type_list(self):
"""
Builds the type structure with projects fetched for each type.
"""
type_structure = {}
for server_type in SERVERJARS_TYPES:
projects = self._fetch_projects_for_type(server_type)
type_structure[server_type] = {project: [] for project in projects}
return type_structure
def _get_jar_versions(self, server_type, project_name, max_ver=50):
"""
Grabs available versions for specified project
Args:
server_type (str): Server Type Category (modded, servers, etc)
project_name (str): Target project (paper, forge, magma, etc)
max_ver (int, optional): Max versions returned. Defaults to 50.
Returns:
list: An array of versions
"""
url = f"{self.base_url}/api/fetchAll/{server_type}/{project_name}?max={max_ver}"
try:
response = requests.get(url, timeout=5)
response.raise_for_status() # Ensure HTTP errors are caught
data = response.json()
logger.debug(f"Received data for {server_type}/{project_name}: {data}")
if data.get("status") == "success":
versions = [
item.get("version")
for item in data.get("response", [])
if "version" in item
]
versions.reverse() # Reverse so versions are newest -> oldest
logger.debug(f"Versions extracted: {versions}")
return versions
except requests.RequestException as e:
logger.error(
f"Error fetching jar versions for {server_type}/{project_name}: {e}"
)
return []
def _refresh_cache(self):
"""
Contains the shared logic for refreshing the cache.
This method is called by both manual_refresh_cache and refresh_cache methods.
"""
now = datetime.now()
cache_data = {
"last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
"types": self._get_server_type_list(),
}
for server_type, projects in cache_data["types"].items():
for project_name in projects:
versions = self._get_jar_versions(server_type, project_name)
cache_data["types"][server_type][project_name] = versions
for paper_project in PAPERJARS:
cache_data["types"]["servers"][paper_project] = self.get_paper_versions(
paper_project
)
return cache_data
def manual_refresh_cache(self):
"""
Manually triggers the cache refresh process.
"""
if not self._check_sjars_api_alive():
logger.error("ServerJars API is not available.")
return False
logger.info("Manual cache refresh requested.")
cache_data = self._refresh_cache()
# Save the updated cache data
try:
with open(self.helper.serverjar_cache, "w", encoding="utf-8") as cache_file:
json.dump(cache_data, cache_file, indent=4)
logger.info("Cache file successfully refreshed manually.")
except Exception as e:
logger.error(f"Failed to update cache file manually: {e}")
def refresh_cache(self):
"""
Automatically triggers the cache refresh process based on cache age.
This method checks if the cache file is older than a specified number of days
before deciding to refresh.
"""
cache_file_path = self.helper.serverjar_cache
# Determine if the cache is old and needs refreshing
cache_old = self.helper.is_file_older_than_x_days(cache_file_path)
# debug override
# cache_old = True
if not self._check_sjars_api_alive():
logger.error("ServerJars API is not available.")
return False
if not cache_old:
logger.info("Cache file is not old enough to require automatic refresh.")
return False
logger.info("Automatic cache refresh initiated due to old cache.")
cache_data = self._refresh_cache()
# Save the updated cache data
try:
with open(cache_file_path, "w", encoding="utf-8") as cache_file:
json.dump(cache_data, cache_file, indent=4)
logger.info("Cache file successfully refreshed automatically.")
except Exception as e:
logger.error(f"Failed to update cache file automatically: {e}")
def get_fetch_url(self, jar, server, version):
"""
Constructs the URL for downloading a server JAR file based on the server type.
Supports two main types of server JAR sources:
- ServerJars API for servers not in PAPERJARS.
- Paper API for servers available through the Paper project.
Parameters:
jar (str): Name of the JAR file.
server (str): Server software name (e.g., "paper").
version (str): Server version.
Returns:
str or None: URL for downloading the JAR file, or None if URL cannot be
constructed or an error occurs.
"""
try:
# Check if the server type is not specifically handled by Paper.
if server not in PAPERJARS:
return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"
# For Paper servers, attempt to get the build for the specified version.
paper_build_info = self.get_paper_build(server, version)
if paper_build_info is None:
# Log an error or handle the case where paper_build_info is None
logger.error(
"Error: Unable to get build information for server:"
f" {server}, version: {version}"
)
return None
build = paper_build_info.get("build")
if not build:
# Log an error or handle the case where build is None or not found
logger.error(
f"Error: Build number not found for server:"
f" {server}, version: {version}"
)
return None
# Construct and return the URL for downloading the Paper server JAR.
return (
f"{self.paper_base}/v2/projects/{server}/versions/{version}/"
f"builds/{build}/downloads/{server}-{version}-{build}.jar"
)
except Exception as e:
logger.error(f"An error occurred while constructing fetch URL: {e}")
return None
def download_jar(self, jar, server, version, path, server_id):
update_thread = threading.Thread(
name=f"server_download-{server_id}-{server}-{version}",
target=self.a_download_jar,
daemon=True,
args=(jar, server, version, path, server_id),
)
update_thread.start()
def a_download_jar(self, jar, server, version, path, server_id):
"""
Downloads a server JAR file and performs post-download actions including
notifying users and setting import status.
This method waits for the server registration to complete and retrieves the
download URL for the specified server JAR file.
Upon successful download, it either runs the installer for
Forge servers or simply finishes the import process for other types. It
notifies server users about the completion of the download.
Parameters:
- jar (str): The name of the JAR file to download.
- server (str): The type of server software (e.g., 'forge', 'paper').
- version (str): The version of the server software.
- path (str): The local filesystem path where the JAR file will be saved.
- server_id (str): The unique identifier for the server being updated or
imported, used for notifying users and setting the import status.
Returns:
- bool: True if the JAR file was successfully downloaded and saved;
False otherwise.
The method ensures that the server is properly registered before proceeding
with the download and handles exceptions by logging errors and reverting
the import status if necessary.
"""
# Delay the download until server registration has finished
time.sleep(3)
fetch_url = self.get_fetch_url(jar, server, version)
if not fetch_url:
return False
server_users = PermissionsServers.get_server_user_list(server_id)
# Make sure the server is registered before updating its stats
while True:
try:
ServersController.set_import(server_id)
for user in server_users:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
break
except Exception as ex:
logger.debug(f"Server not registered yet. Delaying download - {ex}")
# Initiate Download
jar_dir = os.path.dirname(path)
jar_name = os.path.basename(path)
logger.info(fetch_url)
success = FileHelpers.ssl_get_file(fetch_url, jar_dir, jar_name)
# Post-download actions
if success:
if server == "forge":
# If this is the newer Forge version, run the installer
ServersController.finish_import(server_id, True)
else:
ServersController.finish_import(server_id)
# Notify users
for user in server_users:
WebSocketManager().broadcast_user(
user, "notification", "Executable download finished"
)
time.sleep(3) # Delay for user notification
WebSocketManager().broadcast_user(user, "send_start_reload", {})
else:
logger.error(f"Unable to save jar to {path} due to download failure.")
ServersController.finish_import(server_id)
return success
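As an illustration of the Paper branch in the deleted get_fetch_url above, here is a sketch of deriving the latest build number and download URL from the PaperMC v2 API. The endpoints mirror the removed code; error handling is simplified and the example project/version are placeholders.

# Sketch of the PaperMC build lookup and download-URL construction.
import requests

PAPER_BASE = "https://api.papermc.io"

def paper_download_url(project, version):
    resp = requests.get(
        f"{PAPER_BASE}/v2/projects/{project}/versions/{version}/builds/", timeout=2
    )
    resp.raise_for_status()
    builds = resp.json().get("builds", [])
    if not builds:
        return None
    build = builds[-1]["build"]  # the newest build is last in the list
    return (
        f"{PAPER_BASE}/v2/projects/{project}/versions/{version}/"
        f"builds/{build}/downloads/{project}-{version}-{build}.jar"
    )

# e.g. paper_download_url("paper", "1.20.4")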

View File

@ -86,7 +86,7 @@ class Stats:
def get_node_stats(self) -> NodeStatsReturnDict:
try:
cpu_freq = psutil.cpu_freq()
except (NotImplementedError, FileNotFoundError):
except (NotImplementedError, AttributeError, FileNotFoundError):
cpu_freq = None
if cpu_freq is None:
cpu_freq = psutil._common.scpufreq(current=-1, min=-1, max=-1)

View File

@ -187,7 +187,7 @@ class PermissionsCrafty:
@staticmethod
def get_api_key_permissions_list(key: ApiKeys):
user = HelperUsers.get_user(key.user_id)
if user["superuser"] and key.superuser:
if user["superuser"] and key.full_access:
return PermissionsCrafty.get_permissions_list()
if user["superuser"]:
# User is superuser but API key isn't

View File

@ -16,28 +16,11 @@ from app.classes.models.base_model import BaseModel
from app.classes.models.users import HelperUsers
from app.classes.models.servers import Servers
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.main_models import DatabaseShortcuts
from app.classes.shared.helpers import Helpers
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
# **********************************************************************************
# Audit_Log Class
# **********************************************************************************
class AuditLog(BaseModel):
audit_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
user_name = CharField(default="")
user_id = IntegerField(default=0, index=True)
source_ip = CharField(default="127.0.0.1")
server_id = ForeignKeyField(
Servers, backref="audit_server", null=True
) # When auditing global events, use server ID null
log_msg = TextField(default="")
class Meta:
table_name = "audit_log"
auth_logger = logging.getLogger("audit_log")
# **********************************************************************************
@ -105,6 +88,7 @@ class Schedules(BaseModel):
interval_type = CharField()
start_time = CharField(null=True)
command = CharField(null=True)
action_id = CharField(null=True)
name = CharField()
one_time = BooleanField(default=False)
cron_string = CharField(default="")
@ -120,13 +104,19 @@ class Schedules(BaseModel):
# Backups Class
# **********************************************************************************
class Backups(BaseModel):
backup_id = CharField(primary_key=True, default=Helpers.create_uuid)
backup_name = CharField(default="New Backup")
backup_location = CharField(default="")
excluded_dirs = CharField(null=True)
max_backups = IntegerField()
max_backups = IntegerField(default=0)
server_id = ForeignKeyField(Servers, backref="backups_server")
compress = BooleanField(default=False)
shutdown = BooleanField(default=False)
before = CharField(default="")
after = CharField(default="")
default = BooleanField(default=False)
status = CharField(default='{"status": "Standby", "message": ""}')
enabled = BooleanField(default=True)
class Meta:
table_name = "backups"
@ -149,10 +139,6 @@ class HelpersManagement:
# **********************************************************************************
# Audit_Log Methods
# **********************************************************************************
@staticmethod
def get_activity_log():
query = AuditLog.select()
return DatabaseShortcuts.return_db_rows(query)
def add_to_audit_log(self, user_id, log_msg, server_id=None, source_ip=None):
logger.debug(f"Adding to audit log User:{user_id} - Message: {log_msg} ")
@ -166,50 +152,28 @@ class HelpersManagement:
WebSocketManager().broadcast_user(user, "notification", audit_msg)
except Exception as e:
logger.error(f"Error broadcasting to user {user} - {e}")
AuditLog.insert(
{
AuditLog.user_name: user_data["username"],
AuditLog.user_id: user_id,
AuditLog.server_id: server_id,
AuditLog.log_msg: audit_msg,
AuditLog.source_ip: source_ip,
}
).execute()
# deletes records when there's more than 300
ordered = AuditLog.select().order_by(+AuditLog.created)
for item in ordered:
if not self.helper.get_setting("max_audit_entries"):
max_entries = 300
else:
max_entries = self.helper.get_setting("max_audit_entries")
if AuditLog.select().count() > max_entries:
AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
else:
return
auth_logger.info(
str(log_msg),
extra={
"user_name": user_data["username"],
"user_id": user_id,
"server_id": server_id,
"source_ip": source_ip,
},
)
def add_to_audit_log_raw(self, user_name, user_id, server_id, log_msg, source_ip):
AuditLog.insert(
{
AuditLog.user_name: user_name,
AuditLog.user_id: user_id,
AuditLog.server_id: server_id,
AuditLog.log_msg: log_msg,
AuditLog.source_ip: source_ip,
}
).execute()
# deletes records when there's more than 300
ordered = AuditLog.select().order_by(+AuditLog.created)
for item in ordered:
# configurable through app/config/config.json
if not self.helper.get_setting("max_audit_entries"):
max_entries = 300
else:
max_entries = self.helper.get_setting("max_audit_entries")
if AuditLog.select().count() > max_entries:
AuditLog.delete().where(AuditLog.audit_id == item.audit_id).execute()
else:
return
if isinstance(server_id, Servers) and server_id is not None:
server_id = server_id.server_id
auth_logger.info(
str(log_msg),
extra={
"user_name": user_name,
"user_id": user_id,
"server_id": server_id,
"source_ip": source_ip,
},
)
@staticmethod
def create_crafty_row():
@ -307,6 +271,7 @@ class HelpersManagement:
cron_string="* * * * *",
parent=None,
delay=0,
action_id=None,
):
sch_id = Schedules.insert(
{
@ -317,6 +282,7 @@ class HelpersManagement:
Schedules.interval_type: interval_type,
Schedules.start_time: start_time,
Schedules.command: command,
Schedules.action_id: action_id,
Schedules.name: name,
Schedules.one_time: one_time,
Schedules.cron_string: cron_string,
@ -379,133 +345,83 @@ class HelpersManagement:
# Backups Methods
# **********************************************************************************
@staticmethod
def get_backup_config(server_id):
try:
row = (
Backups.select().where(Backups.server_id == server_id).join(Servers)[0]
)
conf = {
"backup_path": row.server_id.backup_path,
"excluded_dirs": row.excluded_dirs,
"max_backups": row.max_backups,
"server_id": row.server_id_id,
"compress": row.compress,
"shutdown": row.shutdown,
"before": row.before,
"after": row.after,
}
except IndexError:
conf = {
"backup_path": None,
"excluded_dirs": None,
"max_backups": 0,
"server_id": server_id,
"compress": False,
"shutdown": False,
"before": "",
"after": "",
}
return conf
def get_backup_config(backup_id):
return model_to_dict(Backups.get(Backups.backup_id == backup_id))
@staticmethod
def remove_backup_config(server_id):
def get_backups_by_server(server_id, model=False):
if not model:
data = {}
for backup in (
Backups.select().where(Backups.server_id == server_id).execute()
):
data[str(backup.backup_id)] = {
"backup_id": backup.backup_id,
"backup_name": backup.backup_name,
"backup_location": backup.backup_location,
"excluded_dirs": backup.excluded_dirs,
"max_backups": backup.max_backups,
"server_id": backup.server_id_id,
"compress": backup.compress,
"shutdown": backup.shutdown,
"before": backup.before,
"after": backup.after,
"default": backup.default,
"enabled": backup.enabled,
}
else:
data = Backups.select().where(Backups.server_id == server_id).execute()
return data
@staticmethod
def get_default_server_backup(server_id: str) -> dict:
print(server_id)
bu_query = Backups.select().where(
Backups.server_id == server_id,
Backups.default == True, # pylint: disable=singleton-comparison
)
for item in bu_query:
print("HI", item)
backup_model = bu_query.first()
if backup_model:
return model_to_dict(backup_model)
raise IndexError
@staticmethod
def remove_all_server_backups(server_id):
Backups.delete().where(Backups.server_id == server_id).execute()
def set_backup_config(
self,
server_id: int,
backup_path: str = None,
max_backups: int = None,
excluded_dirs: list = None,
compress: bool = False,
shutdown: bool = False,
before: str = "",
after: str = "",
):
logger.debug(f"Updating server {server_id} backup config with {locals()}")
if Backups.select().where(Backups.server_id == server_id).exists():
new_row = False
conf = {}
else:
conf = {
"excluded_dirs": None,
"max_backups": 0,
"server_id": server_id,
"compress": False,
"shutdown": False,
"before": "",
"after": "",
}
new_row = True
if max_backups is not None:
conf["max_backups"] = max_backups
if excluded_dirs is not None:
dirs_to_exclude = ",".join(excluded_dirs)
@staticmethod
def remove_backup_config(backup_id):
Backups.delete().where(Backups.backup_id == backup_id).execute()
def add_backup_config(self, conf) -> str:
if "excluded_dirs" in conf:
dirs_to_exclude = ",".join(conf["excluded_dirs"])
conf["excluded_dirs"] = dirs_to_exclude
conf["compress"] = compress
conf["shutdown"] = shutdown
conf["before"] = before
conf["after"] = after
if not new_row:
with self.database.atomic():
if backup_path is not None:
server_rows = (
Servers.update(backup_path=backup_path)
.where(Servers.server_id == server_id)
.execute()
)
else:
server_rows = 0
backup_rows = (
Backups.update(conf).where(Backups.server_id == server_id).execute()
)
logger.debug(
f"Updating existing backup record. "
f"{server_rows}+{backup_rows} rows affected"
)
else:
with self.database.atomic():
conf["server_id"] = server_id
if backup_path is not None:
Servers.update(backup_path=backup_path).where(
Servers.server_id == server_id
)
Backups.create(**conf)
logger.debug("Creating new backup record.")
if len(self.get_backups_by_server(conf["server_id"], True)) <= 0:
conf["default"] = True
backup = Backups.create(**conf)
logger.debug("Creating new backup record.")
return backup.backup_id
@staticmethod
def get_excluded_backup_dirs(server_id: int):
excluded_dirs = HelpersManagement.get_backup_config(server_id)["excluded_dirs"]
def update_backup_config(backup_id, data):
if "excluded_dirs" in data:
dirs_to_exclude = ",".join(data["excluded_dirs"])
data["excluded_dirs"] = dirs_to_exclude
Backups.update(**data).where(Backups.backup_id == backup_id).execute()
@staticmethod
def get_excluded_backup_dirs(backup_id: int):
excluded_dirs = HelpersManagement.get_backup_config(backup_id)["excluded_dirs"]
if excluded_dirs is not None and excluded_dirs != "":
dir_list = excluded_dirs.split(",")
else:
dir_list = []
return dir_list
def add_excluded_backup_dir(self, server_id: int, dir_to_add: str):
dir_list = self.get_excluded_backup_dirs(server_id)
if dir_to_add not in dir_list:
dir_list.append(dir_to_add)
excluded_dirs = ",".join(dir_list)
self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
else:
logger.debug(
f"Not adding {dir_to_add} to excluded directories - "
f"already in the excluded directory list for server ID {server_id}"
)
def del_excluded_backup_dir(self, server_id: int, dir_to_del: str):
dir_list = self.get_excluded_backup_dirs(server_id)
if dir_to_del in dir_list:
dir_list.remove(dir_to_del)
excluded_dirs = ",".join(dir_list)
self.set_backup_config(server_id=server_id, excluded_dirs=excluded_dirs)
else:
logger.debug(
f"Not removing {dir_to_del} from excluded directories - "
f"not in the excluded directory list for server ID {server_id}"
)
# **********************************************************************************
# Webhooks Class

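A small sketch of the excluded_dirs round trip used by the new per-backup helpers above: the list is stored as a comma-separated string and exposed back to callers as a list. Field names follow the Backups model in this diff; the values are hypothetical.

# Sketch of the excluded_dirs packing/unpacking used by the backup helpers.
def pack_excluded_dirs(dirs):
    return ",".join(dirs)

def unpack_excluded_dirs(stored):
    return stored.split(",") if stored else []

conf = {
    "backup_name": "Nightly",            # defaults mirror the Backups model
    "backup_location": "/opt/backups",   # hypothetical location
    "excluded_dirs": pack_excluded_dirs(["logs", "cache"]),
    "max_backups": 7,
    "compress": True,
}
print(unpack_excluded_dirs(conf["excluded_dirs"]))  # ['logs', 'cache']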
View File

@ -264,7 +264,7 @@ class PermissionsServers:
@staticmethod
def get_api_key_permissions_list(key: ApiKeys, server_id: str):
user = HelperUsers.get_user(key.user_id)
if user["superuser"] and key.superuser:
if user["superuser"] and key.full_access:
return PermissionsServers.get_permissions_list()
roles_list = HelperUsers.get_user_roles_id(user["user_id"])
role_server = (

View File

@ -26,7 +26,6 @@ class Servers(BaseModel):
created = DateTimeField(default=datetime.datetime.now)
server_name = CharField(default="Server", index=True)
path = CharField(default="")
backup_path = CharField(default="")
executable = CharField(default="")
log_path = CharField(default="")
execution_command = CharField(default="")
@ -65,7 +64,6 @@ class HelperServers:
server_id: str,
name: str,
server_dir: str,
backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@ -81,7 +79,6 @@ class HelperServers:
name: The name of the server
server_uuid: This is the UUID of the server
server_dir: The directory where the server is located
backup_path: The path to the backup folder
server_command: The command to start the server
server_file: The name of the server file
server_log_file: The path to the server log file
@ -111,7 +108,6 @@ class HelperServers:
server_port=server_port,
server_ip=server_host,
stop_command=server_stop,
backup_path=backup_path,
type=server_type,
created_by=created_by,
).server_id

View File

@ -38,7 +38,7 @@ class Users(BaseModel):
superuser = BooleanField(default=False)
lang = CharField(default="en_EN")
support_logs = CharField(default="")
valid_tokens_from = DateTimeField(default=datetime.datetime.now)
valid_tokens_from = DateTimeField(default=Helpers.get_utc_now)
server_order = CharField(default="")
preparing = BooleanField(default=False)
hints = BooleanField(default=True)
@ -71,7 +71,7 @@ class ApiKeys(BaseModel):
user_id = ForeignKeyField(Users, backref="api_token", index=True)
server_permissions = CharField(default="00000000")
crafty_permissions = CharField(default="000")
superuser = BooleanField(default=False)
full_access = BooleanField(default=False)
class Meta:
table_name = "api_keys"
@ -119,7 +119,6 @@ class HelperUsers:
@staticmethod
def get_user_total():
count = Users.select().where(Users.username != "system").count()
print(count)
return count
@staticmethod
@ -408,7 +407,7 @@ class HelperUsers:
def add_user_api_key(
name: str,
user_id: str,
superuser: bool = False,
full_access: bool = False,
server_permissions_mask: t.Optional[str] = None,
crafty_permissions_mask: t.Optional[str] = None,
):
@ -426,7 +425,7 @@ class HelperUsers:
if crafty_permissions_mask is not None
else {}
),
ApiKeys.superuser: superuser,
ApiKeys.full_access: full_access,
}
).execute()

View File

@ -1,5 +1,6 @@
import logging
import time
from datetime import datetime
from typing import Optional, Dict, Any, Tuple
import jwt
from jwt import PyJWTError
@ -62,7 +63,17 @@ class Authentication:
user = HelperUsers.get_user(user_id)
# TODO: Have a cache or something so we don't constantly
# have to query the database
if int(user.get("valid_tokens_from").timestamp()) < iat:
valid_tokens_from_str = user.get("valid_tokens_from")
# It's possible this will be a string or a dt coming from the DB
# We need to account for that
try:
valid_tokens_from_dt = datetime.strptime(
valid_tokens_from_str, "%Y-%m-%d %H:%M:%S.%f%z"
)
except TypeError:
valid_tokens_from_dt = valid_tokens_from_str
# Convert the string to a datetime object
if int(valid_tokens_from_dt.timestamp()) < iat:
# Success!
return key, data, user
return None
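A brief sketch of the tolerant handling added above: valid_tokens_from can come back from the database as either a formatted string or a datetime, so the string case is parsed with strptime and the datetime case falls through unchanged. The sample value is hypothetical.

# Sketch of tolerant valid_tokens_from parsing.
from datetime import datetime

def to_datetime(value):
    try:
        # Stored form such as "2024-08-07 00:00:00.000000+00:00"
        return datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f%z")
    except TypeError:
        # Already a datetime object; use it as-is.
        return value

iat = 1722988800  # example token issued-at timestamp
value = "2024-08-07 00:00:00.000000+00:00"
print(int(to_datetime(value).timestamp()) < iat)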

View File

@ -4,7 +4,10 @@ import logging
import pathlib
import tempfile
import zipfile
from zipfile import ZipFile, ZIP_DEFLATED
import hashlib
from typing import BinaryIO
import mimetypes
from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
import urllib.request
import ssl
import time
@ -22,6 +25,7 @@ class FileHelpers:
def __init__(self, helper):
self.helper: Helpers = helper
self.mime_types = mimetypes.MimeTypes()
@staticmethod
def ssl_get_file(
@ -142,6 +146,32 @@ class FileHelpers:
logger.error(f"Path specified is not a file or does not exist. {path}")
return e
def check_mime_types(self, file_path):
m_type, _value = self.mime_types.guess_type(file_path)
return m_type
@staticmethod
def calculate_file_hash(file_path: str) -> str:
"""
Takes one parameter, a file path.
Generates and returns the SHA-256 hash of that file.
"""
sha256_hash = hashlib.sha256()
with open(file_path, "rb") as f:
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
return sha256_hash.hexdigest()
@staticmethod
def calculate_buffer_hash(buffer: BinaryIO) -> str:
"""
Takes one argument, a stream buffer, and returns the
SHA-256 hash of the buffer.
"""
sha256_hash = hashlib.sha256()
sha256_hash.update(buffer)
return sha256_hash.hexdigest()
@staticmethod
def copy_dir(src_path, dest_path, dirs_exist_ok=False):
# pylint: disable=unexpected-keyword-arg
@ -153,8 +183,7 @@ class FileHelpers:
@staticmethod
def move_dir(src_path, dest_path):
FileHelpers.copy_dir(src_path, dest_path)
FileHelpers.del_dirs(src_path)
shutil.move(src_path, dest_path)
@staticmethod
def move_dir_exist(src_path, dest_path):
@ -163,8 +192,7 @@ class FileHelpers:
@staticmethod
def move_file(src_path, dest_path):
FileHelpers.copy_file(src_path, dest_path)
FileHelpers.del_file(src_path)
shutil.move(src_path, dest_path)
@staticmethod
def make_archive(path_to_destination, path_to_zip, comment=""):
@ -229,74 +257,15 @@ class FileHelpers:
return True
def make_compressed_backup(
self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
):
# create a ZipFile object
path_to_destination += ".zip"
ex_replace = [p.replace("\\", "/") for p in excluded_dirs]
total_bytes = 0
dir_bytes = Helpers.get_dir_size(path_to_zip)
results = {
"percent": 0,
"total_files": self.helper.human_readable_file_size(dir_bytes),
}
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
{"id": str(server_id)},
"backup_status",
results,
)
with ZipFile(path_to_destination, "w", ZIP_DEFLATED) as zip_file:
zip_file.comment = bytes(
comment, "utf-8"
) # comments over 65535 bytes will be truncated
for root, dirs, files in os.walk(path_to_zip, topdown=True):
for l_dir in dirs:
if str(os.path.join(root, l_dir)).replace("\\", "/") in ex_replace:
dirs.remove(l_dir)
ziproot = path_to_zip
for file in files:
if (
str(os.path.join(root, file)).replace("\\", "/")
not in ex_replace
and file != "crafty.sqlite"
):
try:
logger.info(f"backing up: {os.path.join(root, file)}")
if os.name == "nt":
zip_file.write(
os.path.join(root, file),
os.path.join(root.replace(ziproot, ""), file),
)
else:
zip_file.write(
os.path.join(root, file),
os.path.join(root.replace(ziproot, "/"), file),
)
except Exception as e:
logger.warning(
f"Error backing up: {os.path.join(root, file)}!"
f" - Error was: {e}"
)
total_bytes += os.path.getsize(os.path.join(root, file))
percent = round((total_bytes / dir_bytes) * 100, 2)
results = {
"percent": percent,
"total_files": self.helper.human_readable_file_size(dir_bytes),
}
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
{"id": str(server_id)},
"backup_status",
results,
)
return True
def make_backup(
self, path_to_destination, path_to_zip, excluded_dirs, server_id, comment=""
self,
path_to_destination,
path_to_zip,
excluded_dirs,
server_id,
backup_id,
comment="",
compressed=None,
):
# create a ZipFile object
path_to_destination += ".zip"
@ -313,7 +282,15 @@ class FileHelpers:
"backup_status",
results,
)
with ZipFile(path_to_destination, "w") as zip_file:
WebSocketManager().broadcast_page_params(
"/panel/edit_backup",
{"id": str(server_id)},
"backup_status",
results,
)
# Set the compression mode based on the `compressed` parameter
compression_mode = ZIP_DEFLATED if compressed else ZIP_STORED
with ZipFile(path_to_destination, "w", compression_mode) as zip_file:
zip_file.comment = bytes(
comment, "utf-8"
) # comments over 65535 bytes will be truncated
@ -364,6 +341,7 @@ class FileHelpers:
results = {
"percent": percent,
"total_files": self.helper.human_readable_file_size(dir_bytes),
"backup_id": backup_id,
}
# send status results to page.
WebSocketManager().broadcast_page_params(
@ -372,6 +350,12 @@ class FileHelpers:
"backup_status",
results,
)
WebSocketManager().broadcast_page_params(
"/panel/edit_backup",
{"id": str(server_id)},
"backup_status",
results,
)
return True
@staticmethod

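For reference, a self-contained sketch of the two FileHelpers additions used in this diff: streaming a SHA-256 digest over a file in 4 KiB blocks, and selecting ZIP_DEFLATED versus ZIP_STORED from a compressed flag. The file names are placeholders.

# Sketch of the hashing and compression-mode selection shown above.
import hashlib
from zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED

def file_sha256(path):
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(4096), b""):
            sha.update(block)
    return sha.hexdigest()

def archive(dest, src, compressed):
    # ZIP_DEFLATED compresses entries; ZIP_STORED writes them uncompressed.
    mode = ZIP_DEFLATED if compressed else ZIP_STORED
    with ZipFile(dest, "w", mode) as zf:
        zf.write(src)

# e.g. archive("backup.zip", "server.properties", compressed=True)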
View File

@ -19,7 +19,7 @@ import shutil
import shlex
import subprocess
import itertools
from datetime import datetime
from datetime import datetime, timezone
from socket import gethostname
from contextlib import redirect_stderr, suppress
import libgravatar
@ -72,7 +72,7 @@ class Helpers:
self.db_path = os.path.join(
self.root_dir, "app", "config", "db", "crafty.sqlite"
)
self.serverjar_cache = os.path.join(self.config_dir, "serverjars.json")
self.big_bucket_cache = os.path.join(self.config_dir, "bigbucket.json")
self.credits_cache = os.path.join(self.config_dir, "credits.json")
self.passhasher = PasswordHasher()
self.exiting = False
@ -508,7 +508,6 @@ class Helpers:
"max_log_lines": 700,
"max_audit_entries": 300,
"disabled_language_files": [],
"stream_size_GB": 1,
"keywords": ["help", "chunk"],
"allow_nsfw_profile_pictures": False,
"enable_user_self_delete": False,
@ -516,6 +515,7 @@ class Helpers:
"monitored_mounts": mounts,
"dir_size_poll_freq_minutes": 5,
"crafty_logs_delete_after_days": 0,
"big_bucket_repo": "https://jars.arcadiatech.org",
}
def get_all_settings(self):
@ -639,6 +639,10 @@ class Helpers:
version = f"{major}.{minor}.{sub}"
return str(version)
@staticmethod
def get_utc_now() -> datetime:
return datetime.fromtimestamp(time.time(), tz=timezone.utc)
def encode_pass(self, password):
return self.passhasher.hash(password)
@ -1005,6 +1009,11 @@ class Helpers:
except PermissionError as e:
logger.critical(f"Check generated exception due to permssion error: {e}")
return False
except FileNotFoundError as e:
logger.critical(
f"Check generated exception due to file does not exist error: {e}"
)
return False
def create_self_signed_cert(self, cert_dir=None):
if cert_dir is None:

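A short sketch of the new timezone-aware default shown above: get_utc_now returns an aware UTC datetime, whereas datetime.datetime.now (the previous default for valid_tokens_from) is naive local time with no tzinfo.

# Sketch contrasting the aware UTC helper with a naive local "now".
import time
from datetime import datetime, timezone

def get_utc_now():
    return datetime.fromtimestamp(time.time(), tz=timezone.utc)

aware = get_utc_now()
naive = datetime.now()
print(aware.tzinfo, naive.tzinfo)  # UTC vs None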
View File

@ -1,4 +1,5 @@
import os
import sys
import pathlib
from pathlib import Path
from datetime import datetime
@ -32,7 +33,7 @@ from app.classes.shared.console import Console
from app.classes.shared.helpers import Helpers
from app.classes.shared.file_helpers import FileHelpers
from app.classes.shared.import_helper import ImportHelpers
from app.classes.minecraft.serverjars import ServerJars
from app.classes.minecraft.bigbucket import BigBucket
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
@ -43,7 +44,7 @@ class Controller:
self.helper: Helpers = helper
self.file_helper: FileHelpers = file_helper
self.import_helper: ImportHelpers = import_helper
self.server_jars: ServerJars = ServerJars(helper)
self.big_bucket: BigBucket = BigBucket(helper)
self.users_helper: HelperUsers = HelperUsers(database, self.helper)
self.roles_helper: HelperRoles = HelperRoles(database)
self.servers_helper: HelperServers = HelperServers(database)
@ -251,6 +252,19 @@ class Controller:
# Copy crafty logs to archive dir
full_log_name = os.path.join(crafty_path, "logs")
FileHelpers.copy_dir(os.path.join(self.project_root, "logs"), full_log_name)
thread_dump = ""
for thread in threading.enumerate():
if sys.version_info >= (3, 8):
thread_dump += (
f"Name: {thread.name}\tIdentifier:"
f" {thread.ident}\tTID/PID: {thread.native_id}\n"
)
else:
print(f"Name: {thread.name}\tIdentifier: {thread.ident}")
with open(
os.path.join(temp_dir, "crafty_thread_dump.txt"), "a", encoding="utf-8"
) as f:
f.write(thread_dump)
self.support_scheduler.add_job(
self.log_status,
"interval",
@ -436,7 +450,7 @@ class Controller:
if root_create_data["create_type"] == "download_jar":
if Helpers.is_os_windows():
# Let's check for and setup for install server commands
if create_data["type"] == "forge":
if create_data["type"] == "forge-installer":
server_command = (
f"java -Xms{Helpers.float_to_string(min_mem)}M "
f"-Xmx{Helpers.float_to_string(max_mem)}M "
@ -449,7 +463,7 @@ class Controller:
f'-jar "{server_file}" nogui'
)
else:
if create_data["type"] == "forge":
if create_data["type"] == "forge-installer":
server_command = (
f"java -Xms{Helpers.float_to_string(min_mem)}M "
f"-Xmx{Helpers.float_to_string(max_mem)}M "
@ -552,7 +566,6 @@ class Controller:
name=data["name"],
server_uuid=server_fs_uuid,
server_dir=new_server_path,
backup_path=backup_path,
server_command=server_command,
server_file=server_file,
server_log_file=log_location,
@ -562,26 +575,23 @@ class Controller:
server_host=monitoring_host,
server_type=monitoring_type,
)
self.management.set_backup_config(
self.management.add_default_backup_config(
new_server_id,
backup_path,
)
if data["create_type"] == "minecraft_java":
if root_create_data["create_type"] == "download_jar":
# modded update urls from server jars will only update the installer
if (
create_data["category"] != "modded"
and create_data["type"] not in ServerJars.get_paper_jars()
):
if create_data["type"] != "forge-installer":
server_obj = self.servers.get_server_obj(new_server_id)
url = (
"https://api.serverjars.com/api/fetchJar/"
f"{create_data['category']}"
f"/{create_data['type']}/{create_data['version']}"
url = self.big_bucket.get_fetch_url(
create_data["category"],
create_data["type"],
create_data["version"],
)
server_obj.executable_update_url = url
self.servers.update_server(server_obj)
self.server_jars.download_jar(
self.big_bucket.download_jar(
create_data["category"],
create_data["type"],
create_data["version"],
@ -711,7 +721,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
backup_path,
server_command,
server_jar,
server_log_file,
@ -765,7 +774,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
backup_path,
server_command,
server_exe,
server_log_file,
@ -810,7 +818,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
backup_path,
server_command,
server_exe,
server_log_file,
@ -858,7 +865,6 @@ class Controller:
server_name,
server_id,
new_server_dir,
backup_path,
server_command,
server_exe,
server_log_file,
@ -882,16 +888,13 @@ class Controller:
# **********************************************************************************
def rename_backup_dir(self, old_server_id, new_server_id, new_uuid):
server_data = self.servers.get_server_data_by_id(old_server_id)
server_obj = self.servers.get_server_obj(new_server_id)
old_bu_path = server_data["backup_path"]
ServerPermsController.backup_role_swap(old_server_id, new_server_id)
backup_path = old_bu_path
backup_path = os.path.join(self.helper.backup_path, old_server_id)
backup_path = Path(backup_path)
backup_path_components = list(backup_path.parts)
backup_path_components[-1] = new_uuid
new_bu_path = pathlib.PurePath(os.path.join(*backup_path_components))
server_obj.backup_path = new_bu_path
default_backup_dir = os.path.join(self.helper.backup_path, new_uuid)
try:
os.rmdir(default_backup_dir)
@ -905,7 +908,6 @@ class Controller:
name: str,
server_uuid: str,
server_dir: str,
backup_path: str,
server_command: str,
server_file: str,
server_log_file: str,
@ -920,7 +922,6 @@ class Controller:
name,
server_uuid,
server_dir,
backup_path,
server_command,
server_file,
server_log_file,
@ -985,16 +986,16 @@ class Controller:
f"Unable to delete server files for server with ID: "
f"{server_id} with error logged: {e}"
)
if Helpers.check_path_exists(
self.servers.get_server_data_by_id(server_id)["backup_path"]
):
FileHelpers.del_dirs(
Helpers.get_os_understandable_path(
self.servers.get_server_data_by_id(server_id)[
"backup_path"
]
backup_configs = HelpersManagement.get_backups_by_server(
server_id, True
)
for config in backup_configs:
if Helpers.check_path_exists(config.backup_location):
FileHelpers.del_dirs(
Helpers.get_os_understandable_path(
config.backup_location
)
)
)
# Cleanup scheduled tasks
try:

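A compact sketch of the thread-dump block added to the support-log collection above; thread.native_id only exists on Python 3.8 and later, which is what the version check guards against.

# Sketch of enumerating live threads for a support dump.
import sys
import threading

lines = []
for thread in threading.enumerate():
    if sys.version_info >= (3, 8):
        lines.append(
            f"Name: {thread.name}\tIdentifier: {thread.ident}"
            f"\tTID/PID: {thread.native_id}"
        )
    else:
        lines.append(f"Name: {thread.name}\tIdentifier: {thread.ident}")
print("\n".join(lines))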
View File

@ -207,9 +207,6 @@ class ServerInstance:
self.server_scheduler.start()
self.dir_scheduler.start()
self.start_dir_calc_task()
self.backup_thread = threading.Thread(
target=self.backup_server, daemon=True, name=f"backup_{self.name}"
)
self.is_backingup = False
# Reset crash and update at initialization
self.stats_helper.server_crash_reset()
@ -690,7 +687,8 @@ class ServerInstance:
try:
# Getting the forge version from the executable command
version = re.findall(
r"forge-([0-9\.]+)((?:)|(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
r"forge-installer-([0-9\.]+)((?:)|"
r"(?:-([0-9\.]+)-[a-zA-Z]+)).jar",
server_obj.execution_command,
)
version_param = version[0][0].split(".")
@ -939,8 +937,7 @@ class ServerInstance:
WebSocketManager().broadcast_user(user, "send_start_reload", {})
def restart_threaded_server(self, user_id):
bu_conf = HelpersManagement.get_backup_config(self.server_id)
if self.is_backingup and bu_conf["shutdown"]:
if self.is_backingup:
logger.info(
"Restart command detected. Supressing - server has"
" backup shutdown enabled and server is currently backing up."
@ -1110,12 +1107,16 @@ class ServerInstance:
f.write("eula=true")
self.run_threaded_server(user_id)
def a_backup_server(self):
if self.settings["backup_path"] == "":
logger.critical("Backup path is None. Canceling Backup!")
return
def server_backup_threader(self, backup_id, update=False):
# Check to see if we're already backing up
if self.check_backup_by_id(backup_id):
return False
backup_thread = threading.Thread(
target=self.backup_server, daemon=True, name=f"backup_{self.name}"
target=self.backup_server,
daemon=True,
name=f"backup_{backup_id}",
args=[backup_id, update],
)
logger.info(
f"Starting Backup Thread for server {self.settings['server_name']}."
@ -1126,27 +1127,20 @@ class ServerInstance:
"Backup Thread - Local server path not defined. "
"Setting local server path variable."
)
# checks if the backup thread is currently alive for this server
if not self.is_backingup:
try:
backup_thread.start()
self.is_backingup = True
except Exception as ex:
logger.error(f"Failed to start backup: {ex}")
return False
else:
logger.error(
f"Backup is already being processed for server "
f"{self.settings['server_name']}. Canceling backup request"
)
try:
backup_thread.start()
except Exception as ex:
logger.error(f"Failed to start backup: {ex}")
return False
logger.info(f"Backup Thread started for server {self.settings['server_name']}.")
@callback
def backup_server(self):
def backup_server(self, backup_id, update):
was_server_running = None
logger.info(f"Starting server {self.name} (ID {self.server_id}) backup")
server_users = PermissionsServers.get_server_user_list(self.server_id)
# Alert the start of the backup to the authorized users.
for user in server_users:
WebSocketManager().broadcast_user(
user,
@ -1156,30 +1150,40 @@ class ServerInstance:
).format(self.name),
)
time.sleep(3)
conf = HelpersManagement.get_backup_config(self.server_id)
# Get the backup config
conf = HelpersManagement.get_backup_config(backup_id)
# Adjust the location to include the backup ID for destination.
backup_location = os.path.join(conf["backup_location"], conf["backup_id"])
# Check if the backup location even exists.
if not backup_location:
Console.critical("No backup path found. Canceling")
return None
if conf["before"]:
if self.check_running():
logger.debug(
"Found running server and send command option. Sending command"
)
self.send_command(conf["before"])
logger.debug(
"Found running server and send command option. Sending command"
)
self.send_command(conf["before"])
# Pause to let command run
time.sleep(5)
if conf["shutdown"]:
if conf["before"]:
# pause to let people read message.
time.sleep(5)
logger.info(
"Found shutdown preference. Delaying"
+ "backup start. Shutting down server."
)
if self.check_running():
self.stop_server()
was_server_running = True
if not update:
was_server_running = False
if self.check_running():
self.stop_server()
was_server_running = True
self.helper.ensure_dir_exists(backup_location)
self.helper.ensure_dir_exists(self.settings["backup_path"])
try:
backup_filename = (
f"{self.settings['backup_path']}/"
f"{backup_location}/"
f"{datetime.datetime.now().astimezone(self.tz).strftime('%Y-%m-%d_%H-%M-%S')}" # pylint: disable=line-too-long
)
logger.info(
@ -1187,42 +1191,36 @@ class ServerInstance:
f" (ID#{self.server_id}, path={self.server_path}) "
f"at '{backup_filename}'"
)
excluded_dirs = HelpersManagement.get_excluded_backup_dirs(self.server_id)
excluded_dirs = HelpersManagement.get_excluded_backup_dirs(backup_id)
server_dir = Helpers.get_os_understandable_path(self.settings["path"])
if conf["compress"]:
logger.debug(
"Found compress backup to be true. Calling compressed archive"
)
self.file_helper.make_compressed_backup(
Helpers.get_os_understandable_path(backup_filename),
server_dir,
excluded_dirs,
self.server_id,
)
else:
logger.debug(
"Found compress backup to be false. Calling NON-compressed archive"
)
self.file_helper.make_backup(
Helpers.get_os_understandable_path(backup_filename),
server_dir,
excluded_dirs,
self.server_id,
)
self.file_helper.make_backup(
Helpers.get_os_understandable_path(backup_filename),
server_dir,
excluded_dirs,
self.server_id,
backup_id,
conf["backup_name"],
conf["compress"],
)
while (
len(self.list_backups()) > conf["max_backups"]
len(self.list_backups(conf)) > conf["max_backups"]
and conf["max_backups"] > 0
):
backup_list = self.list_backups()
backup_list = self.list_backups(conf)
oldfile = backup_list[0]
oldfile_path = f"{conf['backup_path']}/{oldfile['path']}"
oldfile_path = f"{backup_location}/{oldfile['path']}"
logger.info(f"Removing old backup '{oldfile['path']}'")
os.remove(Helpers.get_os_understandable_path(oldfile_path))
self.is_backingup = False
logger.info(f"Backup of server: {self.name} completed")
results = {"percent": 100, "total_files": 0, "current_file": 0}
results = {
"percent": 100,
"total_files": 0,
"current_file": 0,
"backup_id": backup_id,
}
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@ -1247,7 +1245,6 @@ class ServerInstance:
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
time.sleep(3)
self.last_backup_failed = False
if conf["after"]:
if self.check_running():
logger.debug(
@ -1255,12 +1252,21 @@ class ServerInstance:
)
self.send_command(conf["after"])
# pause to let people read message.
HelpersManagement.update_backup_config(
backup_id,
{"status": json.dumps({"status": "Standby", "message": ""})},
)
time.sleep(5)
except:
except Exception as e:
logger.exception(
f"Failed to create backup of server {self.name} (ID {self.server_id})"
)
results = {"percent": 100, "total_files": 0, "current_file": 0}
results = {
"percent": 100,
"total_files": 0,
"current_file": 0,
"backup_id": backup_id,
}
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
@ -1268,56 +1274,51 @@ class ServerInstance:
"backup_status",
results,
)
self.is_backingup = False
if was_server_running:
logger.info(
"Backup complete. User had shutdown preference. Starting server."
)
self.run_threaded_server(HelperUsers.get_user_id_by_name("system"))
self.last_backup_failed = True
def backup_status(self, source_path, dest_path):
results = Helpers.calc_percent(source_path, dest_path)
self.backup_stats = results
if len(WebSocketManager().clients) > 0:
WebSocketManager().broadcast_page_params(
"/panel/server_detail",
{"id": str(self.server_id)},
"backup_status",
results,
HelpersManagement.update_backup_config(
backup_id,
{"status": json.dumps({"status": "Failed", "message": f"{e}"})},
)
self.set_backup_status()
def last_backup_status(self):
return self.last_backup_failed
def send_backup_status(self):
try:
return self.backup_stats
except:
return {"percent": 0, "total_files": 0}
def set_backup_status(self):
backups = HelpersManagement.get_backups_by_server(self.server_id, True)
alert = False
for backup in backups:
if json.loads(backup.status)["status"] == "Failed":
alert = True
self.last_backup_failed = alert
def list_backups(self):
if not self.settings["backup_path"]:
def list_backups(self, backup_config: dict) -> list:
if not backup_config:
logger.info(
f"Error putting backup file list for server with ID: {self.server_id}"
)
return []
backup_location = os.path.join(
backup_config["backup_location"], backup_config["backup_id"]
)
if not Helpers.check_path_exists(
Helpers.get_os_understandable_path(self.settings["backup_path"])
Helpers.get_os_understandable_path(backup_location)
):
return []
files = Helpers.get_human_readable_files_sizes(
Helpers.list_dir_by_date(
Helpers.get_os_understandable_path(self.settings["backup_path"])
Helpers.get_os_understandable_path(backup_location)
)
)
return [
{
"path": os.path.relpath(
f["path"],
start=Helpers.get_os_understandable_path(
self.settings["backup_path"]
),
start=Helpers.get_os_understandable_path(backup_location),
),
"size": f["size"],
}
@ -1329,7 +1330,7 @@ class ServerInstance:
def jar_update(self):
self.stats_helper.set_update(True)
update_thread = threading.Thread(
target=self.a_jar_update, daemon=True, name=f"exe_update_{self.name}"
target=self.threaded_jar_update, daemon=True, name=f"exe_update_{self.name}"
)
update_thread.start()
@ -1370,10 +1371,26 @@ class ServerInstance:
def check_update(self):
return self.stats_helper.get_server_stats()["updating"]
def a_jar_update(self):
def threaded_jar_update(self):
server_users = PermissionsServers.get_server_user_list(self.server_id)
# check to make sure a backup config actually exists before starting the update
if len(self.management_helper.get_backups_by_server(self.server_id, True)) <= 0:
for user in server_users:
WebSocketManager().broadcast_user(
user,
"notification",
"Backup config does not exist for "
+ self.name
+ ". canceling update.",
)
logger.error(f"Back config does not exist for {self.name}. Update Failed.")
self.stats_helper.set_update(False)
return
was_started = "-1"
self.a_backup_server()
# Get default backup configuration
backup_config = HelpersManagement.get_default_server_backup(self.server_id)
# start threaded backup
self.server_backup_threader(backup_config["backup_id"], True)
# checks if server is running. Calls shutdown if it is running.
if self.check_running():
was_started = True
@ -1402,54 +1419,30 @@ class ServerInstance:
"string": message,
},
)
backup_dir = os.path.join(
Helpers.get_os_understandable_path(self.settings["path"]),
"crafty_executable_backups",
)
# checks if backup directory already exists
if os.path.isdir(backup_dir):
backup_executable = os.path.join(backup_dir, self.settings["executable"])
else:
logger.info(
f"Executable backup directory not found for Server: {self.name}."
f" Creating one."
)
os.mkdir(backup_dir)
backup_executable = os.path.join(backup_dir, self.settings["executable"])
if len(os.listdir(backup_dir)) > 0:
# removes old backup
logger.info(f"Old backups found for server: {self.name}. Removing...")
for item in os.listdir(backup_dir):
os.remove(os.path.join(backup_dir, item))
logger.info(f"Old backups removed for server: {self.name}.")
else:
logger.info(f"No old backups found for server: {self.name}")
current_executable = os.path.join(
Helpers.get_os_understandable_path(self.settings["path"]),
self.settings["executable"],
)
try:
# copies to backup dir
FileHelpers.copy_file(current_executable, backup_executable)
except FileNotFoundError:
logger.error("Could not create backup of jarfile. File not found.")
backing_up = True
# wait for backup
while self.is_backingup:
time.sleep(10)
while backing_up:
# Check to see if we're already backing up
backing_up = self.check_backup_by_id(backup_config["backup_id"])
time.sleep(2)
# check if backup was successful
if self.last_backup_failed:
backup_status = json.loads(
HelpersManagement.get_backup_config(backup_config["backup_id"])["status"]
)["status"]
if backup_status == "Failed":
for user in server_users:
WebSocketManager().broadcast_user(
user,
"notification",
"Backup failed for " + self.name + ". canceling update.",
)
return False
self.stats_helper.set_update(False)
return
# lets download the files
if HelperServers.get_server_type_by_id(self.server_id) != "minecraft-bedrock":
@ -1527,12 +1520,6 @@ class ServerInstance:
WebSocketManager().broadcast_user_page(
user, "/panel/dashboard", "send_start_reload", {}
)
WebSocketManager().broadcast_user(
user,
"notification",
"Executable update finished for " + self.name,
)
self.management_helper.add_to_audit_log_raw(
"Alert",
"-1",
@ -1655,6 +1642,14 @@ class ServerInstance:
except:
Console.critical("Can't broadcast server status to websocket")
def check_backup_by_id(self, backup_id: str) -> bool:
# Check to see if we're already backing up
for thread in threading.enumerate():
if thread.getName() == f"backup_{backup_id}":
Console.debug(f"Backup with id {backup_id} already running!")
return True
return False
def get_servers_stats(self):
server_stats = {}

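A minimal sketch of the pattern check_backup_by_id relies on: each backup runs in a thread named backup_<backup_id>, so an in-progress backup can be detected by scanning live thread names. The backup ID here is hypothetical.

# Sketch of detecting an in-progress backup via its thread name.
import threading
import time

def backup_running(backup_id):
    return any(t.name == f"backup_{backup_id}" for t in threading.enumerate())

def fake_backup():
    time.sleep(1)  # stand-in for the real backup work

worker = threading.Thread(target=fake_backup, daemon=True, name="backup_abc123")
worker.start()
print(backup_running("abc123"))  # True while the thread is alive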
View File

@ -140,7 +140,7 @@ class TasksManager:
)
elif command == "backup_server":
svr.a_backup_server()
svr.server_backup_threader(cmd["action_id"])
elif command == "update_executable":
svr.jar_update()
@ -240,6 +240,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
"action_id": schedule.action_id,
}
],
)
@ -268,6 +269,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
"action_id": schedule.action_id,
}
],
)
@ -284,6 +286,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
"action_id": schedule.action_id,
}
],
)
@ -303,6 +306,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
"action_id": schedule.action_id,
}
],
)
@ -337,6 +341,7 @@ class TasksManager:
job_data["cron_string"],
job_data["parent"],
job_data["delay"],
job_data.get("action_id", None),
)
# Checks to make sure some doofus didn't actually make the newly
@ -367,6 +372,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -393,6 +399,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -409,6 +416,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -428,6 +436,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -520,6 +529,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -543,6 +553,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -559,6 +570,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -578,6 +590,7 @@ class TasksManager:
"system"
),
"command": job_data["command"],
"action_id": job_data.get("action_id", None),
}
],
)
@ -653,6 +666,7 @@ class TasksManager:
"system"
),
"command": schedule.command,
"action_id": schedule.action_id,
}
],
)
@ -685,16 +699,16 @@ class TasksManager:
id="stats",
)
def serverjar_cache_refresher(self):
logger.info("Refreshing serverjars.com cache on start")
self.controller.server_jars.refresh_cache()
def big_bucket_cache_refresher(self):
logger.info("Refreshing big bucket cache on start")
self.controller.big_bucket.refresh_cache()
logger.info("Scheduling Serverjars.com cache refresh service every 12 hours")
logger.info("Scheduling big bucket cache refresh service every 12 hours")
self.scheduler.add_job(
self.controller.server_jars.refresh_cache,
self.controller.big_bucket.refresh_cache,
"interval",
hours=12,
id="serverjars",
id="big_bucket",
)
def realtime(self):
@ -785,6 +799,18 @@ class TasksManager:
self.helper.ensure_dir_exists(
os.path.join(self.controller.project_root, "import", "upload")
)
self.helper.ensure_dir_exists(
os.path.join(self.controller.project_root, "temp")
)
for file in os.listdir(os.path.join(self.controller.project_root, "temp")):
if self.helper.is_file_older_than_x_days(
os.path.join(self.controller.project_root, "temp", file)
):
try:
os.remove(os.path.join(file))
except FileNotFoundError:
logger.debug("Could not clear out file from temp directory")
for file in os.listdir(
os.path.join(self.controller.project_root, "import", "upload")
):
@ -793,7 +819,7 @@ class TasksManager:
):
try:
os.remove(os.path.join(file))
except:
except FileNotFoundError:
logger.debug("Could not clear out file from import directory")
def log_watcher(self):

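A sketch of the age-based temp cleanup added above, written with an explicit directory join before removal (the hunk above passes only the bare file name to os.path.join, which resolves against the current working directory). The age check here is a plain mtime comparison standing in for is_file_older_than_x_days.

# Sketch of age-based cleanup of a temp directory.
import os
import time

def remove_old_files(directory, max_age_days=1):
    cutoff = time.time() - max_age_days * 86400
    for name in os.listdir(directory):
        full_path = os.path.join(directory, name)
        if os.path.isfile(full_path) and os.path.getmtime(full_path) < cutoff:
            try:
                os.remove(full_path)
            except FileNotFoundError:
                pass  # already gone; nothing to clean up

# e.g. remove_old_files("/var/opt/crafty/temp", max_age_days=1)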
View File

@ -20,7 +20,7 @@ class Translation:
def get_language_file(self, language: str):
return os.path.join(self.translations_path, str(language) + ".json")
def translate(self, page, word, language):
def translate(self, page, word, language, error=True):
fallback_language = "en_EN"
translated_word = self.translate_inner(page, word, language)
@ -37,7 +37,9 @@ class Translation:
if hasattr(translated_word, "__iter__"):
# Multiline strings
return "\n".join(translated_word)
return "Error while getting translation"
if error:
return "Error while getting translation"
return word
def translate_inner(self, page, word, language) -> t.Union[t.Any, None]:
language_file = self.get_language_file(language)

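A minimal sketch of the new error flag on translate: when a key is missing and error=False, the untranslated word is returned instead of the error string. The lookup table is a stand-in for the real language files.

# Sketch of the translate() fallback behaviour.
TRANSLATIONS = {("panel", "backups"): "Backups"}  # stand-in for language files

def translate(page, word, error=True):
    translated = TRANSLATIONS.get((page, word))
    if translated is not None:
        return translated
    if error:
        return "Error while getting translation"
    return word  # fall back to the untranslated key

print(translate("panel", "unknown_key", error=False))  # "unknown_key"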
View File

@ -6,6 +6,7 @@ import nh3
import tornado.web
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.users import ApiKeys
from app.classes.shared.helpers import Helpers
from app.classes.shared.file_helpers import FileHelpers
@ -182,6 +183,7 @@ class BaseHandler(tornado.web.RequestHandler):
t.List[str],
bool,
t.Dict[str, t.Any],
str,
]
]:
try:
@ -190,9 +192,12 @@ class BaseHandler(tornado.web.RequestHandler):
)
superuser = user["superuser"]
server_permissions_api_mask = ""
if api_key is not None:
superuser = superuser and api_key.superuser
superuser = superuser and api_key.full_access
server_permissions_api_mask = api_key.server_permissions
if api_key.full_access:
server_permissions_api_mask = "1" * len(EnumPermissionsServer)
exec_user_role = set()
if superuser:
authorized_servers = self.controller.servers.get_all_defined_servers()
@ -214,6 +219,7 @@ class BaseHandler(tornado.web.RequestHandler):
user["user_id"]
)
)
logger.debug(user["roles"])
for r in user["roles"]:
role = self.controller.roles.get_role(r)
@ -234,6 +240,7 @@ class BaseHandler(tornado.web.RequestHandler):
exec_user_role,
superuser,
user,
server_permissions_api_mask,
)
logging.debug("Auth unsuccessful")
auth_log.error(

View File

@ -41,6 +41,8 @@ SUBPAGE_PERMS = {
"webhooks": EnumPermissionsServer.CONFIG,
}
SCHEDULE_AUTH_ERROR_URL = "/panel/error?error=Unauthorized access To Schedules"
class PanelHandler(BaseHandler):
def get_user_roles(self) -> t.Dict[str, list]:
@ -168,7 +170,7 @@ class PanelHandler(BaseHandler):
# Commented out because there is no server access control for API keys,
# they just inherit from the host user
# if api_key is not None:
# superuser = superuser and api_key.superuser
# superuser = superuser and api_key.full_access
if server_id is None:
self.redirect("/panel/error?error=Invalid Server ID")
@ -242,7 +244,7 @@ class PanelHandler(BaseHandler):
api_key, _token_data, exec_user = self.current_user
superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
superuser = superuser and api_key.full_access
if superuser: # TODO: Figure out a better solution
defined_servers = self.controller.servers.list_defined_servers()
@ -351,7 +353,7 @@ class PanelHandler(BaseHandler):
"created": api_key.created,
"server_permissions": api_key.server_permissions,
"crafty_permissions": api_key.crafty_permissions,
"superuser": api_key.superuser,
"full_access": api_key.full_access,
}
if api_key is not None
else None
@ -677,36 +679,18 @@ class PanelHandler(BaseHandler):
page_data["java_versions"] = page_java
if subpage == "backup":
server_info = self.controller.servers.get_server_data_by_id(server_id)
page_data["backup_config"] = (
self.controller.management.get_backup_config(server_id)
)
exclusions = []
page_data["exclusions"] = (
self.controller.management.get_excluded_backup_dirs(server_id)
page_data["backups"] = self.controller.management.get_backups_by_server(
server_id, model=True
)
page_data["backing_up"] = (
self.controller.servers.get_server_instance_by_id(
server_id
).is_backingup
)
page_data["backup_stats"] = (
self.controller.servers.get_server_instance_by_id(
server_id
).send_backup_status()
)
# makes it so relative path is the only thing shown
for file in page_data["exclusions"]:
if Helpers.is_os_windows():
exclusions.append(file.replace(server_info["path"] + "\\", ""))
else:
exclusions.append(file.replace(server_info["path"] + "/", ""))
page_data["exclusions"] = exclusions
self.controller.servers.refresh_server_settings(server_id)
try:
page_data["backup_list"] = server.list_backups()
except:
page_data["backup_list"] = []
page_data["backup_path"] = Helpers.wtol_path(server_info["backup_path"])
if subpage == "metrics":
try:
@ -780,20 +764,23 @@ class PanelHandler(BaseHandler):
elif page == "download_backup":
file = self.get_argument("file", "")
backup_id = self.get_argument("backup_id", "")
server_id = self.check_server_id()
if server_id is None:
return
backup_config = self.controller.management.get_backup_config(backup_id)
server_info = self.controller.servers.get_server_data_by_id(server_id)
backup_location = os.path.join(backup_config["backup_location"], backup_id)
backup_file = os.path.abspath(
os.path.join(
Helpers.get_os_understandable_path(server_info["backup_path"]), file
Helpers.get_os_understandable_path(backup_location),
file,
)
)
if not self.helper.is_subdir(
backup_file,
Helpers.get_os_understandable_path(server_info["backup_path"]),
Helpers.get_os_understandable_path(backup_location),
) or not os.path.isfile(backup_file):
self.redirect("/panel/error?error=Invalid path detected")
return
@ -892,6 +879,8 @@ class PanelHandler(BaseHandler):
os.path.join(self.helper.root_dir, "app", "translations")
)
):
if file == "humanized_index.json":
continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@ -1130,6 +1119,9 @@ class PanelHandler(BaseHandler):
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
page_data["backups"] = self.controller.management.get_backups_by_server(
server_id, True
)
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
@ -1150,6 +1142,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["delay"] = 0
page_data["schedule"]["time"] = ""
page_data["schedule"]["interval"] = 1
page_data["schedule"]["action_id"] = ""
# we don't need to check difficulty here.
# We'll just default to basic for new schedules
page_data["schedule"]["difficulty"] = "basic"
@ -1158,7 +1151,7 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
self.redirect("/panel/error?error=Unauthorized access To Schedules")
self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
@ -1195,6 +1188,9 @@ class PanelHandler(BaseHandler):
exec_user["user_id"], server_id
)
)
page_data["backups"] = self.controller.management.get_backups_by_server(
server_id, True
)
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
@ -1209,6 +1205,7 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["server_id"] = server_id
page_data["schedule"]["schedule_id"] = schedule.schedule_id
page_data["schedule"]["action"] = schedule.action
page_data["schedule"]["action_id"] = schedule.action_id
if schedule.name:
page_data["schedule"]["name"] = schedule.name
else:
@ -1237,9 +1234,11 @@ class PanelHandler(BaseHandler):
page_data["schedule"]["interval_type"] = schedule.interval_type
if schedule.interval_type == "reaction":
difficulty = "reaction"
page_data["parent"] = self.controller.management.get_scheduled_task(
schedule.parent
)
page_data["parent"] = None
if schedule.parent:
page_data["parent"] = self.controller.management.get_scheduled_task(
schedule.parent
)
elif schedule.cron_string == "":
difficulty = "basic"
page_data["parent"] = None
@ -1250,11 +1249,141 @@ class PanelHandler(BaseHandler):
if not EnumPermissionsServer.SCHEDULE in page_data["user_permissions"]:
if not superuser:
self.redirect("/panel/error?error=Unauthorized access To Schedules")
self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_schedule_edit.html"
elif page == "edit_backup":
server_id = self.get_argument("id", None)
backup_id = self.get_argument("backup_id", None)
page_data["active_link"] = "backups"
page_data["permissions"] = {
"Commands": EnumPermissionsServer.COMMANDS,
"Terminal": EnumPermissionsServer.TERMINAL,
"Logs": EnumPermissionsServer.LOGS,
"Schedule": EnumPermissionsServer.SCHEDULE,
"Backup": EnumPermissionsServer.BACKUP,
"Files": EnumPermissionsServer.FILES,
"Config": EnumPermissionsServer.CONFIG,
"Players": EnumPermissionsServer.PLAYERS,
}
if not self.failed_server:
server_obj = self.controller.servers.get_server_instance_by_id(
server_id
)
page_data["backup_failed"] = server_obj.last_backup_status()
page_data["user_permissions"] = (
self.controller.server_perms.get_user_id_permissions_list(
exec_user["user_id"], server_id
)
)
server_info = self.controller.servers.get_server_data_by_id(server_id)
page_data["backup_config"] = self.controller.management.get_backup_config(
backup_id
)
page_data["backups"] = self.controller.management.get_backups_by_server(
server_id, model=True
)
exclusions = []
page_data["backing_up"] = self.controller.servers.get_server_instance_by_id(
server_id
).is_backingup
self.controller.servers.refresh_server_settings(server_id)
try:
page_data["backup_list"] = server.list_backups(
page_data["backup_config"]
)
except:
page_data["backup_list"] = []
page_data["backup_path"] = Helpers.wtol_path(
page_data["backup_config"]["backup_location"]
)
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
page_data["server_stats"]["server_type"] = (
self.controller.servers.get_server_type_by_id(server_id)
)
page_data["exclusions"] = (
self.controller.management.get_excluded_backup_dirs(backup_id)
)
# Make exclusion paths relative for the page
for file in page_data["exclusions"]:
if Helpers.is_os_windows():
exclusions.append(file.replace(server_info["path"] + "\\", ""))
else:
exclusions.append(file.replace(server_info["path"] + "/", ""))
page_data["exclusions"] = exclusions
if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
if not superuser:
self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_backup_edit.html"
elif page == "add_backup":
server_id = self.get_argument("id", None)
backup_id = self.get_argument("backup_id", None)
page_data["active_link"] = "backups"
page_data["permissions"] = {
"Commands": EnumPermissionsServer.COMMANDS,
"Terminal": EnumPermissionsServer.TERMINAL,
"Logs": EnumPermissionsServer.LOGS,
"Schedule": EnumPermissionsServer.SCHEDULE,
"Backup": EnumPermissionsServer.BACKUP,
"Files": EnumPermissionsServer.FILES,
"Config": EnumPermissionsServer.CONFIG,
"Players": EnumPermissionsServer.PLAYERS,
}
if not self.failed_server:
server_obj = self.controller.servers.get_server_instance_by_id(
server_id
)
page_data["backup_failed"] = server_obj.last_backup_status()
page_data["user_permissions"] = (
self.controller.server_perms.get_user_id_permissions_list(
exec_user["user_id"], server_id
)
)
server_info = self.controller.servers.get_server_data_by_id(server_id)
page_data["backup_config"] = {
"excluded_dirs": [],
"max_backups": 0,
"server_id": server_id,
"backup_location": os.path.join(self.helper.backup_path, server_id),
"compress": False,
"shutdown": False,
"before": "",
"after": "",
}
page_data["backing_up"] = False
self.controller.servers.refresh_server_settings(server_id)
page_data["backup_list"] = []
page_data["backup_path"] = Helpers.wtol_path(
page_data["backup_config"]["backup_location"]
)
page_data["server_data"] = self.controller.servers.get_server_data_by_id(
server_id
)
page_data["server_stats"] = self.controller.servers.get_server_stats_by_id(
server_id
)
page_data["server_stats"]["server_type"] = (
self.controller.servers.get_server_type_by_id(server_id)
)
page_data["exclusions"] = []
if EnumPermissionsServer.BACKUP not in page_data["user_permissions"]:
if not superuser:
self.redirect(SCHEDULE_AUTH_ERROR_URL)
return
template = "panel/server_backup_edit.html"
elif page == "edit_user":
user_id = self.get_argument("id", None)
role_servers = self.controller.servers.get_authorized_servers(user_id)
@ -1305,6 +1434,8 @@ class PanelHandler(BaseHandler):
for file in sorted(
os.listdir(os.path.join(self.helper.root_dir, "app", "translations"))
):
if file == "humanized_index.json":
continue
if file.endswith(".json"):
if file.split(".")[0] not in self.helper.get_setting(
"disabled_language_files"
@ -1356,6 +1487,9 @@ class PanelHandler(BaseHandler):
page_data["crafty_permissions_all"] = (
self.controller.crafty_perms.list_defined_crafty_permissions()
)
page_data["user_crafty_permissions"] = (
self.controller.crafty_perms.get_crafty_permissions_list(user_id)
)
if user_id is None:
self.redirect("/panel/error?error=Invalid User ID")
@ -1503,8 +1637,6 @@ class PanelHandler(BaseHandler):
template = "panel/panel_edit_role.html"
elif page == "activity_logs":
page_data["audit_logs"] = self.controller.management.get_activity_log()
template = "panel/activity_logs.html"
elif page == "download_file":

View File

@ -48,7 +48,10 @@ class PublicHandler(BaseHandler):
}
if self.request.query:
page_data["query"] = self.request.query_arguments.get("next")[0].decode()
request_query = self.request.query_arguments.get("next")
if not request_query:
self.redirect("/login")
page_data["query"] = request_query[0].decode()
# sensible defaults
template = "public/404.html"

View File

@ -38,12 +38,14 @@ from app.classes.web.routes.api.servers.server.backups.index import (
)
from app.classes.web.routes.api.servers.server.backups.backup.index import (
ApiServersServerBackupsBackupIndexHandler,
ApiServersServerBackupsBackupFilesIndexHandler,
)
from app.classes.web.routes.api.servers.server.files import (
ApiServersServerFilesIndexHandler,
ApiServersServerFilesCreateHandler,
ApiServersServerFilesZipHandler,
)
from app.classes.web.routes.api.crafty.upload.index import ApiFilesUploadHandler
from app.classes.web.routes.api.servers.server.tasks.task.children import (
ApiServersServerTasksTaskChildrenHandler,
)
@ -218,13 +220,13 @@ def api_handlers(handler_args):
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/backups/backup/?",
r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/?",
ApiServersServerBackupsBackupIndexHandler,
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/files/?",
ApiServersServerFilesIndexHandler,
r"/api/v2/servers/([a-z0-9-]+)/backups/backup/([a-z0-9-]+)/files/?",
ApiServersServerBackupsBackupFilesIndexHandler,
handler_args,
),
(
@ -237,6 +239,26 @@ def api_handlers(handler_args):
ApiServersServerFilesZipHandler,
handler_args,
),
(
r"/api/v2/crafty/admin/upload/?",
ApiFilesUploadHandler,
handler_args,
),
(
r"/api/v2/servers/import/upload/?",
ApiFilesUploadHandler,
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/files/upload/?",
ApiFilesUploadHandler,
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/files(?:/([a-zA-Z0-9-]+))?/?",
ApiServersServerFilesIndexHandler,
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/tasks/?",
ApiServersServerTasksIndexHandler,
@ -273,7 +295,8 @@ def api_handlers(handler_args):
handler_args,
),
(
r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)/?",
# optional third argument for when we need an action ID
r"/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?",
ApiServersServerActionHandler,
handler_args,
),
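The optional third capture group in the action route means the handler receives None for action_id when the URL carries no trailing ID segment. A minimal sketch of that regex behaviour using plain re (not Tornado's router):

import re

ACTION_ROUTE = re.compile(
    r"^/api/v2/servers/([a-z0-9-]+)/action/([a-z_]+)(?:/([a-z0-9-]+))?/?$"
)
# No trailing ID: the third group is None, matching the handler's action_id=None default
print(ACTION_ROUTE.match("/api/v2/servers/abc123/action/clone_server").groups())
# ('abc123', 'clone_server', None)
# Trailing ID present: the third group carries it through to the handler
print(ACTION_ROUTE.match("/api/v2/servers/abc123/action/backup_server/def456").groups())
# ('abc123', 'backup_server', 'def456')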

View File

@ -1,6 +1,6 @@
import datetime
import logging
from app.classes.web.base_api_handler import BaseApiHandler
from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
@ -13,7 +13,7 @@ class ApiAuthInvalidateTokensHandler(BaseApiHandler):
logger.debug(f"Invalidate tokens for user {auth_data[4]['user_id']}")
self.controller.users.raw_update_user(
auth_data[4]["user_id"], {"valid_tokens_from": datetime.datetime.now()}
auth_data[4]["user_id"], {"valid_tokens_from": Helpers.get_utc_now()}
)
self.finish_json(200, {"status": "ok"})

View File

@ -26,6 +26,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
_,
) = auth_data
data = self.helper.get_announcements()
@ -72,6 +73,7 @@ class ApiAnnounceIndexHandler(BaseApiHandler):
_,
_,
_user,
_,
) = auth_data
try:
data = json.loads(self.request.body)

View File

@ -1,3 +1,5 @@
import os
import json
from app.classes.web.base_api_handler import BaseApiHandler
@ -12,6 +14,7 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
if not superuser:
@ -22,9 +25,17 @@ class ApiCraftyLogIndexHandler(BaseApiHandler):
raise NotImplementedError
if log_type == "audit":
with open(
os.path.join(self.controller.project_root, "logs", "audit.log"),
"r",
encoding="utf-8",
) as f:
log_lines = [json.loads(line) for line in f]
rev_log_lines = log_lines[::-1]
return self.finish_json(
200,
{"status": "ok", "data": self.controller.management.get_activity_log()},
{"status": "ok", "data": rev_log_lines},
)
if log_type == "session":

View File

@ -31,6 +31,7 @@ config_json_schema = {
"monitored_mounts": {"type": "array"},
"dir_size_poll_freq_minutes": {"type": "integer"},
"crafty_logs_delete_after_days": {"type": "integer"},
"big_bucket_repo": {"type": "string"},
},
"additionalProperties": False,
"minProperties": 1,
@ -67,6 +68,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
# GET /api/v2/roles?ids=true
@ -93,13 +95,7 @@ class ApiCraftyConfigIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
(
_,
_,
_,
superuser,
user,
) = auth_data
(_, _, _, superuser, user, _) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -149,6 +145,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
# GET /api/v2/roles?ids=true
@ -181,6 +178,7 @@ class ApiCraftyCustomizeIndexHandler(BaseApiHandler):
_,
superuser,
user,
_,
) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

View File

@ -24,6 +24,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
# GET /api/v2/roles?ids=true
@ -56,6 +57,7 @@ class ApiCraftyConfigServerDirHandler(BaseApiHandler):
_,
_,
_,
_,
) = auth_data
if not auth_data:

View File

@ -12,16 +12,17 @@ class ApiCraftyJarCacheIndexHandler(BaseApiHandler):
_,
_,
_,
_,
) = auth_data
if not auth_data[4]["superuser"]:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.server_jars.manual_refresh_cache()
self.controller.big_bucket.manual_refresh_cache()
self.finish_json(
200,
{
"status": "ok",
"data": self.controller.server_jars.get_serverjar_data(),
"data": self.controller.big_bucket.get_bucket_data(),
},
)

View File

@ -0,0 +1,308 @@
import os
import logging
import shutil
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.shared.helpers import Helpers
from app.classes.web.base_api_handler import BaseApiHandler
logger = logging.getLogger(__name__)
IMAGE_MIME_TYPES = [
"image/bmp",
"image/cis-cod",
"image/gif",
"image/ief",
"image/jpeg",
"image/pipeg",
"image/svg+xml",
"image/tiff",
"image/x-cmu-raster",
"image/x-cmx",
"image/x-icon",
"image/x-portable-anymap",
"image/x-portable-bitmap",
"image/x-portable-graymap",
"image/x-portable-pixmap",
"image/x-rgb",
"image/x-xbitmap",
"image/x-xpixmap",
"image/x-xwindowdump",
"image/png",
"image/webp",
]
ARCHIVE_MIME_TYPES = ["application/zip"]
class ApiFilesUploadHandler(BaseApiHandler):
async def post(self, server_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
upload_type = self.request.headers.get("type")
accepted_types = []
if server_id:
# Check to make sure user is authorized for the server
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(
400, {"status": "error", "error": "NOT_AUTHORIZED"}
)
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
# Make sure user has file access for the server
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(
400, {"status": "error", "error": "NOT_AUTHORIZED"}
)
u_type = "server_upload"
# Make sure user is a super user if they're changing panel settings
elif auth_data[4]["superuser"] and upload_type == "background":
u_type = "admin_config"
self.upload_dir = os.path.join(
self.controller.project_root,
"app/frontend/static/assets/images/auth/custom",
)
accepted_types = IMAGE_MIME_TYPES
elif upload_type == "import":
# Check that user can make servers
if (
not self.controller.crafty_perms.can_create_server(
auth_data[4]["user_id"]
)
and not auth_data[4]["superuser"]
):
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"data": {"message": ""},
},
)
# Set directory to upload import dir
self.upload_dir = os.path.join(
self.controller.project_root, "import", "upload"
)
u_type = "server_import"
accepted_types = ARCHIVE_MIME_TYPES
else:
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"data": {"message": ""},
},
)
# Get the headers from the request
self.chunk_hash = self.request.headers.get("chunkHash", 0)
self.file_id = self.request.headers.get("fileId")
self.chunked = self.request.headers.get("chunked", False)
self.filename = self.request.headers.get("fileName", None)
try:
file_size = int(self.request.headers.get("fileSize", None))
total_chunks = int(self.request.headers.get("totalChunks", 0))
except TypeError:
return self.finish_json(
400, {"status": "error", "error": "TYPE ERROR", "data": {}}
)
self.chunk_index = self.request.headers.get("chunkId")
if u_type == "server_upload":
self.upload_dir = self.request.headers.get("location", None)
self.temp_dir = os.path.join(self.controller.project_root, "temp", self.file_id)
if u_type == "server_upload":
# If this is an upload from a server, the path will be what
# is requested
full_path = os.path.join(self.upload_dir, self.filename)
# Check to make sure the requested path is inside the server's directory
if not self.helper.is_subdir(
full_path,
Helpers.get_os_understandable_path(
self.controller.servers.get_server_data_by_id(server_id)["path"]
),
):
return self.finish_json(
400,
{
"status": "error",
"error": "NOT AUTHORIZED",
"data": {"message": "Traversal detected"},
},
)
# Check to make sure the file type we're being sent is what we're expecting
if (
self.file_helper.check_mime_types(self.filename) not in accepted_types
and u_type != "server_upload"
):
return self.finish_json(
422,
{
"status": "error",
"error": "INVALID FILE TYPE",
"data": {
"message": f"Invalid File Type only accepts {accepted_types}"
},
},
)
_total, _used, free = shutil.disk_usage(self.upload_dir)
# Check to see if we have enough space
if free <= file_size:
return self.finish_json(
507,
{
"status": "error",
"error": "NO STORAGE SPACE",
"data": {"message": "Out Of Space!"},
},
)
# If this has no chunk index we know it's the initial request
if self.chunked and not self.chunk_index:
return self.finish_json(
200, {"status": "ok", "data": {"file-id": self.file_id}}
)
# Create the upload directory if it doesn't exist
os.makedirs(self.upload_dir, exist_ok=True)
# Check for chunked header. We will handle this request differently
# if it doesn't exist
if not self.chunked:
# Write the file directly to the upload dir
with open(os.path.join(self.upload_dir, self.filename), "wb") as file:
chunk = self.request.body
if chunk:
file.write(chunk)
# We'll check the file hash against the sent hash once the file is
# written. We cannot check this buffer.
calculated_hash = self.file_helper.calculate_file_hash(
os.path.join(self.upload_dir, self.filename)
)
logger.info(
f"File upload completed. Filename: {self.filename} Type: {u_type}"
)
return self.finish_json(
200,
{
"status": "completed",
"data": {"message": "File uploaded successfully"},
},
)
# Since this is a chunked upload we'll create the temp dir for parts.
os.makedirs(self.temp_dir, exist_ok=True)
# Read headers and query parameters
content_length = int(self.request.headers.get("Content-Length"))
if content_length <= 0:
logger.error(
f"File upload failed. Filename: {self.filename}"
f" Type: {u_type} Error: INVALID CONTENT LENGTH"
)
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID CONTENT LENGTH",
"data": {"message": "Invalid content length"},
},
)
# At this point filename, chunk index and total chunks are required
# in the request
if not self.filename or self.chunk_index is None:
logger.error(
f"File upload failed. Filename: {self.filename}"
f" Type: {u_type} Error: CHUNK INDEX NOT FOUND"
)
return self.finish_json(
400,
{
"status": "error",
"error": "INDEX ERROR",
"data": {
"message": "Filename, chunk_index,"
" and total_chunks are required"
},
},
)
# Calculate the hash of the buffer and compare it against the expected hash
calculated_hash = self.file_helper.calculate_buffer_hash(self.request.body)
if str(self.chunk_hash) != str(calculated_hash):
logger.error(
f"File upload failed. Filename: {self.filename}"
f" Type: {u_type} Error: INVALID HASH"
)
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_HASH",
"data": {
"message": "Hash received does not match the reported hash.",
"chunk_id": self.chunk_index,
},
},
)
# File paths
file_path = os.path.join(self.upload_dir, self.filename)
chunk_path = os.path.join(
self.temp_dir, f"{self.filename}.part{self.chunk_index}"
)
# Save the chunk
with open(chunk_path, "wb") as f:
f.write(self.request.body)
# Check if all chunks are received
received_chunks = [
f
for f in os.listdir(self.temp_dir)
if f.startswith(f"{self.filename}.part")
]
# When we've received all of the chunks we'll
# reassemble the parts and write the file
if len(received_chunks) == total_chunks:
with open(file_path, "wb") as outfile:
for i in range(total_chunks):
chunk_file = os.path.join(self.temp_dir, f"{self.filename}.part{i}")
with open(chunk_file, "rb") as infile:
outfile.write(infile.read())
os.remove(chunk_file)
logger.info(
f"File upload completed. Filename: {self.filename}"
f" Path: {file_path} Type: {u_type}"
)
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"Uploaded file {self.filename}",
server_id,
self.request.remote_ip,
)
self.finish_json(
200,
{
"status": "completed",
"data": {"message": "File uploaded successfully"},
},
)
else:
self.finish_json(
200,
{
"status": "partial",
"data": {"message": f"Chunk {self.chunk_index} received"},
},
)
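In outline, the chunked path above verifies each chunk's hash before persisting its part file, then reassembles the parts in index order once all of them have arrived. A minimal standalone sketch of that flow, with a SHA-256 helper standing in for the file_helper hash calls (an assumption, not necessarily the algorithm Crafty uses):

import hashlib
import os

def buffer_hash(data: bytes) -> str:
    # Stand-in for calculate_buffer_hash; only the comparison flow matters here
    return hashlib.sha256(data).hexdigest()

def save_chunk(temp_dir: str, filename: str, index: int, body: bytes, expected_hash: str) -> None:
    # Reject the chunk if its hash does not match what the client reported
    if buffer_hash(body) != expected_hash:
        raise ValueError(f"chunk {index} hash mismatch")
    with open(os.path.join(temp_dir, f"{filename}.part{index}"), "wb") as part:
        part.write(body)

def reassemble(temp_dir: str, upload_dir: str, filename: str, total_chunks: int) -> None:
    # Concatenate parts in index order, removing each part once consumed
    with open(os.path.join(upload_dir, filename), "wb") as outfile:
        for i in range(total_chunks):
            part_path = os.path.join(temp_dir, f"{filename}.part{i}")
            with open(part_path, "rb") as infile:
                outfile.write(infile.read())
            os.remove(part_path)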

View File

@ -2,6 +2,7 @@ import typing as t
from jsonschema import ValidationError, validate
import orjson
from playhouse.shortcuts import model_to_dict
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
create_role_schema = {
@ -10,6 +11,7 @@ create_role_schema = {
"name": {
"type": "string",
"minLength": 1,
"pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@ -22,7 +24,7 @@ create_role_schema = {
},
"permissions": {
"type": "string",
"pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
"pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@ -71,16 +73,20 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
_,
exec_user_permissions_crafty,
_,
superuser,
_,
_,
) = auth_data
# GET /api/v2/roles?ids=true
get_only_ids = self.get_query_argument("ids", None) == "true"
if not superuser:
if (
not superuser
and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@ -103,13 +109,17 @@ class ApiRolesIndexHandler(BaseApiHandler):
return
(
_,
_,
exec_user_permissions_crafty,
_,
superuser,
user,
_,
) = auth_data
if not superuser:
if (
not superuser
and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -136,6 +146,8 @@ class ApiRolesIndexHandler(BaseApiHandler):
role_name = data["name"]
manager = data.get("manager", None)
if not superuser and not manager:
manager = auth_data[4]["user_id"]
if manager == self.controller.users.get_id_by_name("SYSTEM") or manager == 0:
manager = None

View File

@ -1,6 +1,7 @@
from jsonschema import ValidationError, validate
import orjson
from peewee import DoesNotExist
from peewee import DoesNotExist, IntegrityError
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.web.base_api_handler import BaseApiHandler
modify_role_schema = {
@ -9,6 +10,7 @@ modify_role_schema = {
"name": {
"type": "string",
"minLength": 1,
"pattern": r"^[^,\[\]]*$",
},
"servers": {
"type": "array",
@ -21,7 +23,7 @@ modify_role_schema = {
},
"permissions": {
"type": "string",
"pattern": "^[01]{8}$", # 8 bits, see EnumPermissionsServer
"pattern": r"^[01]{8}$", # 8 bits, see EnumPermissionsServer
},
},
"required": ["server_id", "permissions"],
@ -70,13 +72,17 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
_,
exec_user_permissions_crafty,
_,
superuser,
_,
_,
) = auth_data
if not superuser:
if (
not superuser
and EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -97,9 +103,13 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
_,
superuser,
user,
_,
) = auth_data
if not superuser:
role = self.controller.roles.get_role(role_id)
if (
str(role.get("manager", "no manager found")) != str(auth_data[4]["user_id"])
and not superuser
):
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.roles.remove_role(role_id)
@ -122,14 +132,26 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
return
(
_,
_,
exec_user_permissions_crafty,
_,
superuser,
user,
_,
) = auth_data
if not superuser:
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
role = self.controller.roles.get_role(role_id)
if not superuser and (
user["user_id"] != role["manager"]
or EnumPermissionsCrafty.ROLES_CONFIG not in exec_user_permissions_crafty
):
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": "Not Authorized",
},
)
try:
data = orjson.loads(self.request.body)
@ -168,7 +190,10 @@ class ApiRolesRoleIndexHandler(BaseApiHandler):
)
except DoesNotExist:
return self.finish_json(404, {"status": "error", "error": "ROLE_NOT_FOUND"})
except IntegrityError:
return self.finish_json(
404, {"status": "error", "error": "ROLE_NAME_EXISTS"}
)
self.controller.management.add_to_audit_log(
user["user_id"],
f"modified role with ID {role_id}",

View File

@ -13,6 +13,7 @@ class ApiRolesRoleServersHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
# GET /api/v2/roles/role/servers?ids=true

View File

@ -12,6 +12,7 @@ class ApiRolesRoleUsersHandler(BaseApiHandler):
_,
superuser,
_,
_,
) = auth_data
if not superuser:

View File

@ -23,6 +23,7 @@ new_server_schema = {
"type": "string",
"examples": ["My Server"],
"minLength": 2,
"pattern": "^[^/\\\\]*$",
},
"roles": {"title": "Roles to add", "type": "array", "examples": [1, 2, 3]},
"stop_command": {
@ -139,7 +140,7 @@ new_server_schema = {
"category": {
"title": "Jar Category",
"type": "string",
"examples": ["modded", "vanilla"],
"examples": ["Mc_java_servers", "Mc_java_proxies"],
},
"properties": {
"type": {
@ -685,6 +686,7 @@ class ApiServersIndexHandler(BaseApiHandler):
_,
_superuser,
user,
_,
) = auth_data
if EnumPermissionsCrafty.SERVER_CREATION not in exec_user_crafty_permissions:

View File

@ -1,5 +1,6 @@
import logging
import os
import json
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.models.servers import Servers
from app.classes.shared.file_helpers import FileHelpers
@ -10,7 +11,7 @@ logger = logging.getLogger(__name__)
class ApiServersServerActionHandler(BaseApiHandler):
def post(self, server_id: str, action: str):
def post(self, server_id: str, action: str, action_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -18,13 +19,14 @@ class ApiServersServerActionHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.COMMANDS
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.COMMANDS not in server_permissions:
# if the user doesn't have Commands permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
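The handlers above now derive an effective permission set by combining the user's mask with the API key's mask, so a key can never grant more than either side allows. A rough sketch of that "lowest mask" idea, assuming masks are '0'/'1' strings the same length as EnumPermissionsServer (the real get_lowest_api_perm_mask may be implemented differently):

def lowest_perm_mask(user_mask: str, key_mask: str) -> str:
    # A bit stays set only when both the user and the API key hold it
    return "".join(
        "1" if u == "1" and k == "1" else "0" for u, k in zip(user_mask, key_mask)
    )

print(lowest_perm_mask("11111111", "00000101"))  # '00000101'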
@ -53,7 +55,7 @@ class ApiServersServerActionHandler(BaseApiHandler):
return self._agree_eula(server_id, auth_data[4]["user_id"])
self.controller.management.send_command(
auth_data[4]["user_id"], server_id, self.get_remote_ip(), action
auth_data[4]["user_id"], server_id, self.get_remote_ip(), action, action_id
)
self.finish_json(
@ -81,6 +83,20 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_id = self.helper.create_uuid()
new_server_path = os.path.join(self.helper.servers_dir, new_server_id)
new_backup_path = os.path.join(self.helper.backup_path, new_server_id)
backup_data = {
"backup_name": f"{new_server_name} Backup",
"backup_location": new_backup_path,
"excluded_dirs": "",
"max_backups": 0,
"server_id": new_server_id,
"compress": False,
"shutdown": False,
"before": "",
"after": "",
"default": True,
"status": json.dumps({"status": "Standby", "message": ""}),
"enabled": True,
}
new_server_command = str(server_data.get("execution_command")).replace(
server_id, new_server_id
)
@ -92,7 +108,6 @@ class ApiServersServerActionHandler(BaseApiHandler):
new_server_name,
new_server_id,
new_server_path,
new_backup_path,
new_server_command,
server_data.get("executable"),
new_server_log_path,
@ -102,6 +117,8 @@ class ApiServersServerActionHandler(BaseApiHandler):
server_data.get("type"),
)
self.controller.management.add_backup_config(backup_data)
self.controller.management.add_to_audit_log(
user_id,
f"is cloning server {server_id} named {server_data.get('server_name')}",

View File

@ -11,7 +11,7 @@ from app.classes.shared.helpers import Helpers
logger = logging.getLogger(__name__)
backup_schema = {
BACKUP_SCHEMA = {
"type": "object",
"properties": {
"filename": {"type": "string", "minLength": 5},
@ -19,36 +19,157 @@ backup_schema = {
"additionalProperties": False,
"minProperties": 1,
}
BACKUP_PATCH_SCHEMA = {
"type": "object",
"properties": {
"backup_name": {"type": "string", "minLength": 3},
"backup_location": {"type": "string", "minLength": 1},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
"before": {"type": "string"},
"after": {"type": "string"},
"excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
}
BASIC_BACKUP_PATCH_SCHEMA = {
"type": "object",
"properties": {
"backup_name": {"type": "string", "minLength": 3},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
"before": {"type": "string"},
"after": {"type": "string"},
"excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
}
ID_MISMATCH = "Server ID and backup server ID are different"
GENERAL_AUTH_ERROR = "Authorization Error"
class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
def get(self, server_id: str):
def get(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
backup_conf = self.controller.management.get_backup_config(backup_id)
if not auth_data:
return
if (
EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
if backup_conf["server_id"]["server_id"] != server_id:
return self.finish_json(
400,
{
"status": "error",
"error": "ID_MISMATCH",
"error_data": ID_MISMATCH,
},
)
):
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(200, self.controller.management.get_backup_config(server_id))
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
self.finish_json(200, backup_conf)
def delete(self, server_id: str):
def delete(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
backup_conf = self.controller.management.get_backup_config(server_id)
backup_conf = self.controller.management.get_backup_config(backup_id)
if backup_conf["server_id"]["server_id"] != server_id:
return self.finish_json(
400,
{
"status": "error",
"error": "ID_MISMATCH",
"error_data": ID_MISMATCH,
},
)
if not auth_data:
return
if (
EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"Edited server {server_id}: removed backup config"
f" {backup_conf['backup_name']}",
server_id,
self.get_remote_ip(),
)
if backup_conf["default"]:
return self.finish_json(
405,
{
"status": "error",
"error": "NOT_ALLOWED",
"error_data": "Cannot delete default backup",
},
)
self.controller.management.delete_backup_config(backup_id)
return self.finish_json(200, {"status": "ok"})
def post(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
backup_config = self.controller.management.get_backup_config(backup_id)
if backup_config["server_id"]["server_id"] != server_id:
return self.finish_json(
400,
{
"status": "error",
"error": "ID_MISMATCH",
"error_data": ID_MISMATCH,
},
)
try:
data = json.loads(self.request.body)
@ -57,7 +178,7 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
validate(data, backup_schema)
validate(data, BACKUP_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
@ -68,9 +189,246 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
},
)
svr_obj = self.controller.servers.get_server_obj(server_id)
server_data = self.controller.servers.get_server_data_by_id(server_id)
zip_name = data["filename"]
# import the server again based on zipfile
backup_config = self.controller.management.get_backup_config(backup_id)
backup_location = os.path.join(
backup_config["backup_location"], backup_config["backup_id"]
)
if Helpers.validate_traversal(backup_location, zip_name):
try:
temp_dir = Helpers.unzip_backup_archive(backup_location, zip_name)
except (FileNotFoundError, NotADirectoryError) as e:
return self.finish_json(
400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
)
if server_data["type"] == "minecraft-java":
new_server = self.controller.restore_java_zip_server(
svr_obj.server_name,
temp_dir,
server_data["executable"],
"1",
"2",
server_data["server_port"],
server_data["created_by"],
)
elif server_data["type"] == "minecraft-bedrock":
new_server = self.controller.restore_bedrock_zip_server(
svr_obj.server_name,
temp_dir,
server_data["executable"],
server_data["server_port"],
server_data["created_by"],
)
new_server_id = new_server
new_server = self.controller.servers.get_server_data(new_server)
self.controller.rename_backup_dir(
server_id,
new_server_id,
new_server["server_id"],
)
# preserve current schedules
for schedule in self.controller.management.get_schedules_by_server(
server_id
):
job_data = self.controller.management.get_scheduled_task(
schedule.schedule_id
)
job_data["server_id"] = new_server_id
del job_data["schedule_id"]
self.tasks_manager.update_job(schedule.schedule_id, job_data)
# preserve execution command
new_server_obj = self.controller.servers.get_server_obj(new_server_id)
new_server_obj.execution_command = server_data["execution_command"]
# reset executable path
if svr_obj.path in svr_obj.executable:
new_server_obj.executable = str(svr_obj.executable).replace(
svr_obj.path, new_server_obj.path
)
# reset run command path
if svr_obj.path in svr_obj.execution_command:
new_server_obj.execution_command = str(
svr_obj.execution_command
).replace(svr_obj.path, new_server_obj.path)
# reset log path
if svr_obj.path in svr_obj.log_path:
new_server_obj.log_path = str(svr_obj.log_path).replace(
svr_obj.path, new_server_obj.path
)
self.controller.servers.update_server(new_server_obj)
# preserve backup config
server_backups = self.controller.management.get_backups_by_server(server_id)
for backup in server_backups:
old_backup_id = server_backups[backup]["backup_id"]
del server_backups[backup]["backup_id"]
server_backups[backup]["server_id"] = new_server_id
if str(server_id) in (server_backups[backup]["backup_location"]):
server_backups[backup]["backup_location"] = str(
server_backups[backup]["backup_location"]
).replace(str(server_id), str(new_server_id))
new_backup_id = self.controller.management.add_backup_config(
server_backups[backup]
)
os.listdir(server_backups[backup]["backup_location"])
FileHelpers.move_dir(
os.path.join(
server_backups[backup]["backup_location"], old_backup_id
),
os.path.join(
server_backups[backup]["backup_location"], new_backup_id
),
)
# remove old server's tasks
try:
self.tasks_manager.remove_all_server_tasks(server_id)
except JobLookupError as e:
logger.info("No active tasks found for server: {e}")
self.controller.remove_server(server_id, True)
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"Restored server {server_id} backup {data['filename']}",
server_id,
self.get_remote_ip(),
)
return self.finish_json(200, {"status": "ok"})
def patch(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
try:
data = json.loads(self.request.body)
except json.decoder.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
if auth_data[4]["superuser"]:
validate(data, BACKUP_PATCH_SCHEMA)
else:
validate(data, BASIC_BACKUP_PATCH_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
"error_data": str(e),
},
)
backup_conf = self.controller.management.get_backup_config(backup_id)
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
if backup_conf["server_id"]["server_id"] != server_id:
return self.finish_json(
400,
{
"status": "error",
"error": "ID_MISMATCH",
"error_data": ID_MISMATCH,
},
)
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
self.controller.management.update_backup_config(backup_id, data)
return self.finish_json(200, {"status": "ok"})
class ApiServersServerBackupsBackupFilesIndexHandler(BaseApiHandler):
def delete(self, server_id: str, backup_id: str):
auth_data = self.authenticate_user()
backup_conf = self.controller.management.get_backup_config(backup_id)
if backup_conf["server_id"]["server_id"] != server_id:
return self.finish_json(
400,
{
"status": "error",
"error": "ID_MISMATCH",
"error_data": ID_MISMATCH,
},
)
if not auth_data:
return
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(
400,
{
"status": "error",
"error": "NOT_AUTHORIZED",
"error_data": GENERAL_AUTH_ERROR,
},
)
try:
data = json.loads(self.request.body)
except json.decoder.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
validate(data, BACKUP_SCHEMA)
except ValidationError as e:
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
"error_data": str(e),
},
)
self.helper.validate_traversal(
os.path.join(backup_conf["backup_location"], backup_conf["backup_id"]),
os.path.join(
backup_conf["backup_location"],
backup_conf["backup_id"],
data["filename"],
),
)
try:
FileHelpers.del_file(
os.path.join(backup_conf["backup_path"], data["filename"])
os.path.join(
backup_conf["backup_location"],
backup_conf["backup_id"],
data["filename"],
)
)
except Exception as e:
return self.finish_json(
@ -84,134 +442,3 @@ class ApiServersServerBackupsBackupIndexHandler(BaseApiHandler):
)
return self.finish_json(200, {"status": "ok"})
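The exclusion lookups and the delete above both lean on traversal validation so a crafted filename cannot escape the backup directory. A minimal sketch of the underlying subdirectory check (not Crafty's helper, just the idea):

import os

def is_subdir(child: str, parent: str) -> bool:
    # Resolve both paths, then require the parent to be their common prefix
    child = os.path.abspath(child)
    parent = os.path.abspath(parent)
    return os.path.commonpath([child, parent]) == parent

print(is_subdir("/backups/abc/world.zip", "/backups/abc"))         # True
print(is_subdir("/backups/abc/../../etc/passwd", "/backups/abc"))  # False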
def post(self, server_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
auth_data[4]["user_id"], server_id
)
):
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
data = json.loads(self.request.body)
except json.decoder.JSONDecodeError as e:
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
validate(data, backup_schema)
except ValidationError as e:
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
"error_data": str(e),
},
)
try:
svr_obj = self.controller.servers.get_server_obj(server_id)
server_data = self.controller.servers.get_server_data_by_id(server_id)
zip_name = data["filename"]
# import the server again based on zipfile
backup_path = svr_obj.backup_path
if Helpers.validate_traversal(backup_path, zip_name):
temp_dir = Helpers.unzip_backup_archive(backup_path, zip_name)
if server_data["type"] == "minecraft-java":
new_server = self.controller.restore_java_zip_server(
svr_obj.server_name,
temp_dir,
server_data["executable"],
"1",
"2",
server_data["server_port"],
server_data["created_by"],
)
elif server_data["type"] == "minecraft-bedrock":
new_server = self.controller.restore_bedrock_zip_server(
svr_obj.server_name,
temp_dir,
server_data["executable"],
server_data["server_port"],
server_data["created_by"],
)
new_server_id = new_server
new_server = self.controller.servers.get_server_data(new_server)
self.controller.rename_backup_dir(
server_id, new_server_id, new_server["server_id"]
)
# preserve current schedules
for schedule in self.controller.management.get_schedules_by_server(
server_id
):
job_data = self.controller.management.get_scheduled_task(
schedule.schedule_id
)
job_data["server_id"] = new_server_id
del job_data["schedule_id"]
self.tasks_manager.update_job(schedule.schedule_id, job_data)
# preserve execution command
new_server_obj = self.controller.servers.get_server_obj(new_server_id)
new_server_obj.execution_command = server_data["execution_command"]
# reset executable path
if svr_obj.path in svr_obj.executable:
new_server_obj.executable = str(svr_obj.executable).replace(
svr_obj.path, new_server_obj.path
)
# reset run command path
if svr_obj.path in svr_obj.execution_command:
new_server_obj.execution_command = str(
svr_obj.execution_command
).replace(svr_obj.path, new_server_obj.path)
# reset log path
if svr_obj.path in svr_obj.log_path:
new_server_obj.log_path = str(svr_obj.log_path).replace(
svr_obj.path, new_server_obj.path
)
self.controller.servers.update_server(new_server_obj)
# preserve backup config
backup_config = self.controller.management.get_backup_config(server_id)
excluded_dirs = []
server_obj = self.controller.servers.get_server_obj(server_id)
loop_backup_path = self.helper.wtol_path(server_obj.path)
for item in self.controller.management.get_excluded_backup_dirs(
server_id
):
item_path = self.helper.wtol_path(item)
bu_path = os.path.relpath(item_path, loop_backup_path)
bu_path = os.path.join(new_server_obj.path, bu_path)
excluded_dirs.append(bu_path)
self.controller.management.set_backup_config(
new_server_id,
new_server_obj.backup_path,
backup_config["max_backups"],
excluded_dirs,
backup_config["compress"],
backup_config["shutdown"],
)
# remove old server's tasks
try:
self.tasks_manager.remove_all_server_tasks(server_id)
except JobLookupError as e:
logger.info("No active tasks found for server: {e}")
self.controller.remove_server(server_id, True)
except (FileNotFoundError, NotADirectoryError) as e:
return self.finish_json(
400, {"status": "error", "error": f"NO BACKUP FOUND {e}"}
)
self.controller.management.add_to_audit_log(
auth_data[4]["user_id"],
f"Restored server {server_id} backup {data['filename']}",
server_id,
self.get_remote_ip(),
)
return self.finish_json(200, {"status": "ok"})

View File

@ -1,3 +1,4 @@
import os
import logging
import json
from jsonschema import validate
@ -10,13 +11,14 @@ logger = logging.getLogger(__name__)
backup_patch_schema = {
"type": "object",
"properties": {
"backup_path": {"type": "string", "minLength": 1},
"backup_name": {"type": "string", "minLength": 3},
"backup_location": {"type": "string", "minLength": 1},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
"backup_before": {"type": "string"},
"backup_after": {"type": "string"},
"exclusions": {"type": "array"},
"before": {"type": "string"},
"after": {"type": "string"},
"excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@ -25,12 +27,13 @@ backup_patch_schema = {
basic_backup_patch_schema = {
"type": "object",
"properties": {
"backup_name": {"type": "string", "minLength": 3},
"max_backups": {"type": "integer"},
"compress": {"type": "boolean"},
"shutdown": {"type": "boolean"},
"backup_before": {"type": "string"},
"backup_after": {"type": "string"},
"exclusions": {"type": "array"},
"before": {"type": "string"},
"after": {"type": "string"},
"excluded_dirs": {"type": "array"},
},
"additionalProperties": False,
"minProperties": 1,
@ -42,17 +45,21 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(200, self.controller.management.get_backup_config(server_id))
self.finish_json(
200, self.controller.management.get_backups_by_server(server_id)
)
def patch(self, server_id: str):
def post(self, server_id: str):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -78,46 +85,25 @@ class ApiServersServerBackupsIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.BACKUP not in server_permissions:
# if the user doesn't have Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.controller.management.set_backup_config(
server_id,
data.get(
"backup_path",
self.controller.management.get_backup_config(server_id)["backup_path"],
),
data.get(
"max_backups",
self.controller.management.get_backup_config(server_id)["max_backups"],
),
data.get("exclusions"),
data.get(
"compress",
self.controller.management.get_backup_config(server_id)["compress"],
),
data.get(
"shutdown",
self.controller.management.get_backup_config(server_id)["shutdown"],
),
data.get(
"backup_before",
self.controller.management.get_backup_config(server_id)["before"],
),
data.get(
"backup_after",
self.controller.management.get_backup_config(server_id)["after"],
),
)
# Set the backup location automatically for non-super users. We should probably
# make the default location configurable for SU eventually
if not auth_data[4]["superuser"]:
data["backup_location"] = os.path.join(self.helper.backup_path, server_id)
data["server_id"] = server_id
if not data.get("excluded_dirs", None):
data["excluded_dirs"] = []
self.controller.management.add_backup_config(data)
return self.finish_json(200, {"status": "ok"})

View File

@ -72,7 +72,7 @@ file_delete_schema = {
class ApiServersServerFilesIndexHandler(BaseApiHandler):
def post(self, server_id: str):
def post(self, server_id: str, backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -80,16 +80,16 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
auth_data[4]["user_id"], server_id
)
and EnumPermissionsServer.BACKUP
not in self.controller.server_perms.get_user_id_permissions_list(
auth_data[4]["user_id"], server_id
)
EnumPermissionsServer.FILES not in server_permissions
and EnumPermissionsServer.BACKUP not in server_permissions
):
# if the user doesn't have Files or Backup permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -149,21 +149,35 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
filename = html.escape(raw_filename)
rel = os.path.join(folder, raw_filename)
dpath = os.path.join(folder, filename)
if str(dpath) in self.controller.management.get_excluded_backup_dirs(
server_id
):
if os.path.isdir(rel):
return_json[filename] = {
"path": dpath,
"dir": True,
"excluded": True,
}
if backup_id:
if str(
dpath
) in self.controller.management.get_excluded_backup_dirs(backup_id):
if os.path.isdir(rel):
return_json[filename] = {
"path": dpath,
"dir": True,
"excluded": True,
}
else:
return_json[filename] = {
"path": dpath,
"dir": False,
"excluded": True,
}
else:
return_json[filename] = {
"path": dpath,
"dir": False,
"excluded": True,
}
if os.path.isdir(rel):
return_json[filename] = {
"path": dpath,
"dir": True,
"excluded": False,
}
else:
return_json[filename] = {
"path": dpath,
"dir": False,
"excluded": False,
}
else:
if os.path.isdir(rel):
return_json[filename] = {
@ -189,7 +203,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
)
self.finish_json(200, {"status": "ok", "data": file_contents})
def delete(self, server_id: str):
def delete(self, server_id: str, _backup_id=None):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -197,13 +211,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -246,7 +261,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
return self.finish_json(200, {"status": "ok"})
return self.finish_json(500, {"status": "error", "error": str(proc)})
def patch(self, server_id: str):
def patch(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -254,13 +269,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -299,7 +315,7 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
file_object.write(file_contents)
return self.finish_json(200, {"status": "ok"})
def put(self, server_id: str):
def put(self, server_id: str, _backup_id):
auth_data = self.authenticate_user()
if not auth_data:
return
@ -307,13 +323,14 @@ class ApiServersServerFilesIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -373,13 +390,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -438,13 +456,14 @@ class ApiServersServerFilesCreateHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:
@ -504,13 +523,14 @@ class ApiServersServerFilesZipHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.FILES
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.FILES not in server_permissions:
# if the user doesn't have Files permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
try:

View File

@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
server_patch_schema = {
"type": "object",
"properties": {
"server_name": {"type": "string", "minLength": 1},
"server_name": {"type": "string", "minLength": 2, "pattern": "^[^/\\\\]*$"},
"backup_path": {"type": "string"},
"executable": {"type": "string"},
"log_path": {"type": "string", "minLength": 1},
@ -102,13 +102,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -154,13 +155,14 @@ class ApiServersServerIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

View File

@ -30,13 +30,14 @@ class ApiServersServerLogsHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.LOGS
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.LOGS not in server_permissions:
# if the user doesn't have Logs permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

View File

@ -16,13 +16,14 @@ class ApiServersServerStdinHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.COMMANDS
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.COMMANDS not in server_permissions:
# if the user doesn't have Commands permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

View File

@ -21,6 +21,9 @@ new_task_schema = {
"action": {
"type": "string",
},
"action_id": {
"type": "string",
},
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@ -78,13 +81,14 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.SCHEDULE
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
data["server_id"] = server_id
@ -109,6 +113,18 @@ class ApiServersServerTasksIndexHandler(BaseApiHandler):
)
if "parent" not in data:
data["parent"] = None
if data.get("action_id"):
backup_config = self.controller.management.get_backup_config(
data["action_id"]
)
if backup_config["server_id"]["server_id"] != server_id:
return self.finish_json(
405,
{
"status": "error",
"error": "Server ID Mismatch",
},
)
task_id = self.tasks_manager.schedule_job(data)
self.controller.management.add_to_audit_log(
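The action_id guard above ties a backup schedule to the server it is created for before the job is queued. A hedged, standalone version of the check, assuming (as the diff's double lookup suggests) that get_backup_config returns a dict whose server_id field is itself a row-like dict:

def backup_belongs_to_server(management, action_id, server_id):
    # Mirrors the guard above: reject schedules whose backup config
    # points at a different server than the one in the URL.
    backup_config = management.get_backup_config(action_id)
    return str(backup_config["server_id"]["server_id"]) == str(server_id)

# In the handler, a False result maps to the 405 "Server ID Mismatch" response.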

View File

@ -22,6 +22,9 @@ task_patch_schema = {
"action": {
"type": "string",
},
"action_id": {
"type": "string",
},
"interval": {"type": "integer"},
"interval_type": {
"type": "string",
@ -54,12 +57,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.SCHEDULE
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(200, self.controller.management.get_scheduled_task(task_id))
@ -68,12 +73,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.SCHEDULE
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -120,13 +127,14 @@ class ApiServersServerTasksTaskIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.SCHEDULE
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.SCHEDULE not in server_permissions:
# if the user doesn't have Schedule permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})

View File

@ -38,12 +38,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
self.finish_json(
@ -81,13 +83,14 @@ class ApiServersServerWebhooksIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
data["server_id"] = server_id

View File

@ -39,12 +39,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
@ -66,12 +68,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
auth_data = self.authenticate_user()
if not auth_data:
return
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -117,13 +121,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
@ -159,13 +164,14 @@ class ApiServersServerWebhooksManagementIndexHandler(BaseApiHandler):
if server_id not in [str(x["server_id"]) for x in auth_data[0]]:
# if the user doesn't have access to the server, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
if (
EnumPermissionsServer.CONFIG
not in self.controller.server_perms.get_user_id_permissions_list(
mask = self.controller.server_perms.get_lowest_api_perm_mask(
self.controller.server_perms.get_user_permissions_mask(
auth_data[4]["user_id"], server_id
)
):
),
auth_data[5],
)
server_permissions = self.controller.server_perms.get_permissions(mask)
if EnumPermissionsServer.CONFIG not in server_permissions:
# if the user doesn't have Config permission, return an error
return self.finish_json(400, {"status": "error", "error": "NOT_AUTHORIZED"})
webhook = self.controller.management.get_webhook_by_id(webhook_id)

View File

@ -2,6 +2,7 @@ import logging
import json
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from app.classes.shared.translation import Translation
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.roles import Roles, HelperRoles
from app.classes.models.users import PUBLIC_USER_ATTRS
@ -21,6 +22,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
_,
_,
user,
_,
) = auth_data
# GET /api/v2/users?ids=true
@ -53,6 +55,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
)
def post(self):
self.translator = Translation(self.helper)
new_user_schema = {
"type": "object",
"properties": {
@ -70,6 +73,7 @@ class ApiUsersIndexHandler(BaseApiHandler):
_,
superuser,
user,
_,
) = auth_data
if EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions:
@ -85,12 +89,17 @@ class ApiUsersIndexHandler(BaseApiHandler):
try:
validate(data, new_user_schema)
except ValidationError as e:
err = self.translator.translate(
"validators",
e.schema["error"],
self.controller.users.get_user_lang_by_id(auth_data[4]["user_id"]),
)
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID_JSON_SCHEMA",
"error_data": str(e),
"error_data": f"{str(err)}",
},
)
username = data["username"]
@ -149,11 +158,16 @@ class ApiUsersIndexHandler(BaseApiHandler):
400, {"status": "error", "error": "INVALID_SUPERUSER_CREATE"}
)
if len(roles) != 0 and not superuser:
# HACK: This should check if the user has the roles or something
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
)
for role in roles:
role = self.controller.roles.get_role(role)
if (
str(role.get("manager", "no manager found"))
!= str(auth_data[4]["user_id"])
and not superuser
):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_CREATE"}
)
# TODO: do this in the most efficient way
user_id = self.controller.users.add_user(
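The loop added above relaxes the old all-or-nothing rule: a non-superuser may now attach a role at user creation only if they are recorded as that role's manager. Expressed as a hypothetical standalone predicate using the names from the diff:

def can_assign_role(controller, role_id, exec_user_id, superuser):
    # Superusers may assign anything; everyone else only roles they manage.
    role = controller.roles.get_role(role_id)
    manager = str(role.get("manager", "no manager found"))
    return superuser or manager == str(exec_user_id)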

View File

@ -75,7 +75,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
"name": key.name,
"server_permissions": key.server_permissions,
"crafty_permissions": key.crafty_permissions,
"superuser": key.superuser,
"full_access": key.full_access,
}
)
self.finish_json(
@ -99,7 +99,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
"type": "string",
"pattern": "^[01]{3}$", # 8 bits, see EnumPermissionsCrafty
},
"superuser": {"type": "boolean"},
"full_access": {"type": "boolean"},
},
"additionalProperties": False,
"minProperties": 1,
@ -113,6 +113,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
_,
_superuser,
user,
_,
) = auth_data
try:
@ -163,7 +164,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
key_id = self.controller.users.add_user_api_key(
data["name"],
user_id,
data["superuser"],
data["full_access"],
data["server_permissions_mask"],
data["crafty_permissions_mask"],
)
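With the key schema renaming superuser to full_access, a request body for creating an API key now carries the new field name. A hypothetical payload (only the field names come from the schema and the add_user_api_key call above; mask widths are illustrative):

new_key_payload = {
    "name": "ci-read-only",
    "server_permissions_mask": "00000000",  # width illustrative
    "crafty_permissions_mask": "000",       # matches the ^[01]{3}$ pattern
    "full_access": False,                   # previously "superuser"
}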
@ -188,6 +189,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
_,
_,
_user,
_,
) = auth_data
if key_id:
key = self.controller.users.get_user_api_key(key_id)
@ -215,7 +217,7 @@ class ApiUsersUserKeyHandler(BaseApiHandler):
)
if (
target_key.user_id != auth_data[4]["user_id"]
str(target_key.user_id) != str(auth_data[4]["user_id"])
and not auth_data[4]["superuser"]
):
return self.finish_json(

View File

@ -24,6 +24,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
_,
user,
_,
) = auth_data
if user_id in ["@me", user["user_id"]]:
@ -72,6 +73,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
_,
user,
_,
) = auth_data
if (user_id in ["@me", user["user_id"]]) and self.helper.get_setting(
@ -121,6 +123,7 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
_,
superuser,
user,
_,
) = auth_data
try:
@ -129,7 +132,6 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_JSON", "error_data": str(e)}
)
try:
validate(data, user_patch_schema)
except ValidationError as e:
@ -141,10 +143,8 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
"error_data": str(e),
},
)
if user_id == "@me":
user_id = user["user_id"]
if (
EnumPermissionsCrafty.USER_CONFIG not in exec_user_crafty_permissions
and str(user["user_id"]) != str(user_id)
@ -212,6 +212,25 @@ class ApiUsersUserIndexHandler(BaseApiHandler):
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
)
user_modify = self.controller.users.get_user_roles_id(user_id)
for role in data["roles"]:
# Check if user is not a super user and that the exec user is the role
# manager or that the role already exists in the user's list
if not superuser and (
str(
self.controller.roles.get_role(role).get(
"manager", "no manager found"
)
)
!= str(auth_data[4]["user_id"])
and role not in user_modify
):
for item in user_modify:
print(type(role), type(item))
return self.finish_json(
400, {"status": "error", "error": "INVALID_ROLES_MODIFY"}
)
user_obj = HelperUsers.get_user_model(user_id)
if "password" in data and str(user["user_id"]) != str(user_id):

View File

@ -27,6 +27,7 @@ class ApiUsersUserPermissionsHandler(BaseApiHandler):
_,
_,
user,
_,
) = auth_data
if user_id in ["@me", user["user_id"]]:

View File

@ -17,6 +17,7 @@ class ApiUsersUserPublicHandler(BaseApiHandler):
_,
_,
user,
_,
) = auth_data
if user_id == "@me":

View File

@ -30,7 +30,7 @@ class ServerHandler(BaseHandler):
) = self.current_user
superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
superuser = superuser and api_key.full_access
if superuser:
defined_servers = self.controller.servers.list_defined_servers()
@ -124,7 +124,7 @@ class ServerHandler(BaseHandler):
"created": api_key.created,
"server_permissions": api_key.server_permissions,
"crafty_permissions": api_key.crafty_permissions,
"superuser": api_key.superuser,
"full_access": api_key.full_access,
}
if api_key is not None
else None
@ -146,12 +146,12 @@ class ServerHandler(BaseHandler):
return
page_data["server_api"] = False
if page_data["online"]:
page_data["server_api"] = self.helper.check_address_status(
"https://api.serverjars.com"
page_data["server_api"] = (
self.controller.big_bucket._check_bucket_alive()
)
page_data["server_types"] = self.controller.server_jars.get_serverjar_data()
page_data["server_types"] = self.controller.big_bucket.get_bucket_data()
page_data["js_server_types"] = json.dumps(
self.controller.server_jars.get_serverjar_data()
self.controller.big_bucket.get_bucket_data()
)
if page_data["server_types"] is None:
page_data["server_types"] = []

View File

@ -24,7 +24,6 @@ from app.classes.web.routes.metrics.metrics_handlers import metrics_handlers
from app.classes.web.server_handler import ServerHandler
from app.classes.web.websocket_handler import WebSocketHandler
from app.classes.web.static_handler import CustomStaticHandler
from app.classes.web.upload_handler import UploadHandler
from app.classes.web.status_handler import StatusHandler
@ -142,7 +141,6 @@ class Webserver:
(r"/panel/(.*)", PanelHandler, handler_args),
(r"/server/(.*)", ServerHandler, handler_args),
(r"/ws", WebSocketHandler, handler_args),
(r"/upload", UploadHandler, handler_args),
(r"/status", StatusHandler, handler_args),
# API Routes V2
*api_handlers(handler_args),

View File

@ -1,331 +0,0 @@
import logging
import os
import time
import urllib.parse
import tornado.web
import tornado.options
import tornado.httpserver
from app.classes.models.crafty_permissions import EnumPermissionsCrafty
from app.classes.models.server_permissions import EnumPermissionsServer
from app.classes.shared.console import Console
from app.classes.shared.helpers import Helpers
from app.classes.shared.main_controller import Controller
from app.classes.web.base_handler import BaseHandler
from app.classes.shared.websocket_manager import WebSocketManager
logger = logging.getLogger(__name__)
@tornado.web.stream_request_body
class UploadHandler(BaseHandler):
# noinspection PyAttributeOutsideInit
def initialize(
self,
helper: Helpers = None,
controller: Controller = None,
tasks_manager=None,
translator=None,
file_helper=None,
):
self.helper = helper
self.controller = controller
self.tasks_manager = tasks_manager
self.translator = translator
self.file_helper = file_helper
def prepare(self):
# Class & Function Defination
api_key, _token_data, exec_user = self.current_user
self.upload_type = str(self.request.headers.get("X-Content-Upload-Type"))
if self.upload_type == "server_import":
superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
user_id = exec_user["user_id"]
stream_size_value = self.helper.get_setting("stream_size_GB")
max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
self.content_len = int(self.request.headers.get("Content-Length"))
if self.content_len > max_streamed_size:
logger.error(
f"User with ID {user_id} attempted to upload a file that"
f" exceeded the max body size."
)
return self.finish_json(
413,
{
"status": "error",
"error": "TOO LARGE",
"info": self.helper.translation.translate(
"error",
"fileTooLarge",
self.controller.users.get_user_lang_by_id(user_id),
),
},
)
self.do_upload = True
if superuser:
exec_user_server_permissions = (
self.controller.server_perms.list_defined_permissions()
)
elif api_key is not None:
exec_user_server_permissions = (
self.controller.crafty_perms.get_api_key_permissions_list(api_key)
)
else:
exec_user_server_permissions = (
self.controller.crafty_perms.get_crafty_permissions_list(
exec_user["user_id"]
)
)
if user_id is None:
logger.warning("User ID not found in upload handler call")
Console.warning("User ID not found in upload handler call")
self.do_upload = False
if (
EnumPermissionsCrafty.SERVER_CREATION
not in exec_user_server_permissions
and not exec_user["superuser"]
):
logger.warning(
f"User {user_id} tried to upload a server" " without permissions!"
)
Console.warning(
f"User {user_id} tried to upload a server" " without permissions!"
)
self.do_upload = False
path = os.path.join(self.controller.project_root, "import", "upload")
self.helper.ensure_dir_exists(path)
# Delete existing files
if len(os.listdir(path)) > 0:
for item in os.listdir():
try:
os.remove(os.path.join(path, item))
except:
logger.debug("Could not delete file on user server upload")
self.helper.ensure_dir_exists(path)
filename = urllib.parse.unquote(
self.request.headers.get("X-FileName", None)
)
if not str(filename).endswith(".zip"):
WebSocketManager().broadcast("close_upload_box", "error")
self.finish("error")
full_path = os.path.join(path, filename)
if self.do_upload:
try:
self.f = open(full_path, "wb")
except Exception as e:
logger.error(f"Upload failed with error: {e}")
self.do_upload = False
# If max_body_size is not set, you cannot upload files > 100MB
self.request.connection.set_max_body_size(max_streamed_size)
elif self.upload_type == "background":
superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
user_id = exec_user["user_id"]
stream_size_value = self.helper.get_setting("stream_size_GB")
max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
self.content_len = int(self.request.headers.get("Content-Length"))
if self.content_len > max_streamed_size:
logger.error(
f"User with ID {user_id} attempted to upload a file that"
f" exceeded the max body size."
)
return self.finish_json(
413,
{
"status": "error",
"error": "TOO LARGE",
"info": self.helper.translation.translate(
"error",
"fileTooLarge",
self.controller.users.get_user_lang_by_id(user_id),
),
},
)
self.do_upload = True
if not superuser:
return self.finish_json(
401,
{
"status": "error",
"error": "UNAUTHORIZED ACCESS",
"info": self.helper.translation.translate(
"error",
"superError",
self.controller.users.get_user_lang_by_id(user_id),
),
},
)
if not self.request.headers.get("X-Content-Type", None).startswith(
"image/"
):
return self.finish_json(
415,
{
"status": "error",
"error": "TYPE ERROR",
"info": self.helper.translation.translate(
"error",
"fileError",
self.controller.users.get_user_lang_by_id(user_id),
),
},
)
if user_id is None:
logger.warning("User ID not found in upload handler call")
Console.warning("User ID not found in upload handler call")
self.do_upload = False
path = os.path.join(
self.controller.project_root,
"app/frontend/static/assets/images/auth/custom",
)
filename = self.request.headers.get("X-FileName", None)
full_path = os.path.join(path, filename)
if self.do_upload:
try:
self.f = open(full_path, "wb")
except Exception as e:
logger.error(f"Upload failed with error: {e}")
self.do_upload = False
# If max_body_size is not set, you cannot upload files > 100MB
self.request.connection.set_max_body_size(max_streamed_size)
else:
server_id = self.get_argument("server_id", None)
superuser = exec_user["superuser"]
if api_key is not None:
superuser = superuser and api_key.superuser
user_id = exec_user["user_id"]
stream_size_value = self.helper.get_setting("stream_size_GB")
max_streamed_size = (1024 * 1024 * 1024) * stream_size_value
self.content_len = int(self.request.headers.get("Content-Length"))
if self.content_len > max_streamed_size:
logger.error(
f"User with ID {user_id} attempted to upload a file that"
f" exceeded the max body size."
)
return self.finish_json(
413,
{
"status": "error",
"error": "TOO LARGE",
"info": self.helper.translation.translate(
"error",
"fileTooLarge",
self.controller.users.get_user_lang_by_id(user_id),
),
},
)
self.do_upload = True
if superuser:
exec_user_server_permissions = (
self.controller.server_perms.list_defined_permissions()
)
elif api_key is not None:
exec_user_server_permissions = (
self.controller.server_perms.get_api_key_permissions_list(
api_key, server_id
)
)
else:
exec_user_server_permissions = (
self.controller.server_perms.get_user_id_permissions_list(
exec_user["user_id"], server_id
)
)
server_id = self.request.headers.get("X-ServerId", None)
if server_id is None:
logger.warning("Server ID not found in upload handler call")
Console.warning("Server ID not found in upload handler call")
self.do_upload = False
if user_id is None:
logger.warning("User ID not found in upload handler call")
Console.warning("User ID not found in upload handler call")
self.do_upload = False
if EnumPermissionsServer.FILES not in exec_user_server_permissions:
logger.warning(
f"User {user_id} tried to upload a file to "
f"{server_id} without permissions!"
)
Console.warning(
f"User {user_id} tried to upload a file to "
f"{server_id} without permissions!"
)
self.do_upload = False
path = self.request.headers.get("X-Path", None)
filename = self.request.headers.get("X-FileName", None)
full_path = os.path.join(path, filename)
if not self.helper.is_subdir(
full_path,
Helpers.get_os_understandable_path(
self.controller.servers.get_server_data_by_id(server_id)["path"]
),
):
logger.warning(
f"User {user_id} tried to upload a file to {server_id} "
f"but the path is not inside of the server!"
)
Console.warning(
f"User {user_id} tried to upload a file to {server_id} "
f"but the path is not inside of the server!"
)
self.do_upload = False
if self.do_upload:
try:
self.f = open(full_path, "wb")
except Exception as e:
logger.error(f"Upload failed with error: {e}")
self.do_upload = False
# If max_body_size is not set, you cannot upload files > 100MB
self.request.connection.set_max_body_size(max_streamed_size)
def post(self):
logger.info("Upload completed")
if self.upload_type == "server_files":
files_left = int(self.request.headers.get("X-Files-Left", None))
else:
files_left = 0
if self.do_upload:
time.sleep(5)
if files_left == 0:
WebSocketManager().broadcast("close_upload_box", "success")
self.finish("success") # Nope, I'm sending "success"
self.f.close()
else:
time.sleep(5)
if files_left == 0:
WebSocketManager().broadcast("close_upload_box", "error")
self.finish("error")
def data_received(self, chunk):
if self.do_upload:
self.f.write(chunk)

View File

@ -14,6 +14,9 @@
"auth": {
"format": "%(asctime)s - [AUTH] - %(levelname)s - %(message)s"
},
"audit": {
"()": "app.classes.logging.log_formatter.JsonFormatter"
},
"cmd_queue": {
"format": "%(asctime)s - [CMD_QUEUE] - %(levelname)s - %(message)s"
}
@ -70,6 +73,14 @@
"maxBytes": 10485760,
"backupCount": 20,
"encoding": "utf8"
},
"audit_log_handler": {
"class": "logging.handlers.RotatingFileHandler",
"formatter": "audit",
"filename": "logs/audit.log",
"maxBytes": 10485760,
"backupCount": 20,
"encoding": "utf8"
}
},
"loggers": {
@ -108,6 +119,12 @@
"cmd_queue_file_handler"
],
"propagate": false
},
"audit_log": {
"level": "INFO",
"handlers": [
"audit_log_handler"
]
}
}
}
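The logging config above wires a new audit_log logger to a JSON-formatted rotating file at logs/audit.log. A minimal usage sketch, assuming the dictConfig has already been loaded at startup (the event fields are illustrative; how extras are serialised depends on the custom JsonFormatter):

import logging

audit_log = logging.getLogger("audit_log")  # logger name from the config above
audit_log.info(
    "server start requested",
    extra={"user_name": "admin", "server_id": 1, "source_ip": "127.0.0.1"},
)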

View File

@ -1,5 +1,5 @@
{
"major": 4,
"minor": 3,
"sub": 2
"minor": 4,
"sub": 3
}

View File

@ -12,6 +12,16 @@ nav.sidebar {
position: fixed;
}
td {
-ms-overflow-style: none;
/* IE and Edge */
scrollbar-width: none;
/* Firefox */
}
td::-webkit-scrollbar {
display: none;
}
@media (min-width: 992px) {
nav.sidebar {
@ -267,4 +277,7 @@ div.warnings div.wssError a:hover {
font-family: 'Sarabun', 'roboto', sans-serif;
}
/**************************************************************/
/**************************************************************/
.hidden-input {
margin-left: -40px;
}

View File

@ -0,0 +1,537 @@
/*!
* Bootstrap-select v1.13.18 (https://developer.snapappointments.com/bootstrap-select)
*
* Copyright 2012-2020 SnapAppointments, LLC
* Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
*/
@-webkit-keyframes bs-notify-fadeOut {
0% {
opacity: .9
}
100% {
opacity: 0
}
}
@-o-keyframes bs-notify-fadeOut {
0% {
opacity: .9
}
100% {
opacity: 0
}
}
@keyframes bs-notify-fadeOut {
0% {
opacity: .9
}
100% {
opacity: 0
}
}
.bootstrap-select>select.bs-select-hidden,
select.bs-select-hidden,
select.selectpicker {
display: none !important
}
.bootstrap-select {
width: 220px;
vertical-align: middle
}
.bootstrap-select>.dropdown-toggle {
position: relative;
width: 100%;
text-align: right;
white-space: nowrap;
display: -webkit-inline-box;
display: -webkit-inline-flex;
display: -ms-inline-flexbox;
display: inline-flex;
-webkit-box-align: center;
-webkit-align-items: center;
-ms-flex-align: center;
align-items: center;
-webkit-box-pack: justify;
-webkit-justify-content: space-between;
-ms-flex-pack: justify;
justify-content: space-between
}
.bootstrap-select>.dropdown-toggle:after {
margin-top: -1px
}
.bootstrap-select>.dropdown-toggle.bs-placeholder,
.bootstrap-select>.dropdown-toggle.bs-placeholder:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder:hover {
color: #999
}
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-danger:hover,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-dark:hover,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-info:hover,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-primary:hover,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-secondary:hover,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:active,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:focus,
.bootstrap-select>.dropdown-toggle.bs-placeholder.btn-success:hover {
color: rgba(255, 255, 255, .5)
}
.bootstrap-select>select {
position: absolute !important;
bottom: 0;
left: 50%;
display: block !important;
width: .5px !important;
height: 100% !important;
padding: 0 !important;
opacity: 0 !important;
border: none;
z-index: 0 !important
}
.bootstrap-select>select.mobile-device {
top: 0;
left: 0;
display: block !important;
width: 100% !important;
z-index: 2 !important
}
.bootstrap-select.is-invalid .dropdown-toggle,
.error .bootstrap-select .dropdown-toggle,
.has-error .bootstrap-select .dropdown-toggle,
.was-validated .bootstrap-select select:invalid+.dropdown-toggle {
border-color: #b94a48
}
.bootstrap-select.is-valid .dropdown-toggle,
.was-validated .bootstrap-select select:valid+.dropdown-toggle {
border-color: #28a745
}
.bootstrap-select.fit-width {
width: auto !important
}
.bootstrap-select:not([class*=col-]):not([class*=form-control]):not(.input-group-btn) {
width: 220px
}
.bootstrap-select .dropdown-toggle:focus,
.bootstrap-select>select.mobile-device:focus+.dropdown-toggle {
outline: thin dotted #333 !important;
outline: 5px auto -webkit-focus-ring-color !important;
outline-offset: -2px
}
.bootstrap-select.form-control {
margin-bottom: 0;
padding: 0;
border: none;
height: auto
}
:not(.input-group)>.bootstrap-select.form-control:not([class*=col-]) {
width: 100%
}
.bootstrap-select.form-control.input-group-btn {
float: none;
z-index: auto
}
.form-inline .bootstrap-select,
.form-inline .bootstrap-select.form-control:not([class*=col-]) {
width: auto
}
.bootstrap-select:not(.input-group-btn),
.bootstrap-select[class*=col-] {
float: none;
display: inline-block;
margin-left: 0
}
.bootstrap-select.dropdown-menu-right,
.bootstrap-select[class*=col-].dropdown-menu-right,
.row .bootstrap-select[class*=col-].dropdown-menu-right {
float: right
}
.form-group .bootstrap-select,
.form-horizontal .bootstrap-select,
.form-inline .bootstrap-select {
margin-bottom: 0
}
.form-group-lg .bootstrap-select.form-control,
.form-group-sm .bootstrap-select.form-control {
padding: 0
}
.form-group-lg .bootstrap-select.form-control .dropdown-toggle,
.form-group-sm .bootstrap-select.form-control .dropdown-toggle {
height: 100%;
font-size: inherit;
line-height: inherit;
border-radius: inherit
}
.bootstrap-select.form-control-lg .dropdown-toggle,
.bootstrap-select.form-control-sm .dropdown-toggle {
font-size: inherit;
line-height: inherit;
border-radius: inherit
}
.bootstrap-select.form-control-sm .dropdown-toggle {
padding: .25rem .5rem
}
.bootstrap-select.form-control-lg .dropdown-toggle {
padding: .5rem 1rem
}
.form-inline .bootstrap-select .form-control {
width: 100%
}
.bootstrap-select.disabled,
.bootstrap-select>.disabled {
cursor: not-allowed
}
.bootstrap-select.disabled:focus,
.bootstrap-select>.disabled:focus {
outline: 0 !important
}
.bootstrap-select.bs-container {
position: absolute;
top: 0;
left: 0;
height: 0 !important;
padding: 0 !important
}
.bootstrap-select.bs-container .dropdown-menu {
z-index: 1060
}
.bootstrap-select .dropdown-toggle .filter-option {
position: static;
top: 0;
left: 0;
float: left;
height: 100%;
width: 100%;
text-align: left;
overflow: hidden;
-webkit-box-flex: 0;
-webkit-flex: 0 1 auto;
-ms-flex: 0 1 auto;
flex: 0 1 auto
}
.bs3.bootstrap-select .dropdown-toggle .filter-option {
padding-right: inherit
}
.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option {
position: absolute;
padding-top: inherit;
padding-bottom: inherit;
padding-left: inherit;
float: none
}
.input-group .bs3-has-addon.bootstrap-select .dropdown-toggle .filter-option .filter-option-inner {
padding-right: inherit
}
.bootstrap-select .dropdown-toggle .filter-option-inner-inner {
overflow: hidden
}
.bootstrap-select .dropdown-toggle .filter-expand {
width: 0 !important;
float: left;
opacity: 0 !important;
overflow: hidden
}
.bootstrap-select .dropdown-toggle .caret {
position: absolute;
top: 50%;
right: 12px;
margin-top: -2px;
vertical-align: middle
}
.input-group .bootstrap-select.form-control .dropdown-toggle {
border-radius: inherit
}
.bootstrap-select[class*=col-] .dropdown-toggle {
width: 100%
}
.bootstrap-select .dropdown-menu {
min-width: 100%;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box
}
.bootstrap-select .dropdown-menu>.inner:focus {
outline: 0 !important
}
.bootstrap-select .dropdown-menu.inner {
position: static;
float: none;
border: 0;
padding: 0;
margin: 0;
border-radius: 0;
-webkit-box-shadow: none;
box-shadow: none
}
.bootstrap-select .dropdown-menu li {
position: relative
}
.bootstrap-select .dropdown-menu li.active small {
color: rgba(255, 255, 255, .5) !important
}
.bootstrap-select .dropdown-menu li.disabled a {
cursor: not-allowed
}
.bootstrap-select .dropdown-menu li a {
cursor: pointer;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none
}
.bootstrap-select .dropdown-menu li a.opt {
position: relative;
padding-left: 2.25em
}
.bootstrap-select .dropdown-menu li a span.check-mark {
display: none
}
.bootstrap-select .dropdown-menu li a span.text {
display: inline-block
}
.bootstrap-select .dropdown-menu li small {
padding-left: .5em
}
.bootstrap-select .dropdown-menu .notify {
position: absolute;
bottom: 5px;
width: 96%;
margin: 0 2%;
min-height: 26px;
padding: 3px 5px;
background: #f5f5f5;
border: 1px solid #e3e3e3;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, .05);
pointer-events: none;
opacity: .9;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box
}
.bootstrap-select .dropdown-menu .notify.fadeOut {
-webkit-animation: .3s linear 750ms forwards bs-notify-fadeOut;
-o-animation: .3s linear 750ms forwards bs-notify-fadeOut;
animation: .3s linear 750ms forwards bs-notify-fadeOut
}
.bootstrap-select .no-results {
padding: 3px;
background: #f5f5f5;
margin: 0 5px;
white-space: nowrap
}
.bootstrap-select.fit-width .dropdown-toggle .filter-option {
position: static;
display: inline;
padding: 0
}
.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner,
.bootstrap-select.fit-width .dropdown-toggle .filter-option-inner-inner {
display: inline
}
.bootstrap-select.fit-width .dropdown-toggle .bs-caret:before {
content: '\00a0'
}
.bootstrap-select.fit-width .dropdown-toggle .caret {
position: static;
top: auto;
margin-top: -1px
}
.bootstrap-select.show-tick .dropdown-menu .selected span.check-mark {
position: absolute;
display: inline-block;
right: 15px;
top: 5px
}
.bootstrap-select.show-tick .dropdown-menu li a span.text {
margin-right: 34px
}
.bootstrap-select .bs-ok-default:after {
content: '';
display: block;
width: .5em;
height: 1em;
border-style: solid;
border-width: 0 .26em .26em 0;
-webkit-transform-style: preserve-3d;
transform-style: preserve-3d;
-webkit-transform: rotate(45deg);
-ms-transform: rotate(45deg);
-o-transform: rotate(45deg);
transform: rotate(45deg)
}
.bootstrap-select.show-menu-arrow.open>.dropdown-toggle,
.bootstrap-select.show-menu-arrow.show>.dropdown-toggle {
z-index: 1061
}
.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:before {
content: '';
border-left: 7px solid transparent;
border-right: 7px solid transparent;
border-bottom: 7px solid rgba(204, 204, 204, .2);
position: absolute;
bottom: -4px;
left: 9px;
display: none
}
.bootstrap-select.show-menu-arrow .dropdown-toggle .filter-option:after {
content: '';
border-left: 6px solid transparent;
border-right: 6px solid transparent;
border-bottom: 6px solid #fff;
position: absolute;
bottom: -4px;
left: 10px;
display: none
}
.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:before {
bottom: auto;
top: -4px;
border-top: 7px solid rgba(204, 204, 204, .2);
border-bottom: 0
}
.bootstrap-select.show-menu-arrow.dropup .dropdown-toggle .filter-option:after {
bottom: auto;
top: -4px;
border-top: 6px solid #fff;
border-bottom: 0
}
.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:before {
right: 12px;
left: auto
}
.bootstrap-select.show-menu-arrow.pull-right .dropdown-toggle .filter-option:after {
right: 13px;
left: auto
}
.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:after,
.bootstrap-select.show-menu-arrow.open>.dropdown-toggle .filter-option:before,
.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:after,
.bootstrap-select.show-menu-arrow.show>.dropdown-toggle .filter-option:before {
display: block
}
.bs-actionsbox,
.bs-donebutton,
.bs-searchbox {
padding: 4px 8px
}
.bs-actionsbox {
width: 100%;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box
}
.bs-actionsbox .btn-group button {
width: 50%
}
.bs-donebutton {
float: left;
width: 100%;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box
}
.bs-donebutton .btn-group button {
width: 100%
}
.bs-searchbox+.bs-actionsbox {
padding: 0 8px 4px
}
.bs-searchbox .form-control {
margin-bottom: 0;
width: 100%;
float: none
}

View File

@ -1,120 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 26.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 683.6 143.8" style="enable-background:new 0 0 683.6 143.8;" xml:space="preserve">
<style type="text/css">
.st0{opacity:0.85;fill:#FFFFFF;enable-background:new ;}
.st1{opacity:0.85;}
.st2{fill:#FFFFFF;}
.st3{fill:none;}
.st4{fill:url(#SVGID_1_);}
.st5{fill:url(#SVGID_00000137122815686618769650000009047437546445953421_);}
.st6{fill:url(#SVGID_00000170963539203169094570000007184871682409824703_);}
.st7{fill:url(#SVGID_00000169549353698428389090000007910489870824235905_);}
.st8{fill-rule:evenodd;clip-rule:evenodd;fill:url(#SVGID_00000029754379306852418700000008865188217784465572_);}
</style>
<path class="st0" d="M175.8,111.5h17.6v3.8h-13.2v8.9h12.1v3.7h-12.1v11.8h-4.4V111.5z"/>
<path class="st0" d="M196.3,119.1h4.2v3.5h0.1c0.4-2.3,2.4-3.9,4.7-3.9c0.5,0,1,0.1,1.5,0.2v3.9c-0.6-0.2-1.3-0.3-1.9-0.2
c-2.7,0-4.4,1.8-4.4,4.8v12.3h-4.2L196.3,119.1z"/>
<path class="st0" d="M207.2,129.4L207.2,129.4c0-6.6,3.9-10.6,9.7-10.6s9.8,4,9.8,10.6l0,0c0,6.6-3.9,10.7-9.8,10.7
S207.2,136,207.2,129.4z M222.4,129.4L222.4,129.4c0-4.5-2.2-7.1-5.5-7.1s-5.4,2.6-5.4,7.1l0,0c0,4.5,2.2,7.2,5.5,7.2
S222.4,133.9,222.4,129.4L222.4,129.4z"/>
<path class="st0" d="M229.6,119.1h4.2v3.2h0.1c1-2.3,3.2-3.7,5.7-3.6c2.6-0.2,5,1.5,5.7,4h0.1c1.1-2.5,3.6-4.1,6.4-4
c4.1,0,6.7,2.7,6.7,6.8v14.1h-4.2v-13.1c0-2.7-1.4-4.2-3.9-4.2c-2.3,0-4.2,1.8-4.3,4.2c0,0.1,0,0.2,0,0.3v12.9H242v-13.4
c0.2-2-1.3-3.8-3.3-3.9c-0.2,0-0.4,0-0.5,0c-2.4,0-4.3,2-4.3,4.3c0,0.1,0,0.2,0,0.3v12.7h-4.2L229.6,119.1z"/>
<g id="Layer_2_00000138553854520646606810000012156271018779627156_" class="st1">
<g id="Layer_1-2">
<path class="st2" d="M343.7,139.9c-6.9,0-12.5-5.6-12.5-12.5s5.6-12.5,12.5-12.5c2.1,0,4.2,0.5,6,1.5c1.8,1,3.3,2.4,4.3,4.1
l-4.1,2.4c-0.6-1.1-1.5-1.9-2.5-2.5c-3.1-1.6-6.8-1.1-9.4,1.3c-1.5,1.5-2.2,3.6-2.1,5.7c-0.1,2.1,0.7,4.1,2.1,5.7
c1.5,1.5,3.5,2.3,5.7,2.2c1.3,0,2.6-0.3,3.7-0.9c1.1-0.6,2-1.4,2.5-2.5l4.1,2.4c-1,1.7-2.5,3.2-4.3,4.1
C347.8,139.4,345.8,139.9,343.7,139.9z"/>
<path class="st2" d="M361.4,122.3v3c0.3-1,1.1-1.9,2-2.5c1-0.6,2.1-0.9,3.2-0.8v4.9c-1.3-0.2-2.6,0.1-3.6,0.8
c-1.1,0.8-1.7,2.2-1.6,3.5v8.2H357v-17.2H361.4z"/>
<path class="st2" d="M381.6,124.3v-2h4.4v17.2h-4.4v-2c-1.4,1.7-3.4,2.6-5.6,2.5c-2.2,0-4.4-0.9-5.9-2.6c-1.6-1.8-2.5-4.1-2.4-6.5
c-0.1-2.4,0.8-4.7,2.4-6.4c1.5-1.7,3.6-2.7,5.9-2.7C378.1,121.7,380.2,122.6,381.6,124.3z M373.4,134.3c1.9,1.8,4.9,1.8,6.8,0
c0.9-0.9,1.4-2.2,1.4-3.5c0.1-1.3-0.4-2.6-1.4-3.5c-1.9-1.8-4.9-1.8-6.8,0c-0.9,0.9-1.4,2.2-1.3,3.5
C372,132.1,372.5,133.4,373.4,134.3z"/>
<path class="st2" d="M399.2,115v4.2c-2.4-0.2-3.6,0.8-3.7,2.9v0.2h3.6v4.3h-3.6v12.9h-4.4v-12.9h-2.5v-4.2h2.5v-0.2
c-0.2-2,0.6-4.1,2-5.5C394.5,115.3,396.6,114.8,399.2,115z"/>
<path class="st2" d="M411.6,122.3v4.2h-3.9v7.1c0,0.5,0.1,1,0.5,1.3c0.4,0.3,0.8,0.5,1.3,0.5c0.7,0,1.4,0,2.1,0v4
c-3,0.3-5.1,0.1-6.4-0.8s-1.9-2.5-1.9-4.9v-7.1h-3v-4.2h3v-3.5l4.4-1.3v4.8L411.6,122.3z"/>
<path class="st2" d="M427.2,124.3v-2h4.4v17.2h-4.4v-2c-1.4,1.7-3.4,2.6-5.6,2.5c-2.2,0-4.4-0.9-5.9-2.6c-1.6-1.8-2.5-4.1-2.4-6.5
c-0.1-2.4,0.8-4.7,2.4-6.4c1.5-1.7,3.6-2.7,5.9-2.7C423.8,121.7,425.9,122.6,427.2,124.3z M419.1,134.3c1.9,1.8,4.9,1.8,6.8,0
c0.9-0.9,1.4-2.2,1.4-3.5c0-1.3-0.4-2.5-1.4-3.5c-1.9-1.8-4.9-1.8-6.8,0c-0.9,0.9-1.4,2.2-1.3,3.5
C417.7,132.1,418.2,133.4,419.1,134.3L419.1,134.3z"/>
<path class="st2" d="M440.1,122.3v3c0.4-1,1.1-1.9,2-2.5c1-0.6,2.1-0.9,3.2-0.8v4.9c-1.3-0.2-2.6,0.1-3.6,0.8
c-1.1,0.8-1.7,2.2-1.6,3.5v8.2h-4.4v-17.2H440.1z"/>
<path class="st2" d="M461.9,137.3c-3.6,3.6-9.3,3.6-12.9,0s-3.6-9.3,0-12.9l0,0c3.6-3.5,9.3-3.5,12.9,0.1c1.7,1.7,2.6,4,2.6,6.4
C464.5,133.3,463.6,135.6,461.9,137.3z M452.1,134.3c1.9,1.8,4.8,1.8,6.7,0c1.8-1.9,1.8-4.9,0-6.8c-1.9-1.8-4.8-1.8-6.7,0
C450.3,129.4,450.3,132.3,452.1,134.3L452.1,134.3z"/>
<path class="st2" d="M320,137.6l-2.9-20.3c-0.4-2.7-2.7-4.7-5.5-4.7h-9c-0.3,0-0.5,0.2-0.7,0.4l-0.9,2H292l-0.9-2
c-0.1-0.3-0.4-0.4-0.7-0.4h-9c-2.7,0-5.1,2-5.5,4.7l-2.9,20.3c-0.4,3,1.7,5.8,4.7,6.2c0,0,0,0,0,0l0,0c0.3,0,0.5,0.1,0.8,0.1h36
c3,0,5.5-2.5,5.5-5.5l0,0C320,138.1,320,137.8,320,137.6z M287.1,130c-2.7,0-4.9-2.2-4.9-4.9c0-2.7,2.2-4.9,4.9-4.9
c2.7,0,4.9,2.2,4.9,4.9c0,0,0,0,0,0l0,0C292,127.8,289.8,130,287.1,130z M296.5,138c-2.7,0-4.9-2.2-4.9-4.9h9.8
C301.4,135.8,299.3,138,296.5,138L296.5,138L296.5,138z M305.9,130c-2.7,0-4.9-2.2-4.9-4.9c0-2.7,2.2-4.9,4.9-4.9
c2.7,0,4.9,2.2,4.9,4.9c0,0,0,0,0,0l0,0C310.8,127.8,308.6,130,305.9,130L305.9,130z"/>
</g>
</g>
<path class="st2" d="M133.1,19.2H9.7c-1.8,0-3.2-1.4-3.2-3.2V3.2C6.5,1.5,7.9,0,9.7,0h123.4c1.8,0,3.2,1.4,3.2,3.2V16
C136.3,17.8,134.9,19.2,133.1,19.2"/>
<path class="st2" d="M23.6,36.7c-3.4,0-6.7,1.6-8.8,4.3c-2.9,3.6-4.1,8.3-3.2,12.8l9.2,51.9c1.2,6.6,6.2,11.4,12.1,11.4H110
c5.8,0,10.9-4.8,12.1-11.4l9.2-51.9c0.8-4.5-0.4-9.2-3.3-12.8c-2.1-2.7-5.4-4.3-8.8-4.3H23.6z M110,128.3H32.8
c-11.3,0-21-8.7-23.1-20.7L0.5,55.8c-1.5-7.8,0.6-15.9,5.7-22c4.3-5.2,10.7-8.3,17.4-8.3h95.6c6.8,0.1,13.1,3.1,17.4,8.3
c5.1,6.1,7.2,14.2,5.7,22l-9.2,51.9C130.9,119.7,121.2,128.4,110,128.3"/>
<path class="st2" d="M120.8,23.8v-2.2c2,0,3.5-1.6,3.5-3.6c0-1.8-1.5-3.4-3.3-3.5H21.6c-2,0.1-3.5,1.8-3.4,3.7
c0.1,1.8,1.5,3.3,3.4,3.4v2.2c-3.2-0.1-5.7-2.8-5.6-6c0.1-3,2.5-5.4,5.6-5.6h99.2c3.2-0.1,5.9,2.4,6,5.6s-2.4,5.9-5.6,6
C121.1,23.8,121,23.8,120.8,23.8"/>
<path class="st2" d="M120.8,33.1H21.6c-3.2,0-5.8-2.6-5.8-5.8c0-3.2,2.6-5.8,5.8-5.8v2.2c-2,0.1-3.5,1.8-3.4,3.7
c0.1,1.8,1.5,3.3,3.4,3.4h99.2c2,0.1,3.7-1.3,3.8-3.3c0.1-2-1.3-3.7-3.3-3.8c-0.1,0-0.2,0-0.3,0h-0.2v-2.2c3.2-0.1,5.9,2.4,6,5.6
s-2.4,5.9-5.6,6C121.1,33.1,121,33.1,120.8,33.1"/>
<path class="st2" d="M21.6,21.5l36.1,1.1l-36.1,1.1V21.5z"/>
<path class="st2" d="M125.5,23.8l-45.1-1.1l45.1-1.1V23.8z"/>
<rect x="-2.5" y="-1.1" class="st3" width="571.3" height="131.4"/>
<path class="st2" d="M163.8,91.7l7.3-10.9c5.8,5.5,14.3,9.3,22.3,9.3c7.1,0,13.1-3.3,13.1-8.3c0-6-8.1-7.9-15.4-9.6
c-13.7-3.2-24.8-9.8-24.8-22.3c0-12.7,11.1-21,27.1-21c10.7,0,19.4,3.7,24.7,8.9l-6.6,10.8c-4-3.9-11.2-6.9-18.3-6.9
s-12.2,3.2-12.2,7.7c0,5.5,7.4,7.9,14.1,9.3s26.2,6.2,26.2,22.5c0,12.8-12.2,21.6-27.8,21.6C182.6,102.8,171.1,98.4,163.8,91.7z"/>
<path class="st2" d="M281.7,80.1h-40.9c1.9,6.6,7.5,10.9,15.1,10.9c5.6,0.1,10.9-2.3,14.5-6.5l9,7.9c-5.5,6.5-14,10.5-23.9,10.5
c-16.8,0-29.3-12-29.3-27.8c0-15.6,12.1-27.4,28-27.4S282,59.4,282,75.3C282,76.9,281.9,78.5,281.7,80.1z M240.8,70.3h26.9
c-1.7-6.6-6.9-10.9-13.4-10.9C247.7,59.4,242.5,63.8,240.8,70.3L240.8,70.3z"/>
<path class="st2" d="M321.3,48v13.9h-2.3c-9.6,0-15.2,5.7-15.2,14.7v25h-13.4V48.9h13.5v6.8c3.6-4.8,9.2-7.7,15.2-7.7L321.3,48z"/>
<path class="st2" d="M381.9,48.9L360,101.6h-13.9l-21.9-52.8h15.3l13.8,35.9L367,48.9H381.9z"/>
<path class="st2" d="M437.1,80.1h-40.9c1.9,6.6,7.5,10.9,15.1,10.9c5.6,0.1,10.9-2.3,14.5-6.5l9,7.9c-5.5,6.5-14,10.5-23.9,10.5
c-16.8,0-29.3-12-29.3-27.8c0-15.6,12.1-27.4,28-27.4s27.7,11.8,27.7,27.7C437.4,76.9,437.3,78.5,437.1,80.1z M396.1,70.3H423
c-1.7-6.6-6.9-10.9-13.4-10.9S397.7,63.8,396.1,70.3L396.1,70.3z"/>
<path class="st2" d="M476.7,48v13.9h-2.2c-9.6,0-15.2,5.7-15.2,14.7v25h-13.5V48.9h13.5v6.8c3.6-4.8,9.2-7.7,15.2-7.7L476.7,48z"/>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="870.0443" y1="434.2369" x2="907.1767" y2="465.2789" gradientTransform="matrix(1 0 0 1 -374.6 -381.3801)">
<stop offset="0" style="stop-color:#FEAF6F"/>
<stop offset="1" style="stop-color:#FD5E83"/>
</linearGradient>
<path class="st4" d="M492.5,100.6V87c3.2,1.4,6.6,2.1,10,2.2c7.3,0,11.8-3.9,11.8-10.9v-48h14.3V79c0,15-9.8,23.9-24.5,23.9
C500,102.9,496.1,102.1,492.5,100.6z"/>
<linearGradient id="SVGID_00000162328622213414588160000008200821717462734513_" gradientUnits="userSpaceOnUse" x1="920.7661" y1="434.5518" x2="972.3098" y2="477.6348" gradientTransform="matrix(1 0 0 1 -374.6 -381.3801)">
<stop offset="0" style="stop-color:#FEAF6F"/>
<stop offset="1" style="stop-color:#FD5E83"/>
</linearGradient>
<path style="fill:url(#SVGID_00000162328622213414588160000008200821717462734513_);" d="M593.2,48.9v52.8h-13.5v-6.3
c-4.4,4.9-10.6,7.6-17.2,7.5c-14.7,0-25.8-11.9-25.8-27.6s11.1-27.6,25.8-27.6c6.5-0.1,12.8,2.7,17.2,7.5v-6.3L593.2,48.9z
M579.8,75.2c0-8-6.6-14.5-14.6-14.5c-8,0-14.5,6.6-14.5,14.6c0,8,6.5,14.4,14.5,14.5c7.9,0.2,14.4-6,14.6-13.9
C579.8,75.7,579.8,75.5,579.8,75.2z"/>
<linearGradient id="SVGID_00000026849485640012965730000014957007722205225107_" gradientUnits="userSpaceOnUse" x1="973.2171" y1="437.9167" x2="1007.0711" y2="466.2133" gradientTransform="matrix(1 0 0 1 -374.6 -381.3801)">
<stop offset="0" style="stop-color:#FEAF6F"/>
<stop offset="1" style="stop-color:#FD5E83"/>
</linearGradient>
<path style="fill:url(#SVGID_00000026849485640012965730000014957007722205225107_);" d="M635.9,48v13.9h-2.3
c-9.6,0-15.2,5.7-15.2,14.7v25H605V48.9h13.4v6.8c3.6-4.8,9.2-7.7,15.2-7.7L635.9,48z"/>
<linearGradient id="SVGID_00000011000279650532451330000005619277557075874698_" gradientUnits="userSpaceOnUse" x1="1015.3561" y1="439.477" x2="1056.9301" y2="474.2302" gradientTransform="matrix(1 0 0 1 -374.6 -381.3801)">
<stop offset="0" style="stop-color:#FEAF6F"/>
<stop offset="1" style="stop-color:#FD5E83"/>
</linearGradient>
<path style="fill:url(#SVGID_00000011000279650532451330000005619277557075874698_);" d="M638.7,94.8l6.5-8.9
c4.2,3.8,9.7,5.9,15.4,5.9c5.4,0,9.3-1.8,9.3-5c0-3.5-4.6-4.8-10.3-6.1c-8.4-1.9-19.2-4.5-19.2-16.5c0-11.2,9.8-16.7,21.5-16.7
c7.4-0.1,14.6,2.3,20.5,6.9l-6.5,9c-3.9-3.1-8.7-4.8-13.7-4.9c-4.6,0-8.3,1.5-8.3,4.5c0,3.5,4.4,4.7,10.3,5.9
c8.4,1.9,19.2,4.5,19.2,16.4c0,11.2-9.9,17.3-22.6,17.3C652.9,102.9,644.9,100.1,638.7,94.8z"/>
<linearGradient id="SVGID_00000176732902084481618460000012775063734620060048_" gradientUnits="userSpaceOnUse" x1="408.7259" y1="431.5905" x2="485.4144" y2="495.6844" gradientTransform="matrix(1 0 0 1 -374.6 -381.3801)">
<stop offset="0" style="stop-color:#FEAF6F"/>
<stop offset="1" style="stop-color:#FD5E83"/>
</linearGradient>
<path style="fill-rule:evenodd;clip-rule:evenodd;fill:url(#SVGID_00000176732902084481618460000012775063734620060048_);" d="
M124.5,62c-12.7,0.9-27,5.5-35.7,12.3c-38.7,30.3-69.2-6.6-69.3-6.6l6.8,36.8c0.8,4.3,4.6,7.5,9,7.5l73,0.2c4.5,0,8.3-3.2,9.1-7.6
L124.5,62z"/>
</svg>


View File

@ -1 +0,0 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 142.71 128.36"><defs><style>.cls-1{fill:#fff;}.cls-2{fill-rule:evenodd;fill:url(#linear-gradient);}</style><linearGradient id="linear-gradient" x1="408.73" y1="431.59" x2="485.41" y2="495.68" gradientTransform="translate(-374.6 -381.38)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#feaf6f"/><stop offset="1" stop-color="#fd5e83"/></linearGradient></defs><path class="cls-1" d="M133.09,19.17H9.67A3.24,3.24,0,0,1,6.46,16V3.24A3.24,3.24,0,0,1,9.7,0H133.09a3.25,3.25,0,0,1,3.25,3.24V16a3.25,3.25,0,0,1-3.25,3.24"/><path class="cls-1" d="M23.61,36.67A11.41,11.41,0,0,0,14.8,41a15.79,15.79,0,0,0-3.25,12.8l9.18,51.92c1.17,6.62,6.25,11.42,12.06,11.42H110c5.82,0,10.89-4.8,12.06-11.42l9.18-51.91A15.86,15.86,0,0,0,128,41a11.5,11.5,0,0,0-8.82-4.33ZM110,128.35H32.8c-11.27,0-21-8.7-23.12-20.69L.46,55.75a26.72,26.72,0,0,1,5.71-22,22.77,22.77,0,0,1,17.41-8.34h95.56a22.8,22.8,0,0,1,17.41,8.34,26.79,26.79,0,0,1,5.71,22l-9.19,51.91c-2.12,12-11.84,20.7-23.12,20.7"/><path class="cls-1" d="M120.8,23.76V21.51A3.56,3.56,0,0,0,121,14.4H21.59a3.56,3.56,0,0,0,0,7.11v2.25a5.81,5.81,0,0,1,0-11.61H120.8a5.81,5.81,0,0,1,.48,11.61h-.48"/><path class="cls-1" d="M120.8,33.11H21.59a5.8,5.8,0,0,1,0-11.6v2.24a3.56,3.56,0,0,0,0,7.11H120.8a3.56,3.56,0,0,0,.52-7.1h-.52V21.51a5.81,5.81,0,0,1,.48,11.61,3.84,3.84,0,0,1-.48,0"/><path class="cls-1" d="M21.59,21.51l36.13,1.13L21.59,23.76Z"/><path class="cls-1" d="M125.46,23.76,80.35,22.64l45.11-1.13Z"/><path class="cls-2" d="M124.46,62c-12.72.93-27,5.55-35.7,12.34-38.69,30.34-69.25-6.6-69.28-6.58l6.75,36.83a9.16,9.16,0,0,0,9,7.52l73,.16a9.17,9.17,0,0,0,9.06-7.64Z"/></svg>


File diff suppressed because one or more lines are too long

View File

@ -41,7 +41,7 @@ async function getTreeView(path, unzip = false, upload = false) {
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
process_tree_response(responseData);
process_tree_response(responseData, unzip);
let x = document.querySelector('.bootbox');
if (x) {
x.remove()
@ -61,7 +61,7 @@ async function getTreeView(path, unzip = false, upload = false) {
}
}
function process_tree_response(response) {
function process_tree_response(response, unzip) {
const styles = window.getComputedStyle(document.getElementById("lower_half"));
//If this value is still hidden we know the user is executing a zip import and not an upload
if (styles.visibility === "hidden") {
@ -70,7 +70,9 @@ function process_tree_response(response) {
document.getElementById('upload_submit').disabled = false;
}
let path = response.data.root_path.path;
$(".root-input").val(response.data.root_path.path);
if (unzip) {
$(".root-input").val(response.data.root_path.path);
}
let text = `<ul class="tree-nested d-block" id="${path}ul">`;
Object.entries(response.data).forEach(([key, value]) => {
if (key === "root_path" || key === "db_stats") {
@ -83,7 +85,7 @@ function process_tree_response(response) {
if (value.dir) {
text += `<li class="tree-item" id="${dpath}li" data-path="${dpath}">
<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
<input type="radio" name="root_path" value="${dpath}">
<input type="radio" class="root-input" name="root_path" value="${dpath}">
<span id="${dpath}span" class="files-tree-title" data-path="${dpath}" data-name="${filename}" onclick="getDirView(event)">
<i style="color: var(--info);" class="far fa-folder"></i>
<i style="color: var(--info);" class="far fa-folder-open"></i>

View File

@ -0,0 +1,226 @@
function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i, file_num, updateProgressBar) {
return fetch(url, {
method: 'POST',
body: chunk,
headers: {
'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
'Content-Length': chunk.size,
'fileSize': file.size,
'chunkHash': chunk_hash,
'chunked': true,
'type': type,
'totalChunks': totalChunks,
'fileName': file.name,
'location': path,
'fileId': fileId,
'chunkId': i,
},
})
.then(async response => {
if (!response.ok) {
const errorData = await response.json();
throw new Error(JSON.stringify(errorData) || 'Unknown error occurred');
}
return response.json(); // Return the JSON data
})
.then(data => {
if (data.status !== "completed" && data.status !== "partial") {
throw new Error(data.message || 'Unknown error occurred');
}
// Update progress bar
const progress = (i + 1) / totalChunks * 100;
updateProgressBar(Math.round(progress), type, file_num);
});
}
async function uploadFile(type, file = null, path = null, file_num = 0, _onProgress = null) {
if (file == null) {
try {
file = $("#file")[0].files[0];
} catch {
bootbox.alert("Please select a file first.");
return;
}
}
const fileId = uuidv4();
const token = getCookie("_xsrf");
if (type !== "server_upload") {
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div id="upload-progress-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
}
let url = '';
if (type === "server_upload") {
url = `/api/v2/servers/${serverId}/files/upload/`;
} else if (type === "background") {
url = `/api/v2/crafty/admin/upload/`;
} else if (type === "import") {
url = `/api/v2/servers/import/upload/`;
}
console.log(url);
const chunkSize = 1024 * 1024 * 10; // 10MB
const totalChunks = Math.ceil(file.size / chunkSize);
const errors = [];
const batchSize = 30; // Number of chunks to upload in each batch
try {
let res = await fetch(url, {
method: 'POST',
headers: {
'X-XSRFToken': token,
'chunked': true,
'fileSize': file.size,
'type': type,
'totalChunks': totalChunks,
'fileName': file.name,
'location': path,
'fileId': fileId,
},
body: null,
});
if (!res.ok) {
let errorResponse = await res.json();
throw new Error(JSON.stringify(errorResponse));
}
let responseData = await res.json();
if (responseData.status !== "ok") {
throw new Error(JSON.stringify(responseData));
}
for (let i = 0; i < totalChunks; i += batchSize) {
const batchPromises = [];
for (let j = 0; j < batchSize && (i + j) < totalChunks; j++) {
const start = (i + j) * chunkSize;
const end = Math.min(start + chunkSize, file.size);
const chunk = file.slice(start, end);
const chunk_hash = await calculateFileHash(chunk);
const uploadPromise = uploadChunk(file, url, chunk, start, end, chunk_hash, totalChunks, type, path, fileId, i + j, file_num, updateProgressBar)
.catch(error => {
errors.push(error); // Store the error
});
batchPromises.push(uploadPromise);
}
// Wait for the current batch to complete before proceeding to the next batch
await Promise.all(batchPromises);
// Optional delay between batches to account for rate limiting
await delay(2000); // Adjust the delay time (in milliseconds) as needed
}
} catch (error) {
errors.push(error); // Store the error
}
if (errors.length > 0) {
const errorMessage = errors.map(error => JSON.parse(error.message).data.message || 'Unknown error occurred').join('<br>');
console.log(errorMessage);
bootbox.alert({
title: 'Error',
message: errorMessage,
callback: function () {
window.location.reload();
},
});
} else if (type !== "server_upload") {
// All promises resolved successfully
$("#upload_input").html(`<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%;"><input value="${file.name}" type="text" id="file-uploaded" disabled></input> 🔒</div>`);
if (type === "import") {
document.getElementById("lower_half").style.visibility = "visible";
document.getElementById("lower_half").hidden = false;
} else if (type === "background") {
setTimeout(function () {
location.href = `/panel/custom_login`;
}, 2000);
}
} else {
let caught = false;
let expanded = false;
try {
expanded = document.getElementById(path).classList.contains("clicked");
} catch { }
let par_el;
let items;
try {
par_el = document.getElementById(path + "ul");
items = par_el.children;
} catch (err) {
console.log(err);
caught = true;
par_el = document.getElementById("files-tree");
items = par_el.children;
}
let name = file.name;
let full_path = path + '/' + name;
let flag = false;
for (let item of items) {
if ($(item).attr("data-name") === name) {
flag = true;
}
}
if (!flag) {
if (caught && !expanded) {
$(par_el).append(`<li id="${full_path}li" class="d-block tree-ctx-item tree-file tree-item" data-path="${full_path}" data-name="${name}" onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>${name}</li>`);
} else if (expanded) {
$(par_el).append(`<li id="${full_path}li" class="tree-ctx-item tree-file tree-item" data-path="${full_path}" data-name="${name}" onclick="clickOnFile(event)"><span style="margin-right: 6px;"><i class="far fa-file"></i></span>${name}</li>`);
}
setTreeViewContext();
}
$(`#upload-progress-bar-${file_num + 1}`).removeClass("progress-bar-striped");
$(`#upload-progress-bar-${file_num + 1}`).addClass("bg-success");
$(`#upload-progress-bar-${file_num + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>');
}
}
async function calculateFileHash(file) {
const arrayBuffer = await file.arrayBuffer();
const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
const hashArray = Array.from(new Uint8Array(hashBuffer));
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
return hashHex;
}
function updateProgressBar(progress, type, i) {
if (type !== "server_upload") {
if (progress === 100) {
$(`#upload-progress-bar`).removeClass("progress-bar-striped")
$(`#upload-progress-bar`).removeClass("progress-bar-animated")
}
$(`#upload-progress-bar`).css('width', progress + '%');
$(`#upload-progress-bar`).html(progress + '%');
} else {
if (progress === 100) {
$(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped")
$(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-animated")
}
$(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
$(`#upload-progress-bar-${i + 1}`).html(progress + '%');
}
}
function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = Math.random() * 16 | 0,
v = c === 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
}
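The new upload.js above drives everything through uploadFile(type, file, path, file_num): it registers the upload with an empty-body POST, then sends 10 MB chunks in batches of 30 via Promise.all, hashing each chunk with SHA-256 and pausing 2 seconds between batches. A hedged usage sketch for multi-file server uploads follows; the input element id and the per-file loop are assumptions for illustration, not part of the diff.
// Sketch only: assumes a multi-file <input id="files"> and per-file progress bars
// named #upload-progress-bar-1, #upload-progress-bar-2, ... as used by updateProgressBar().
async function uploadSelectedFiles(path) {
    const files = document.getElementById("files").files;
    for (let i = 0; i < files.length; i++) {
        // file_num selects which numbered progress bar this file reports to
        await uploadFile("server_upload", files[i], path, i);
    }
}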

View File

@ -63,9 +63,6 @@
<nav class="sidebar sidebar-offcanvas" id="sidebar">
<ul class="nav">
<li class="nav-item nav-category" style="margin-top:10px;">{{ translate('sidebar', 'navigation', data['lang']) }}
</li>
<li class="nav-item">
<a class="nav-link" href="/panel/dashboard">
<i class="fa-solid fa-diagram-project"></i>&nbsp;

View File

@ -36,25 +36,21 @@
<table class="table table-hover" id="audit_table" style="overflow: scroll;" width="100%">
<thead>
<tr class="rounded">
<td>Username</td>
<td>Time</td>
<td>Action</td>
<td>Server ID</td>
<td>IP</td>
<th>Time</th>
<th>Username</th>
<th>Action</th>
<th>Server ID</th>
<th>IP</th>
</tr>
</thead>
<tbody>
{% for row in data['audit_logs'] %}
<tr>
<td>{{ row['user_name'] }}</td>
<td>
{{ row['created'].strftime('%Y-%m-%d %H:%M:%S') }}
<td colspan="5" id="image-div" class="text-center"> <!-- Center image within table -->
<img class="img-center" id="logo-animate" src="../static/assets/images/crafty-logo-square-1024.png"
alt="Crafty Logo, Crafty is loading" width="20%"><br><br>{{ translate('datatables',
'loadingRecords', data['lang'])}}
</td>
<td>{{ row['log_msg'] }}</td>
<td>{{ row['server_id'] }}</td>
<td>{{ row['source_ip'] }}</td>
</tr>
{% end %}
</tbody>
</table>
@ -79,17 +75,6 @@
{% end %}
{% block js %}
<script>
$(document).ready(function () {
console.log('ready for JS!')
$('#audit_table').DataTable({
'order': [1, 'desc']
}
);
});
</script>
<script>
$(document).ready(function () {
$('[data-toggle="popover"]').popover();
@ -112,6 +97,74 @@
$('.too_small').popover("hide");
} // New width
});
$(document).ready(function () {
console.log('ready for JS!')
// Initialize DataTables
// Load initial data
getActivity();
});
function updateActivity(data) {
let tbody = $('#audit_table tbody');
tbody.empty(); // Clear existing rows
$.each(data, function (index, value) {
let row = $('<tr>');
row.append(`<td>${value.time}</td>`);
if (value.user_name != "system" && value.user_id != "-1") {
row.append(`<td><a href="/panel/edit_user?id=${value.user_id}">${value.user_name}</a></td>`);
} else {
row.append(`<td>${value.user_name}</td>`);
}
row.append(`<td>${value.log_msg}</td>`);
row.append(`<td>${value.server_id}</td>`);
row.append(`<td>${value.source_ip}</td>`);
tbody.append(row);
});
$('#audit_table').DataTable({
'order': [[0, 'desc']], // Sort by the first column in descending order
filter: true,
"searching": true,
})
}
async function getActivity() {
var token = getCookie("_xsrf");
let res = await fetch(`/api/v2/crafty/logs/audit`, {
method: 'GET',
headers: {
'X-XSRFToken': token
},
});
let responseData = await res.json();
console.log(responseData);
if (responseData.status === "ok") {
updateActivity(responseData.data);
console.log("activity update")
} else {
bootbox.alert(responseData.error)
}
}
function rotateImage(degree) {
$('#logo-animate').animate({ transform: degree }, {
step: function (now, fx) {
$(this).css({
'-webkit-transform': 'rotate(' + now + 'deg)',
'-moz-transform': 'rotate(' + now + 'deg)',
'transform': 'rotate(' + now + 'deg)'
});
}
});
setTimeout(function () {
rotateImage(360);
}, 2000);
}
$(document).ready(function () {
setTimeout(function () {
rotateImage(360);
}, 2000);
});
</script>
{% end %}
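For reference, updateActivity() reads time, user_name, user_id, log_msg, server_id and source_ip from each entry returned by /api/v2/crafty/logs/audit. A sample payload shaped from those field reads (the values are placeholders; the real API response may carry more fields):
// Values are illustrative; only the field names are taken from updateActivity() above.
const exampleAuditResponse = {
    status: "ok",
    data: [{
        time: "2024-08-06 23:54:16",
        user_id: "1",
        user_name: "admin",
        log_msg: "Edited backup config",
        server_id: "a1b2c3",
        source_ip: "127.0.0.1"
    }]
};
// updateActivity(exampleAuditResponse.data) renders one row whose username links to
// /panel/edit_user?id=1, because user_name is not "system" and user_id is not "-1".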

View File

@ -69,7 +69,7 @@
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info upload-button" id="upload-button"
onclick="sendFile()" disabled>UPLOAD</button>
onclick="uploadFile('background')" disabled>UPLOAD</button>
</div>
</div>
</div>
@ -381,61 +381,6 @@
}
img.src = src_path;
}
var file;
function sendFile() {
file = $("#file")[0].files[0]
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%"><div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>';
let xmlHttpRequest = new XMLHttpRequest();
let token = getCookie("_xsrf")
let fileName = file.name
let target = '/upload'
let mimeType = file.type
let size = file.size
let type = 'background'
xmlHttpRequest.upload.addEventListener('progress', function (e) {
if (e.loaded <= size) {
var percent = Math.round(e.loaded / size * 100);
$(`#upload-progress-bar`).css('width', percent + '%');
$(`#upload-progress-bar`).html(percent + '%');
}
});
xmlHttpRequest.open('POST', target, true);
xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
xmlHttpRequest.setRequestHeader('X-Content-Length', size);
xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', type);
xmlHttpRequest.setRequestHeader('X-FileName', fileName);
xmlHttpRequest.addEventListener('load', (event) => {
if (event.target.responseText == 'success') {
console.log('Upload for file', file.name, 'was successful!')
document.getElementById("upload_input").innerHTML = '<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%"><span id="file-uploaded" style="color: gray;">' + fileName + '</span> 🔒</div>';
setTimeout(function () {
window.location.reload();
}, 2000);
}
else {
let response_text = JSON.parse(event.target.responseText);
var x = document.querySelector('.bootbox');
console.log(JSON.parse(event.target.responseText).info)
bootbox.alert({
message: JSON.parse(event.target.responseText).info,
callback: function () {
window.location.reload();
}
});
doUpload = false;
}
}, false);
xmlHttpRequest.addEventListener('error', (e) => {
console.error('Error while uploading file', file.name + '.', 'Event:', e)
}, false);
xmlHttpRequest.send(file);
}
</script>
<script src="../../static/assets/js/shared/upload.js"></script>
{% end %}

View File

@ -20,7 +20,8 @@
data-internet="{{ translate('startup', 'internet', data['lang']) }}"
data-tasks="{{ translate('startup', 'tasks', data['lang']) }}"
data-internals="{{ translate('startup', 'internals', data['lang']) }}"
data-almost="{{ translate('startup', 'almost', data['lang']) }}">
data-almost="{{ translate('startup', 'almost', data['lang']) }}"
data-cache="{{ translate('startup', 'cache', data['lang'])}}">
{{ translate('startup', 'starting', data['lang']) }}</h2>
</div>
@ -70,4 +71,4 @@
});
</script>
{% end %}
{% end %}

View File

@ -428,10 +428,13 @@
if (responseData.status === "ok") {
window.location.href = "/panel/panel_config";
} else {
let errordata = responseData.error;
if (responseData.error_data){
errordata = responseData.error
}
bootbox.alert({
title: responseData.error,
message: responseData.error_data
message: errordata
});
}
});

View File

@ -122,7 +122,7 @@ data['lang']) }}{% end %}
name="lang" form="user_form">
{% for lang in data['languages'] %}
{% if not 'incomplete' in lang %}
<option value="{{lang}}">{{lang}}</option>
<option value="{{lang}}" >{{translate('language', lang, 'humanized_index')}}</option>
{% else %}
<option value="{{lang}}" disabled>{{lang}}</option>
{% end %}
@ -393,6 +393,7 @@ data['lang']) }}{% end %}
}
function replacer(key, value) {
if (typeof value == "boolean" || key === "email" || key === "permissions" || key === "roles") {
console.log(key)
return value
} else {
console.log(key, value)
@ -433,6 +434,7 @@ data['lang']) }}{% end %}
let disabled_flag = false;
let roles = null;
if (superuser || userId != edit_id){
console.log("ROLES")
roles = $('.role_check').map(function() {
if ($(this).attr("disabled")){
disabled_flag = true;
@ -457,9 +459,7 @@ data['lang']) }}{% end %}
delete formDataObject.username
}
if (superuser || userId != edit_id){
if (!disabled_flag){
formDataObject.roles = roles;
}
if ($("#permissions").length){
formDataObject.permissions = permissions;
}

View File

@ -58,7 +58,7 @@
<!--<th>ID</th>-->
<th>{{ translate('apiKeys', 'name', data['lang']) }}</th>
<th>{{ translate('apiKeys', 'created', data['lang']) }}</th>
<th>{{ translate('apiKeys', 'superUser', data['lang']) }}</th>
<th>{{ translate('apiKeys', 'fullAccess', data['lang']) }}</th>
<th>{{ translate('apiKeys', 'perms', data['lang']) }}</th>
<th>{{ translate('apiKeys', 'buttons', data['lang']) }}</th>
</tr>
@ -70,7 +70,7 @@
<td>{{ apikey.name }}</td>
<td>{{ apikey.created.strftime('%d/%m/%Y %H:%M:%S') }}</td>
<td>
{% if apikey.superuser %}
{% if apikey.full_access %}
<span class="text-success">
<i class="fas fa-check-square"></i> {{
translate('apiKeys', 'yes', data['lang']) }}
@ -148,9 +148,15 @@
}}</label>
</td>
<td>
{% if permission in data['user_crafty_permissions'] %}
<input type="checkbox" class="crafty_perm"
id="permission_{{ permission.name }}"
name="permission_{{ permission.name }}" value="1">
{% else %}
<input type="checkbox" class="crafty_perm"
id="permission_{{ permission.name }}"
name="permission_{{ permission.name }}" value="1" disabled>
{% end %}
</td>
</tr>
{% end %}
@ -158,8 +164,8 @@
</tbody>
</table>
<label for="superuser">Superuser</label>
<input type="checkbox" class="" id="superuser" name="superuser" value="1">
<label for="full_access">{{translate('apiKeys', 'fullAccess', data['lang'])}}</label>
<input type="checkbox" class="" id="full_access" name="full_access" value="1">
<br />
@ -240,7 +246,7 @@
"name": formDataObject.name,
"server_permissions_mask": server_permissions,
"crafty_permissions_mask": crafty_permissions,
"superuser": $("#superuser").prop('checked'),
"full_access": $("#full_access").prop('checked'),
});
console.log(formDataJsonString);
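The rename above means the API key creation request now carries full_access instead of superuser. An illustrative request body under that change (field values are placeholders; only the key names come from the diff):
// Placeholder values - only the key names reflect the change above.
const exampleApiKeyBody = JSON.stringify({
    "name": "deploy-key",
    "server_permissions_mask": "11000000",
    "crafty_permissions_mask": "000",
    "full_access": false   // previously sent as "superuser"
});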

View File

@ -39,208 +39,151 @@
<span class="d-block d-sm-none">
{% include "parts/m_server_controls_list.html %}
</span>
<div class="row">
<div class="col-md-6 col-sm-12">
<br>
<br>
{% if data['backing_up'] %}
<div class="progress" style="height: 15px;">
<div class="progress-bar progress-bar-striped progress-bar-animated" id="backup_progress_bar"
role="progressbar" style="width:{{data['backup_stats']['percent']}}%;"
aria-valuenow="{{data['backup_stats']['percent']}}" aria-valuemin="0" aria-valuemax="100">{{
data['backup_stats']['percent'] }}%</div>
</div>
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
id="total_files">{{data['server_stats']['world_size']}}</span></p>
{% end %}
<br>
{% if not data['backing_up'] %}
<div id="backup_button" class="form-group">
<button class="btn btn-primary" id="backup_now_button">{{ translate('serverBackups', 'backupNow',
data['lang']) }}</button>
</div>
{% end %}
<form id="backup-form" class="forms-sample">
<div class="form-group">
{% if data['super_user'] %}
<label for="server_name">{{ translate('serverBackups', 'storageLocation', data['lang']) }} <small
class="text-muted ml-1"> - {{ translate('serverBackups', 'storageLocationDesc', data['lang'])
}}</small> </label>
<input type="text" class="form-control" name="backup_path" id="backup_path"
value="{{ data['server_stats']['server_id']['backup_path'] }}"
placeholder="{{ translate('serverBackups', 'storageLocation', data['lang']) }}">
<div class="col-md-12 col-sm-12" style="overflow-x:auto;">
<div class="card">
<div class="card-header header-sm d-flex justify-content-between align-items-center">
<h4 class="card-title"><i class="fa-regular fa-bell"></i> {{ translate('serverBackups', 'backups',
data['lang']) }} </h4>
{% if data['user_data']['hints'] %}
<span class="too_small" title="{{ translate('serverSchedules', 'cannotSee', data['lang']) }}" ,
data-content="{{ translate('serverSchedules', 'cannotSeeOnMobile', data['lang']) }}" ,
data-placement="bottom"></span>
{% end %}
</div>
<div class="form-group">
<label for="server_path">{{ translate('serverBackups', 'maxBackups', data['lang']) }} <small
class="text-muted ml-1"> - {{ translate('serverBackups', 'maxBackupsDesc', data['lang'])
}}</small> </label>
<input type="text" class="form-control" name="max_backups" id="max_backups"
value="{{ data['backup_config']['max_backups'] }}"
placeholder="{{ translate('serverBackups', 'maxBackups', data['lang']) }}">
</div>
<div class="form-group">
<label for="compress" class="form-check-label ml-4 mb-4"></label>
{% if data['backup_config']['compress'] %}
<input type="checkbox" class="form-check-input" id="compress" name="compress" checked=""
value="True">{{ translate('serverBackups', 'compress', data['lang']) }}
{% else %}
<input type="checkbox" class="form-check-input" id="compress" name="compress" value="True">{{
translate('serverBackups', 'compress', data['lang']) }}
{% end %}
</div>
<div class="form-group">
<label for="shutdown" class="form-check-label ml-4 mb-4"></label>
{% if data['backup_config']['shutdown'] %}
<input type="checkbox" class="form-check-input" id="shutdown" name="shutdown" checked=""
value="True">{{ translate('serverBackups', 'shutdown', data['lang']) }}
{% else %}
<input type="checkbox" class="form-check-input" id="shutdown" name="shutdown" value="True">{{
translate('serverBackups', 'shutdown', data['lang']) }}
{% end %}
</div>
<div class="form-group">
<label for="command-check" class="form-check-label ml-4 mb-4"></label>
{% if data['backup_config']['before'] %}
<input type="checkbox" class="form-check-input" id="before-check" name="before-check" checked>{{
translate('serverBackups', 'before', data['lang']) }}
<br>
<input type="text" class="form-control" name="backup_before" id="backup_before"
value="{{ data['backup_config']['before'] }}" placeholder="We enter the / for you"
style="display: inline-block;">
{% else %}
<input type="checkbox" class="form-check-input" id="before-check" name="before-check">{{
translate('serverBackups', 'before', data['lang']) }}
<br>
<input type="text" class="form-control" name="backup_before" id="backup_before" value=""
placeholder="We enter the / for you." style="display: none;">
{% end %}
</div>
<div class="form-group">
<label for="command-check" class="form-check-label ml-4 mb-4"></label>
{% if data['backup_config']['after'] %}
<input type="checkbox" class="form-check-input" id="after-check" name="after-check" checked>{{
translate('serverBackups', 'after', data['lang']) }}
<br>
<input type="text" class="form-control" name="backup_after" id="backup_after"
value="{{ data['backup_config']['after'] }}" placeholder="We enter the / for you"
style="display: inline-block;">
{% else %}
<input type="checkbox" class="form-check-input" id="after-check" name="after-check">{{
translate('serverBackups', 'after', data['lang']) }}
<br>
<input type="text" class="form-control" name="backup_after" id="backup_after" value=""
placeholder="We enter the / for you." style="display: none;">
{% end %}
</div>
<div class="form-group">
<label for="server">{{ translate('serverBackups', 'exclusionsTitle', data['lang']) }} <small> - {{
translate('serverBackups', 'excludedChoose', data['lang']) }}</small></label>
<br>
<button class="btn btn-primary mr-2" id="root_files_button"
data-server_path="{{ data['server_stats']['server_id']['path']}}" type="button">{{
translate('serverBackups', 'clickExclude', data['lang']) }}</button>
</div>
<div class="modal fade" id="dir_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">{{ translate('serverBackups',
'excludedChoose', data['lang']) }}</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<div class="modal-body">
<div class="tree-ctx-item" id="main-tree-div" data-path=""
style="overflow: scroll; max-height:75%;">
<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled>
<span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path="">
<i class="far fa-folder"></i>
<i class="far fa-folder-open"></i>
{{ translate('serverFiles', 'files', data['lang']) }}
</span>
</input>
</div>
</div>
<div class="modal-footer">
<button type="button" id="modal-cancel" class="btn btn-secondary" data-dismiss="modal"><i class="fa-solid fa-xmark"></i></button>
<button type="button" id="modal-okay" data-dismiss="modal" class="btn btn-primary"><i class="fa-solid fa-thumbs-up"></i></button>
</div>
</div>
<div><a class="btn btn-info"
href="/panel/add_backup?id={{ data['server_stats']['server_id']['server_id'] }}"><i
class="fas fa-plus-circle"></i> {{ translate('serverBackups', 'newBackup', data['lang']) }}</a>
</div>
</div>
<button type="submit" class="btn btn-success mr-2">{{ translate('serverBackups', 'save', data['lang'])
}}</button>
<button type="reset" class="btn btn-light">{{ translate('serverBackups', 'cancel', data['lang'])
}}</button>
</form>
</div>
<div class="col-md-6 col-sm-12">
<div class="text-center">
<table class="table table-responsive dataTable" id="backup_table">
<h4 class="card-title">{{ translate('serverBackups', 'currentBackups', data['lang']) }}</h4>
<thead>
<tr>
<th width="10%">{{ translate('serverBackups', 'options', data['lang']) }}</th>
<th>{{ translate('serverBackups', 'path', data['lang']) }}</th>
<th width="20%">{{ translate('serverBackups', 'size', data['lang']) }}</th>
</tr>
</thead>
<tbody>
{% for backup in data['backup_list'] %}
<tr>
<td>
<a href="/panel/download_backup?file={{ backup['path'] }}&id={{ data['server_stats']['server_id']['server_id'] }}"
class="btn btn-primary">
<i class="fas fa-download" aria-hidden="true"></i>
{{ translate('serverBackups', 'download', data['lang']) }}
</a>
<br>
<br>
<button data-file="{{ backup['path'] }}" data-backup_path="{{ data['backup_path'] }}"
class="btn btn-danger del_button">
<i class="fas fa-trash" aria-hidden="true"></i>
{{ translate('serverBackups', 'delete', data['lang']) }}
</button>
<button data-file="{{ backup['path'] }}" class="btn btn-warning restore_button">
<i class="fas fa-undo-alt" aria-hidden="true"></i>
{{ translate('serverBackups', 'restore', data['lang']) }}
</button>
</td>
<td>{{ backup['path'] }}</td>
<td>{{ backup['size'] }}</td>
</tr>
{% end %}
</tbody>
</table>
<div class="card-body">
{% if len(data['backups']) == 0 %}
<div style="text-align: center; color: grey;">
<h7>{{ translate('serverBackups', 'no-backup', data['lang']) }}.</h7>
</div>
{% end %}
{% if len(data['backups']) > 0 %}
<div class="d-none d-lg-block">
<table class="table table-hover responsive-table" aria-label="backups list" id="backup_table"
style="table-layout:fixed;">
<thead>
<tr class="rounded">
<th scope="col" style="width: 15%; min-width: 10px;">{{ translate('serverBackups', 'name',
data['lang']) }} </th>
<th scope="col" style="width: 10%; min-width: 10px;">{{ translate('serverBackups', 'status',
data['lang']) }} </th>
<th scope="col" style="width: 50%; min-width: 50px;">{{ translate('serverBackups',
'storageLocation', data['lang']) }}</th>
<th scope="col" style="width: 10%; min-width: 50px;">{{ translate('serverBackups',
'maxBackups', data['lang']) }}</th>
<th scope="col" style="width: 10%; min-width: 50px;">{{ translate('serverBackups', 'actions',
data['lang']) }}</th>
</tr>
</thead>
<tbody>
{% for backup in data['backups'] %}
<tr>
<td id="{{backup.backup_name}}" class="id">
<p>{{backup.backup_name}}</p>
<br>
{% if backup.default %}
<span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
class="fa-solid fa-question"></i></button></small>
{% end %}
</td>
<td>
<div id="{{backup.backup_id}}_status">
<button class="btn btn-outline-success backup-status" data-status="{{ backup.status }}"
data-Standby="{{ translate('serverBackups', 'standby', data['lang'])}}"
data-Failed="{{ translate('serverBackups', 'failed', data['lang'])}}"></button>
</div>
</td>
<td id="{{backup.backup_location}}" class="type">
<p style="overflow: scroll;" class="no-scroll">{{backup.backup_location}}</p>
</td>
<td id="{{backup.max_backups}}" class="trigger" style="overflow: scroll; max-width: 30px;">
<p>{{backup.max_backups}}</p>
</td>
<td id="backup_edit" class="action">
<button
onclick="window.location.href=`/panel/edit_backup?id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{backup.backup_id}}`"
class="btn btn-info">
<i class="fas fa-pencil-alt"></i>
</button>
{% if not backup.default %}
<button data-backup={{ backup.backup_id }} class="btn btn-danger del_button">
<i class="fas fa-trash" aria-hidden="true"></i>
</button>
{% end %}
<button data-backup={{ backup.backup_id }} data-toggle="tooltip"
title="{{ translate('serverBackups', 'run', data['lang']) }}"
class="btn btn-outline-warning run-backup backup_now_button">
<i class="fa-solid fa-forward"></i>
</button>
</td>
</tr>
{% end %}
</tbody>
</table>
</div>
<div class="d-block d-lg-none">
<table aria-label="backups list" class="table table-hover responsive-table" id="backup_table_mini"
style="table-layout:fixed;">
<thead>
<tr class="rounded">
<th style="width: 40%; min-width: 10px;">Name
</th>
<th style="width: 40%; min-width: 50px;">{{ translate('serverBackups', 'edit', data['lang'])
}}</th>
</tr>
</thead>
<tbody>
{% for backup in data['backups'] %}
<tr>
<td id="{{backup.backup_name}}" class="id">
<p>{{backup.backup_name}}</p>
<br>
<div id="{{backup.backup_id}}_status">
<button class="btn btn-outline-success backup-status" data-status="{{ backup.status }}"
data-Standby="{{ translate('serverBackups', 'standby', data['lang'])}}"
data-Failed="{{ translate('serverBackups', 'failed', data['lang'])}}"></button>
</div>
<br>
{% if backup.default %}
<span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
class="fa-solid fa-question"></i></button></small>
{% end %}
</td>
<td id="backup_edit" class="action">
<button
onclick="window.location.href=`/panel/edit_backup?id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{backup.backup_id}}`"
class="btn btn-info">
<i class="fas fa-pencil-alt"></i>
</button>
{% if not backup.default %}
<button data-backup={{ backup.backup_id }} class="btn btn-danger del_button">
<i class="fas fa-trash" aria-hidden="true"></i>
</button>
{% end %}
<button data-backup={{ backup.backup_id }} data-toggle="tooltip"
title="{{ translate('serverBackups', 'run', data['lang']) }}"
class="btn btn-outline-warning test-socket backup_now_button">
<i class="fa-solid fa-forward"></i>
</button>
</td>
</tr>
{% end %}
</tbody>
</table>
</div>
{% end %}
</div>
</div>
</div>
</div>
<div class="col-md-12 col-sm-12">
<br>
<br>
<div class="card-header header-sm d-flex justify-content-between align-items-center">
<h4 class="card-title"><i class="fas fa-server"></i> {{ translate('serverBackups', 'excludedBackups',
data['lang']) }} <small class="text-muted ml-1"></small> </h4>
</div>
<br>
<ul>
{% for item in data['exclusions'] %}
<li>{{item}}</li>
<br>
{% end %}
</ul>
</div>
</div>
</div>
@ -298,7 +241,7 @@
{% block js %}
<script>
const server_id = new URLSearchParams(document.location.search).get('id')
const serverId = new URLSearchParams(document.location.search).get('id')
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
@ -307,183 +250,105 @@
return r ? r[1] : undefined;
}
async function backup_started() {
async function backup_started(backup_id) {
const token = getCookie("_xsrf")
let res = await fetch(`/api/v2/servers/${server_id}/action/backup_server`, {
method: 'POST',
headers: {
'X-XSRFToken': token
}
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
$("#backup_button").html(`<div class="progress" style="height: 15px;">
<div class="progress-bar progress-bar-striped progress-bar-animated" id="backup_progress_bar"
role="progressbar" style="width:{{data['backup_stats']['percent']}}%;"
aria-valuenow="{{data['backup_stats']['percent']}}" aria-valuemin="0" aria-valuemax="100">{{
data['backup_stats']['percent'] }}%</div>
</div>
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
id="total_files">{{data['server_stats']['world_size']}}</span></p>`);
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
console.log(backup_id)
let res = await fetch(`/api/v2/servers/${serverId}/action/backup_server/${backup_id}/`, {
method: 'POST',
headers: {
'X-XSRFToken': token
}
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
$("#backup_button").prop('disabled', true)
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
}
return;
}
async function del_backup(filename, id) {
async function del_backup(backup_id) {
const token = getCookie("_xsrf")
let contents = JSON.stringify({"filename": filename})
let res = await fetch(`/api/v2/servers/${id}/backups/backup/`, {
let res = await fetch(`/api/v2/servers/${serverId}/backups/backup/${backup_id}`, {
method: 'DELETE',
headers: {
'token': token,
},
body: contents
body: {}
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.reload();
}else{
bootbox.alert({"title": responseData.status,
"message": responseData.error})
}
}
async function restore_backup(filename, id) {
const token = getCookie("_xsrf")
let contents = JSON.stringify({"filename": filename})
var dialog = bootbox.dialog({
message: "<i class='fa fa-spin fa-spinner'></i> {{ translate('serverBackups', 'restoring', data['lang']) }}",
closeButton: false
});
let res = await fetch(`/api/v2/servers/${id}/backups/backup/`, {
method: 'POST',
headers: {
'token': token,
},
body: contents
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.href = "/panel/dashboard";
}else{
bootbox.alert({"title": responseData.status,
"message": responseData.error})
}
}
$("#before-check").on("click", function () {
if ($("#before-check:checked").val()) {
$("#backup_before").css("display", "inline-block");
} else {
$("#backup_before").css("display", "none");
$("#backup_before").val("");
}
});
$("#after-check").on("click", function () {
if ($("#after-check:checked").val()) {
$("#backup_after").css("display", "inline-block");
} else {
$("#backup_after").css("display", "none");
$("#backup_after").val("");
}
});
function replacer(key, value) {
if (key != "backup_before" && key != "backup_after") {
if (typeof value == "boolean" || key === "executable_update_url") {
return value
} else {
return (isNaN(value) ? value : +value);
}
} else {
return value;
bootbox.alert({
"title": responseData.status,
"message": responseData.error
})
}
}
$(document).ready(function () {
$("#backup-form").on("submit", async function (e) {
e.preventDefault();
const token = getCookie("_xsrf")
let backupForm = document.getElementById("backup-form");
let formData = new FormData(backupForm);
//Remove checks that we don't need in form data.
formData.delete("after-check");
formData.delete("before-check");
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
//We need to make sure these are sent regardless of whether or not they're checked
formDataObject.compress = $("#compress").prop('checked');
formDataObject.shutdown = $("#shutdown").prop('checked');
let excluded = [];
$('input.excluded:checkbox:checked').each(function () {
excluded.push($(this).val());
});
if ($("#root_files_button").hasClass("clicked")){
formDataObject.exclusions = excluded;
}
delete formDataObject.root_path
console.log(excluded);
console.log(formDataObject);
// Format the plain form data as JSON
let formDataJsonString = JSON.stringify(formDataObject, replacer);
console.log(formDataJsonString);
let res = await fetch(`/api/v2/servers/${server_id}/backups/`, {
method: 'PATCH',
headers: {
'X-XSRFToken': token
},
body: formDataJsonString,
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.reload();
} else {
bootbox.alert({
title: responseData.error,
message: responseData.error_data
});
}
});
try {
if ($('#backup_path').val() == '') {
console.log('true')
try {
document.getElementById('backup_now_button').disabled = true;
} catch {
}
} else {
document.getElementById('backup_now_button').disabled = false;
}
} catch {
try {
document.getElementById('backup_now_button').disabled = false;
} catch {
}
}
console.log("ready!");
$("#backup_config_box").hide();
$("#backup_save_note").hide();
$("#show_config").click(function () {
$("#backup_config_box").toggle();
$('#backup_button').hide();
$('#backup_save_note').show();
$('#backup_data').hide();
$(".backup-explain").on("click", function () {
bootbox.alert($(this).data("explain"));
});
$(".backup-status").on("click", function () {
if ($(this).data('message') != "") {
bootbox.alert($(this).data('message'));
}
});
$('.backup-status').each(function () {
// Get the JSON string from the element's text
var data = $(this).data('status');
try {
// Update the element's text with the status value
$(this).text($(this).data(data["status"].toLowerCase()));
// Optionally, add classes based on status to style the element
$(this).attr('data-message', data["message"]);
if (data.status === 'Active') {
$(this).removeClass();
$(this).addClass('badge-pill badge-outline-success btn');
} else if (data.status === 'Failed') {
$(this).removeClass();
$(this).addClass('badge-pill badge-outline-danger btn');
} else if (data.status === 'Standby') {
$(this).removeClass();
$(this).addClass('badge-pill badge-outline-secondary btn');
}
} catch (e) {
console.error('Invalid JSON string:', e);
}
});
if (webSocket) {
webSocket.on('backup_status', function (backup) {
text = ``;
console.log(backup)
if (backup.percent >= 100) {
$(`#${backup.backup_id}_status`).html(`<span class="badge-pill badge-outline-success backup-status"
>Completed</span>`);
setTimeout(function () {
window.location.reload(1);
}, 5000);
} else {
text = `<div class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar" style="width:${backup.percent}%;"
aria-valuenow="${backup.percent}" aria-valuemin="0" aria-valuemax="100">${backup.percent}%</div>`
$(`#${backup.backup_id}_status`).html(text);
}
});
}
$('#backup_table').DataTable({
"order": [[1, "desc"]],
"paging": false,
@ -491,11 +356,12 @@
"searching": true,
"ordering": true,
"info": true,
"autoWidth": false,
"responsive": true,
"autoWidth": true,
"responsive": false,
});
$(".del_button").click(function () {
let backup = $(this).data('backup');
var file_to_del = $(this).data("file");
var backup_path = $(this).data('backup_path');
@ -515,8 +381,8 @@
callback: function (result) {
console.log(result);
if (result == true) {
var full_path = backup_path + '/' + file_to_del;
del_backup(file_to_del, server_id);
del_backup(backup);
}
}
});
@ -541,13 +407,13 @@
callback: function (result) {
console.log(result);
if (result == true) {
restore_backup(file_to_restore, server_id);
restore_backup(file_to_restore, serverId);
}
}
});
});
$("#backup_now_button").click(function () {
backup_started();
$(".backup_now_button").click(function () {
backup_started($(this).data('backup'));
});
});
@ -591,70 +457,55 @@
bootbox.alert("You must input a path before selecting this button");
}
});
if (webSocket) {
webSocket.on('backup_status', function (backup) {
if (backup.percent >= 100) {
document.getElementById('backup_progress_bar').innerHTML = '100%';
document.getElementById('backup_progress_bar').style.width = '100%';
setTimeout(function () {
window.location.reload(1);
}, 5000);
} else {
document.getElementById('backup_progress_bar').innerHTML = backup.percent + '%';
document.getElementById('backup_progress_bar').style.width = backup.percent + '%';
document.getElementById('total_files').innerHTML = backup.total_files;
}
});
}
function getDirView(event){
function getDirView(event) {
let path = event.target.parentElement.getAttribute("data-path");
if (document.getElementById(path).classList.contains('clicked')) {
return;
}else{
} else {
getTreeView(path);
}
}
async function getTreeView(path){
async function getTreeView(path) {
console.log(path)
const token = getCookie("_xsrf");
let res = await fetch(`/api/v2/servers/${server_id}/files`, {
method: 'POST',
headers: {
'X-XSRFToken': token
},
body: JSON.stringify({"page": "backups", "path": path}),
let res = await fetch(`/api/v2/servers/${serverId}/files`, {
method: 'POST',
headers: {
'X-XSRFToken': token
},
body: JSON.stringify({ "page": "backups", "path": path }),
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
process_tree_response(responseData);
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
process_tree_response(responseData);
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
}
}
}
function process_tree_response(response) {
let path = response.data.root_path.path;
let text = `<ul class="tree-nested d-block" id="${path}ul">`;
Object.entries(response.data).forEach(([key, value]) => {
if (key === "root_path" || key === "db_stats"){
//continue is not valid in for each. Return acts as a continue.
return;
}
Object.entries(response.data).forEach(([key, value]) => {
if (key === "root_path" || key === "db_stats") {
//continue is not valid in for each. Return acts as a continue.
return;
}
let checked = ""
let dpath = value.path;
let filename = key;
if (value.excluded){
if (value.excluded) {
checked = "checked"
}
if (value.dir){
if (value.dir) {
text += `<li class="tree-item" data-path="${dpath}">
\n<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
<input type="checkbox" class="checkBoxClass excluded" value="${dpath}" ${checked}>
@ -664,7 +515,7 @@
<strong>${filename}</strong>
</span>
</input></div><li>`
}else{
} else {
text += `<li
class="d-block tree-ctx-item tree-file"
data-path="${dpath}"
@ -674,30 +525,30 @@
}
});
text += `</ul>`;
if(response.data.root_path.top){
if (response.data.root_path.top) {
try {
document.getElementById('main-tree-div').innerHTML += text;
document.getElementById('main-tree').parentElement.classList.add("clicked");
} catch {
document.getElementById('files-tree').innerHTML = text;
}
}else{
document.getElementById('main-tree-div').innerHTML += text;
document.getElementById('main-tree').parentElement.classList.add("clicked");
} catch {
document.getElementById('files-tree').innerHTML = text;
}
} else {
try {
document.getElementById(path + "span").classList.add('tree-caret-down');
document.getElementById(path).innerHTML += text;
document.getElementById(path).classList.add("clicked");
} catch {
console.log("Bad")
}
document.getElementById(path + "span").classList.add('tree-caret-down');
document.getElementById(path).innerHTML += text;
document.getElementById(path).classList.add("clicked");
} catch {
console.log("Bad")
}
var toggler = document.getElementById(path + "span");
var toggler = document.getElementById(path + "span");
if (toggler.classList.contains('files-tree-title')) {
document.getElementById(path + "span").addEventListener("click", function caretListener() {
document.getElementById(path + "ul").classList.toggle("d-block");
document.getElementById(path + "span").classList.toggle("tree-caret-down");
});
}
if (toggler.classList.contains('files-tree-title')) {
document.getElementById(path + "span").addEventListener("click", function caretListener() {
document.getElementById(path + "ul").classList.toggle("d-block");
document.getElementById(path + "span").classList.toggle("tree-caret-down");
});
}
}
}
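The backup_status handler above only reads backup_id and percent from the websocket event. A minimal example of the shape it expects (assumed from those reads; the real event may carry additional fields such as a message):
// Assumed shape, derived from the fields read in the 'backup_status' handler above.
const exampleBackupStatusEvent = { backup_id: 7, percent: 42 };
// percent < 100  -> the element with id "7_status" gets an animated progress bar at 42%
// percent >= 100 -> that element shows "Completed" and the page reloads after ~5 seconds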

View File

@ -0,0 +1,758 @@
{% extends ../base.html %}
{% block meta %}
{% end %}
{% block title %}Crafty Controller - {{ translate('serverDetails', 'serverDetails', data['lang']) }}{% end %}
{% block content %}
<div class="content-wrapper">
<!-- Page Title Header Starts-->
<div class="row page-title-header">
<div class="col-12">
<div class="page-header">
<h4 class="page-title">
{{ translate('serverDetails', 'serverDetails', data['lang']) }} - {{
data['server_stats']['server_id']['server_name'] }}
<br />
<small>UUID: {{ data['server_stats']['server_id']['server_id'] }}</small>
</h4>
</div>
</div>
</div>
<!-- Page Title Header Ends-->
{% include "parts/details_stats.html %}
<div class="row">
<div class="col-sm-12 grid-margin">
<div class="card">
<div class="card-body pt-0">
<span class="d-none d-sm-block">
{% include "parts/server_controls_list.html %}
</span>
<span class="d-block d-sm-none">
{% include "parts/m_server_controls_list.html %}
</span>
<div class="row">
<div class="col-md-6 col-sm-12">
<br>
<br>
<div id="{{data['backup_config'].get('backup_id', None)}}_status" class="progress"
style="height: 15px; display: none;">
</div>
{% if data['backing_up'] %}
<p>Backing up <i class="fas fa-spin fa-spinner"></i> <span
id="total_files">{{data['server_stats']['world_size']}}</span></p>
{% end %}
<br>
{% if not data['backing_up'] %}
<div id="backup_button" class="form-group">
<button class="btn btn-primary" id="backup_now_button">{{ translate('serverBackups', 'backupNow',
data['lang']) }}</button>
</div>
{% end %}
<form id="backup-form" class="forms-sample">
<div class="form-group">
<label for="backup_name">{{ translate('serverBackups', 'name', data['lang']) }}
{% if data["backup_config"].get("default", None) %}
&nbsp;&nbsp; <span class="badge-pill badge-outline-warning">{{ translate('serverBackups', 'default',
data['lang']) }}</span><small><button class="badge-pill badge-outline-info backup-explain"
data-explain="{{ translate('serverBackups', 'defaultExplain', data['lang'])}}"><i
class="fa-solid fa-question"></i></button></small>
{% end %}
</label>
{% if data["backup_config"].get("backup_id", None) %}
<input type="text" class="form-control" name="backup_name" id="backup_name"
value="{{ data['backup_config']['backup_name'] }}">
{% else %}
<input type="text" class="form-control" name="backup_name" id="backup_name"
placeholder="{{ translate('serverBackups', 'myBackup', data['lang']) }}">
{% end %}
<br>
<br>
{% if data['super_user'] %}
<label for="server_name">{{ translate('serverBackups', 'storageLocation', data['lang']) }} <small
class="text-muted ml-1"> - {{ translate('serverBackups', 'storageLocationDesc', data['lang'])
}}</small> </label>
<input type="text" class="form-control" name="backup_location" id="backup_location"
value="{{ data['backup_config']['backup_location'] }}"
placeholder="{{ translate('serverBackups', 'storageLocation', data['lang']) }}">
{% end %}
</div>
<div class="form-group">
<label for="server_path">{{ translate('serverBackups', 'maxBackups', data['lang']) }} <small
class="text-muted ml-1"> - {{ translate('serverBackups', 'maxBackupsDesc', data['lang'])
}}</small> </label>
<input type="text" class="form-control" name="max_backups" id="max_backups"
value="{{ data['backup_config']['max_backups'] }}"
placeholder="{{ translate('serverBackups', 'maxBackups', data['lang']) }}">
</div>
<div class="form-group">
<div class="custom-control custom-switch">
{% if data['backup_config']['compress'] %}
<input type="checkbox" class="custom-control-input" id="compress" name="compress" checked=""
value="True">
{% else %}
<input type="checkbox" class="custom-control-input" id="compress" name="compress" value="True">
{% end %}
<label for="compress" class="custom-control-label">{{ translate('serverBackups', 'compress',
data['lang']) }}</label>
</div>
</div>
<div class="form-group">
<div class="custom-control custom-switch">
{% if data['backup_config']['shutdown']%}
<input type="checkbox" class="custom-control-input" id="shutdown" name="shutdown" checked=""
value="True">
{% else %}
<input type="checkbox" class="custom-control-input" id="shutdown" name="shutdown" value="True">
{% end %}
<label for="shutdown" class="custom-control-label">{{ translate('serverBackups', 'shutdown',
data['lang']) }}</label>
</div>
</div>
<div class="form-group">
<div class="custom-control custom-switch">
{% if data['backup_config']['before'] %}
<input type="checkbox" class="custom-control-input" id="before-check" name="before-check" checked>
<input type="text" class="form-control hidden-input" name="before" id="backup_before"
value="{{ data['backup_config']['before'] }}" placeholder="We enter the / for you"
style="display: inline-block;">
{% else %}
<input type="checkbox" class="custom-control-input" id="before-check" name="before-check">
<input type="text" class="form-control hidden-input" name="before" id="backup_before" value=""
placeholder="We enter the / for you." style="display: none;">
{% end %}
<label for="before-check" class="custom-control-label">{{
translate('serverBackups', 'before', data['lang']) }}</label>
</div>
</div>
<div class="form-group">
<div class="custom-control custom-switch">
{% if data['backup_config']['after'] %}
<input type="checkbox" class="custom-control-input" id="after-check" name="after-check" checked>
<input type="text" class="form-control hidden-input" name="after" id="backup_after"
value="{{ data['backup_config']['after'] }}" placeholder="We enter the / for you"
style="display: inline-block;">
<br>
{% else %}
<input type="checkbox" class="custom-control-input" id="after-check" name="after-check">
<input type="text" class="form-control hidden-input" name="after" id="backup_after" value=""
placeholder="We enter the / for you." style="display: none;">
{% end %}
<label for="after-check" class="custom-control-label">{{
translate('serverBackups', 'after', data['lang']) }}</label>
</div>
</div>
<div class="form-group">
<label for="server">{{ translate('serverBackups', 'exclusionsTitle', data['lang']) }} <small> - {{
translate('serverBackups', 'excludedChoose', data['lang']) }}</small></label>
<br>
<button class="btn btn-primary mr-2" id="root_files_button"
data-server_path="{{ data['server_stats']['server_id']['path']}}" type="button">{{
translate('serverBackups', 'clickExclude', data['lang']) }}</button>
</div>
<div class="modal fade" id="dir_select" tabindex="-1" aria-labelledby="dir_select" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">{{ translate('serverBackups',
'excludedChoose', data['lang']) }}</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<div class="modal-body">
<div class="tree-ctx-item" id="main-tree-div" data-path=""
style="overflow: scroll; max-height:75%;">
<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled>
<span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path="">
<i class="far fa-folder"></i>
<i class="far fa-folder-open"></i>
{{ translate('serverFiles', 'files', data['lang']) }}
</span>
</input>
</div>
</div>
<div class="modal-footer">
<button type="button" id="modal-cancel" class="btn btn-secondary" data-dismiss="modal"><i
class="fa-solid fa-xmark"></i></button>
<button type="button" id="modal-okay" data-dismiss="modal" class="btn btn-primary"><i
class="fa-solid fa-thumbs-up"></i></button>
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-success mr-2">{{ translate('serverBackups', 'save', data['lang'])
}}</button>
<button type="reset" class="btn btn-light cancel-button">{{ translate('serverBackups', 'cancel',
data['lang'])
}}</button>
</form>
</div>
<div class="col-md-6 col-sm-12">
<div class="text-center">
<table class="table table-responsive dataTable" id="backup_table">
<h4 class="card-title">{{ translate('serverBackups', 'currentBackups', data['lang']) }}</h4>
<thead>
<tr>
<th>{{ translate('serverBackups', 'options', data['lang']) }}</th>
<th>{{ translate('serverBackups', 'path', data['lang']) }}</th>
<th>{{ translate('serverBackups', 'size', data['lang']) }}</th>
</tr>
</thead>
<tbody>
{% for backup in data['backup_list'] %}
<tr>
<td>
<a href="/panel/download_backup?file={{ backup['path'] }}&id={{ data['server_stats']['server_id']['server_id'] }}&backup_id={{ data['backup_config']['backup_id']}}"
class="btn btn-primary">
<i class="fas fa-download" aria-hidden="true"></i>
{{ translate('serverBackups', 'download', data['lang']) }}
</a>
<br>
<br>
<button data-file="{{ backup['path'] }}"
data-backup_location="{{ data['backup_config']['backup_location'] }}"
class="btn btn-danger del_button">
<i class="fas fa-trash" aria-hidden="true"></i>
{{ translate('serverBackups', 'delete', data['lang']) }}
</button>
<button data-file="{{ backup['path'] }}" class="btn btn-warning restore_button">
<i class="fas fa-undo-alt" aria-hidden="true"></i>
{{ translate('serverBackups', 'restore', data['lang']) }}
</button>
</td>
<td>{{ backup['path'] }}</td>
<td>{{ backup['size'] }}</td>
</tr>
{% end %}
</tbody>
</table>
</div>
</div>
</div>
<div class="col-md-12 col-sm-12">
<br>
<br>
<div class="card-header header-sm d-flex justify-content-between align-items-center">
<h4 class="card-title"><i class="fas fa-server"></i> {{ translate('serverBackups', 'excludedBackups',
data['lang']) }} <small class="text-muted ml-1"></small> </h4>
</div>
<br>
<ul>
{% for item in data['exclusions'] %}
<li>{{item}}</li>
<br>
{% end %}
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
<style>
/* Remove default bullets */
.tree-view,
.tree-nested {
list-style-type: none;
margin: 0;
padding: 0;
margin-left: 10px;
}
/* Style the items */
.tree-item,
.files-tree-title {
cursor: pointer;
user-select: none;
/* Prevent text selection */
}
/* Create the caret/arrow with a unicode, and style it */
.tree-caret .fa-folder {
display: inline-block;
}
.tree-caret .fa-folder-open {
display: none;
}
/* Rotate the caret/arrow icon when clicked on (using JavaScript) */
.tree-caret-down .fa-folder {
display: none;
}
.tree-caret-down .fa-folder-open {
display: inline-block;
}
/* Hide the nested list */
.tree-nested {
display: none;
}
</style>
<!-- content-wrapper ends -->
{% end %}
{% block js %}
<script>
const server_id = new URLSearchParams(document.location.search).get('id')
const backup_id = new URLSearchParams(document.location.search).get('backup_id')
//used to get cookies from browser - this is part of tornados xsrf protection - it's for extra security
function getCookie(name) {
var r = document.cookie.match("\\b" + name + "=([^;]*)\\b");
return r ? r[1] : undefined;
}
async function backup_started() {
const token = getCookie("_xsrf")
let res = await fetch(`/api/v2/servers/${server_id}/action/backup_server/${backup_id}`, {
method: 'POST',
headers: {
'X-XSRFToken': token
}
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
$("#backup_button").prop('disabled', true)
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
}
return;
}
async function del_backup(filename, id) {
const token = getCookie("_xsrf")
let contents = JSON.stringify({ "filename": filename })
let res = await fetch(`/api/v2/servers/${server_id}/backups/backup/${backup_id}/files/`, {
method: 'DELETE',
headers: {
'token': token,
},
body: contents
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.reload();
} else {
bootbox.alert({
"title": responseData.status,
"message": responseData.error
})
}
}
async function restore_backup(filename, id) {
const token = getCookie("_xsrf")
let contents = JSON.stringify({ "filename": filename })
var dialog = bootbox.dialog({
message: "<i class='fa fa-spin fa-spinner'></i> {{ translate('serverBackups', 'restoring', data['lang']) }}",
closeButton: false
});
let res = await fetch(`/api/v2/servers/${server_id}/backups/backup/${backup_id}/`, {
method: 'POST',
headers: {
'token': token,
},
body: contents
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.href = "/panel/dashboard";
} else {
bootbox.alert({
"title": responseData.status,
"message": responseData.error
})
}
}
$("#before-check").on("click", function () {
if ($("#before-check:checked").val()) {
$("#backup_before").css("display", "inline-block");
} else {
$("#backup_before").css("display", "none");
$("#backup_before").val("");
}
});
$("#after-check").on("click", function () {
if ($("#after-check:checked").val()) {
$("#backup_after").css("display", "inline-block");
} else {
$("#backup_after").css("display", "none");
$("#backup_after").val("");
}
});
function replacer(key, value) {
if (key === "excluded_dirs") {
if (value == 0) {
return []
} else {
return value
}
}
if (key != "before" && key != "after") {
if (typeof value == "boolean" || key === "executable_update_url") {
return value
} else {
return (isNaN(value) ? value : +value);
}
} else {
return value;
}
}
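// Minimal usage sketch of the replacer above, assuming a form with these
// example fields (the field names are illustrative only): numeric-looking
// strings become numbers, booleans pass through, an empty excluded_dirs is
// normalised to an empty array, and before/after commands stay strings.
// JSON.stringify({ max_backups: "5", compress: true, excluded_dirs: 0, before: "say saving" }, replacer)
// => '{"max_backups":5,"compress":true,"excluded_dirs":[],"before":"say saving"}'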
$(document).ready(function () {
$(".backup-explain").on("click", function (e) {
e.preventDefault();
bootbox.alert($(this).data("explain"));
});
$(".cancel-button").on("click", function () {
location.href = `/panel/server_detail?id=${server_id}&subpage=backup`
});
webSocket.on('backup_status', function (backup) {
text = ``;
$(`#${backup.backup_id}_status`).show();
if (backup.percent >= 100) {
$(`#${backup.backup_id}_status`).hide()
setTimeout(function () {
window.location.reload(1);
}, 5000);
} else {
text = `<div class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar" style="width:${backup.percent}%;"
aria-valuenow="${backup.percent}" aria-valuemin="0" aria-valuemax="100">${backup.percent}%</div>`
$(`#${backup.backup_id}_status`).html(text);
}
});
$("#backup-form").on("submit", async function (e) {
e.preventDefault();
const token = getCookie("_xsrf")
let backupForm = document.getElementById("backup-form");
let formData = new FormData(backupForm);
//Remove checks that we don't need in form data.
formData.delete("after-check");
formData.delete("before-check");
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
//We need to make sure these are sent regardless of whether or not they're checked
formDataObject.compress = $("#compress").prop('checked');
formDataObject.shutdown = $("#shutdown").prop('checked');
if ($("#root_files_button").hasClass("clicked")) {
excluded = []
$('input.excluded:checkbox:checked').each(function () {
excluded.push($(this).val());
});
formDataObject.excluded_dirs = excluded;
}
delete formDataObject.root_path
console.log(formDataObject);
// Format the plain form data as JSON
let formDataJsonString = JSON.stringify(formDataObject, replacer);
console.log(formDataJsonString);
let url = `/api/v2/servers/${server_id}/backups/backup/${backup_id}/`
let method = "PATCH"
if (!backup_id) {
url = `/api/v2/servers/${server_id}/backups/`
method = "POST";
}
let res = await fetch(url, {
method: method,
headers: {
'X-XSRFToken': token
},
body: formDataJsonString,
});
let responseData = await res.json();
if (responseData.status === "ok") {
window.location.href = `/panel/server_detail?id=${server_id}&subpage=backup`;
} else {
bootbox.alert({
title: responseData.error,
message: responseData.error_data
});
}
});
try {
if ($('#backup_location').val() == '') {
console.log('true')
try {
document.getElementById('backup_now_button').disabled = true;
} catch {
}
} else {
document.getElementById('backup_now_button').disabled = false;
}
} catch {
try {
document.getElementById('backup_now_button').disabled = false;
} catch {
}
}
console.log("ready!");
$("#backup_config_box").hide();
$("#backup_save_note").hide();
$("#show_config").click(function () {
$("#backup_config_box").toggle();
$('#backup_button').hide();
$('#backup_save_note').show();
$('#backup_data').hide();
});
$('#backup_table').DataTable({
"order": [[1, "desc"]],
"paging": false,
"lengthChange": false,
"searching": true,
"ordering": true,
"info": true,
"autoWidth": false,
"responsive": true,
});
$(".del_button").click(function () {
var file_to_del = $(this).data("file");
var backup_location = $(this).data('backup_location');
console.log("file to delete is" + file_to_del);
bootbox.confirm({
title: "{% raw translate('serverBackups', 'destroyBackup', data['lang']) %}",
message: "{{ translate('serverBackups', 'confirmDelete', data['lang']) }}",
buttons: {
cancel: {
label: '<i class="fas fa-times"></i> {{ translate("serverBackups", "cancel", data['lang']) }}'
},
confirm: {
label: '<i class="fas fa-check"></i> {{ translate("serverBackups", "confirm", data['lang']) }}'
}
},
callback: function (result) {
console.log(result);
if (result == true) {
var full_path = backup_location + '/' + file_to_del;
del_backup(file_to_del, server_id);
}
}
});
});
$(".restore_button").click(function () {
var file_to_restore = $(this).data("file");
bootbox.confirm({
title: "{{ translate('serverBackups', 'restore', data['lang']) }} " + file_to_restore,
message: "{{ translate('serverBackups', 'confirmRestore', data['lang']) }}",
buttons: {
cancel: {
label: '<i class="fas fa-times"></i> {{ translate("serverBackups", "cancel", data['lang']) }}'
},
confirm: {
label: '<i class="fas fa-check"></i> {{ translate("serverBackups", "restore", data['lang']) }}',
className: 'btn-outline-danger'
}
},
callback: function (result) {
console.log(result);
if (result == true) {
restore_backup(file_to_restore, server_id);
}
}
});
});
$("#backup_now_button").click(function () {
backup_started();
});
});
document.getElementById("modal-cancel").addEventListener("click", function () {
document.getElementById("root_files_button").classList.remove('clicked');
document.getElementById("main-tree-div").innerHTML = '<input type="checkbox" id="main-tree-input" name="root_path" value="" disabled><span id="main-tree" class="files-tree-title tree-caret-down root-dir" data-path=""><i class="far fa-folder"></i><i class="far fa-folder-open"></i>{{ translate("serverFiles", "files", data["lang"]) }}</span></input>'
})
document.getElementById("root_files_button").addEventListener("click", function () {
if ($("#root_files_button").data('server_path') != "") {
if (document.getElementById('root_files_button').classList.contains('clicked')) {
show_file_tree();
return;
} else {
document.getElementById('root_files_button').classList.add('clicked');
}
path = $("#root_files_button").data('server_path')
console.log($("#root_files_button").data('server_path'))
const token = getCookie("_xsrf");
var dialog = bootbox.dialog({
message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
closeButton: false
});
setTimeout(function () {
var x = document.querySelector('.bootbox');
if (x) {
x.remove()
}
var x = document.querySelector('.modal-backdrop');
if (x) {
x.remove()
}
document.getElementById('main-tree-input').setAttribute('value', path)
getTreeView(path);
show_file_tree();
}, 5000);
} else {
bootbox.alert("You must input a path before selecting this button");
}
});
function getDirView(event) {
let path = event.target.parentElement.getAttribute("data-path");
if (document.getElementById(path).classList.contains('clicked')) {
return;
} else {
getTreeView(path);
}
}
async function getTreeView(path) {
console.log(path)
const token = getCookie("_xsrf");
let url = `/api/v2/servers/${server_id}/files/${backup_id}`
if (!backup_id) {
url = `/api/v2/servers/${server_id}/files/`
console.log("NEW URL")
}
console.log(url);
let res = await fetch(url, {
method: 'POST',
headers: {
'X-XSRFToken': token
},
body: JSON.stringify({ "page": "backups", "path": path }),
});
let responseData = await res.json();
if (responseData.status === "ok") {
console.log(responseData);
process_tree_response(responseData);
} else {
bootbox.alert({
title: responseData.status,
message: responseData.error
});
}
}
function process_tree_response(response) {
let path = response.data.root_path.path;
let text = `<ul class="tree-nested d-block" id="${path}ul">`;
Object.entries(response.data).forEach(([key, value]) => {
if (key === "root_path" || key === "db_stats") {
// 'continue' is not valid inside forEach; 'return' acts as a continue here.
return;
}
let checked = ""
let dpath = value.path;
let filename = key;
if (value.excluded) {
checked = "checked"
}
if (value.dir) {
text += `<li class="tree-item" data-path="${dpath}">
\n<div id="${dpath}" data-path="${dpath}" data-name="${filename}" class="tree-caret tree-ctx-item tree-folder">
<input type="checkbox" class="checkBoxClass excluded" value="${dpath}" ${checked}>
<span id="${dpath}span" class="files-tree-title" data-path="${dpath}" data-name="${filename}" onclick="getDirView(event)">
<i style="color: var(--info);" class="far fa-folder"></i>
<i style="color: var(--info);" class="far fa-folder-open"></i>
<strong>${filename}</strong>
</span>
</input></div></li>`
} else {
text += `<li
class="d-block tree-ctx-item tree-file"
data-path="${dpath}"
data-name="${filename}"
onclick=""><input type='checkbox' class="checkBoxClass excluded" name='root_path' value="${dpath}" ${checked}><span style="margin-right: 6px;">
<i class="far fa-file"></i></span></input>${filename}</li>`
}
});
text += `</ul>`;
if (response.data.root_path.top) {
try {
document.getElementById('main-tree-div').innerHTML += text;
document.getElementById('main-tree').parentElement.classList.add("clicked");
} catch {
document.getElementById('files-tree').innerHTML = text;
}
} else {
try {
document.getElementById(path + "span").classList.add('tree-caret-down');
document.getElementById(path).innerHTML += text;
document.getElementById(path).classList.add("clicked");
} catch {
console.log("Bad")
}
var toggler = document.getElementById(path + "span");
if (toggler.classList.contains('files-tree-title')) {
document.getElementById(path + "span").addEventListener("click", function caretListener() {
document.getElementById(path + "ul").classList.toggle("d-block");
document.getElementById(path + "span").classList.toggle("tree-caret-down");
});
}
}
}
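// Minimal sketch of the response shape process_tree_response expects, inferred
// from the reads above; the paths and file names are illustrative assumptions:
// {
//   "status": "ok",
//   "data": {
//     "root_path": { "path": "/servers/example", "top": true },
//     "world": { "path": "/servers/example/world", "dir": true, "excluded": false },
//     "server.jar": { "path": "/servers/example/server.jar", "dir": false, "excluded": true }
//   }
// }
// Every key other than root_path and db_stats is rendered as a file or folder entry.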
function getToggleMain(event) {
path = event.target.parentElement.getAttribute('data-path');
document.getElementById("files-tree").classList.toggle("d-block");
document.getElementById(path + "span").classList.toggle("tree-caret-down");
document.getElementById(path + "span").classList.toggle("tree-caret");
}
function show_file_tree() {
$("#dir_select").modal();
}
</script>
{% end %}

View File

@ -67,7 +67,8 @@
translate('serverFiles', 'download', data['lang']) }}</a>
<a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteFile" href="#"
style="color: red">{{ translate('serverFiles', 'delete', data['lang']) }}</a>
<a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteDir" href="#" style="color: red">{{
<a onclick="deleteFileE(event)" href="javascript:void(0)" id="deleteDir" href="#"
style="color: red">{{
translate('serverFiles', 'delete', data['lang']) }}</a>
<a href="javascript:void(0)" class="closebtn" style="color: var(--info);"
onclick="document.getElementById('files-tree-nav').style.display = 'none';">{{
@ -156,7 +157,8 @@
right: 35px;
}
}
.tree-file:hover{
.tree-file:hover {
cursor: pointer;
}
</style>
@ -721,105 +723,7 @@
}
}
async function sendFile(file, path, serverId, left, i, onProgress) {
let xmlHttpRequest = new XMLHttpRequest();
let token = getCookie("_xsrf")
let fileName = file.name
let target = '/upload?server_id=' + serverId
let mimeType = file.type
let size = file.size
xmlHttpRequest.upload.addEventListener('progress', function (e) {
if (e.loaded <= size) {
var percent = Math.round(e.loaded / size * 100);
$(`#upload-progress-bar-${i + 1}`).css('width', percent + '%');
$(`#upload-progress-bar-${i + 1}`).html(percent + '%');
}
});
xmlHttpRequest.open('POST', target, true);
xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
xmlHttpRequest.setRequestHeader('X-Content-Length', size);
xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
xmlHttpRequest.setRequestHeader('X-Path', path);
xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', 'server_files')
xmlHttpRequest.setRequestHeader('X-Files-Left', left);
xmlHttpRequest.setRequestHeader('X-FileName', fileName);
xmlHttpRequest.setRequestHeader('X-ServerId', serverId);
xmlHttpRequest.upload.addEventListener('progress', (event) =>
onProgress(Math.floor(event.loaded / event.total * 100)), false);
xmlHttpRequest.addEventListener('load', (event) => {
if (event.target.responseText == 'success') {
console.log('Upload for file', file.name, 'was successful!');
let caught = false;
try {
if (document.getElementById(path).classList.contains("clicked")) {
var expanded = true;
}
} catch {
var expanded = false;
}
try {
var par_el = document.getElementById(path + "ul");
var items = par_el.children;
} catch (err) {
console.log(err)
caught = true;
var par_el = document.getElementById("files-tree");
var items = par_el.children;
}
let name = file.name;
console.log(par_el)
let full_path = path + '/' + name
let flag = false;
for (var k = 0; k < items.length; ++k) {
if ($(items[k]).attr("data-name") == name) {
flag = true;
}
}
if (!flag) {
if (caught && expanded == false) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="d-block tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
} else if (expanded == true) {
$(par_el).append('<li id=' + '"' + full_path.toString() + 'li' + '"' + 'class="tree-ctx-item tree-file tree-item" data-path=' + '"' + full_path.toString() + '"' + ' data-name=' + '"' + name.toString() + '"' + ' onclick="clickOnFile(event)" ><span style="margin-right: 6px;"><i class="far fa-file"></i></span>' + name + '</li>');
}
setTreeViewContext();
}
$(`#upload-progress-bar-${i + 1}`).removeClass("progress-bar-striped");
$(`#upload-progress-bar-${i + 1}`).addClass("bg-success");
$(`#upload-progress-bar-${i + 1}`).html('<i style="color: black;" class="fas fa-box-check"></i>')
}
else {
let response_text = JSON.parse(event.target.responseText);
var x = document.querySelector('.bootbox');
if (x) {
x.remove()
}
var x = document.querySelector('.modal-content');
if (x) {
x.remove()
}
console.log(JSON.parse(event.target.responseText).info)
bootbox.alert({
message: JSON.parse(event.target.responseText).info,
callback: function () {
window.location.reload();
}
});
doUpload = false;
}
}, false);
xmlHttpRequest.addEventListener('error', (e) => {
console.error('Error while uploading file', file.name + '.', 'Event:', e)
}, false);
xmlHttpRequest.send(file);
}
let uploadWaitDialog;
let doUpload = true;
async function uploadFilesE(event) {
path = event.target.parentElement.getAttribute('data-path');
@ -842,6 +746,9 @@
label: "{{ translate('serverFiles', 'upload', data['lang']) }}",
className: "btn-default",
callback: async function () {
if ($("#files").get(0).files.length === 0) {
return hideUploadBox();
}
var height = files.files.length * 50;
var waitMessage = '<p class="text-center mb-0">' +
@ -858,54 +765,56 @@
});
let nFiles = files.files.length;
for (i = 0; i < nFiles; i++) {
if (!doUpload) {
doUpload = true;
hideUploadBox();
break;
}
const uploadPromises = [];
for (let i = 0; i < nFiles; i++) {
const file = files.files[i];
const progressHtml = `
<div style="width: 100%; min-width: 100%;">
${files.files[i].name}:
<br><div
id="upload-progress-bar-${i + 1}"
class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style="width: 100%; height: 10px;"
aria-valuenow="0"
aria-valuemin="0"
aria-valuemax="100"
></div>
</div><br>
`;
<div style="width: 100%; min-width: 100%;">
${file.name}:
<br><div
id="upload-progress-bar-${i + 1}"
class="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style="width: 100%; height: 10px;"
aria-valuenow="0"
aria-valuemin="0"
aria-valuemax="100"
></div>
</div><br>
`;
$('#upload-progress-bar-parent').append(progressHtml);
await sendFile(files.files[i], path, serverId, nFiles - i - 1, i, (progress) => {
const uploadPromise = uploadFile("server_upload", file, path, i, (progress) => {
$(`#upload-progress-bar-${i + 1}`).attr('aria-valuenow', progress)
$(`#upload-progress-bar-${i + 1}`).css('width', progress + '%');
});
uploadPromises.push(uploadPromise);
}
hideUploadBox();
//$('#upload_file').submit(); //.trigger('submit');
await Promise.all(uploadPromises);
setTimeout(() => {
hideUploadBox();
}, 2000);
}
}
}
});
var fileList = document.getElementById("files");
fileList.addEventListener("change", function (e) {
var list = "";
let files = Array.from(this.files)
files.forEach(file => {
list += "<li class='col-xs-12 file-list'>" + file.name + "</li>"
})
document.getElementById("fileList").innerHTML = list;
}, false);
});
}
async function calculateFileHash(file) {
const arrayBuffer = await file.arrayBuffer();
const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
const hashArray = Array.from(new Uint8Array(hashBuffer));
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
return hashHex;
}
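// Minimal usage sketch for calculateFileHash, assuming the #files input above
// has at least one file selected:
// const picked = document.getElementById("files").files[0];
// calculateFileHash(picked).then(hex => console.log("sha-256:", hex));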
function getDirView(event) {
let path = event.target.parentElement.getAttribute("data-path");
if (document.getElementById(path).classList.contains('clicked')) {
@ -1211,5 +1120,5 @@
</script>
<script src="../../static/assets/js/shared/upload.js"></script>
{% end %}

View File

@ -79,6 +79,24 @@
<option id="command" value="command">{{ translate('serverScheduleConfig', 'custom' , data['lang'])
}}</option>
</select>
<div id="ifBackup" style="display: none;">
<br>
<label for="action_id">{{ translate('serverSchedules', 'actionId' , data['lang']) }}<small
class="text-muted ml-1"></small> </label><br>
<select id="action_id" name="action_id"
class="form-control form-control-lg select-css" value="{{ data['schedule']['action_id'] }}">
{% for backup in data["backups"] %}
{% if backup.backup_id == data["schedule"]["action_id"] %}
<option id="{{backup.backup_id}}" value="{{backup.backup_id}}">{{backup.backup_name}}</option>
{% end %}
{% end %}
{% for backup in data["backups"] %}
{% if backup.backup_id != data["schedule"]["action_id"] %}
<option id="{{backup.backup_id}}" value="{{backup.backup_id}}">{{backup.backup_name}}</option>
{% end %}
{% end %}
</select>
</div>
</div>
<div id="ifBasic">
<div class="form-group">
@ -232,7 +250,7 @@
}
function replacer(key, value) {
if (key != "start_time" && key != "cron_string" && key != "interval_type") {
if (key != "start_time" && key != "cron_string" && key != "interval_type" && key != "action_id") {
if (typeof value == "boolean") {
return value
}
@ -247,7 +265,7 @@
}
} else if (value === "" && key == "start_time"){
return "00:00";
}else{
}else {
return value;
}
}
@ -281,6 +299,11 @@
// Format the plain form data as JSON
let formDataJsonString = JSON.stringify(formDataObject, replacer);
let data = JSON.parse(formDataJsonString)
if (data["action"] === "backup" && !data["action_id"]){
return bootbox.alert("Validation Failed")
}
let res = await fetch(`/api/v2/servers/${serverId}/tasks/`, {
method: 'POST',
headers: {
@ -358,6 +381,14 @@
document.getElementById("ifYes").style.display = "none";
document.getElementById("command_input").required = false;
}
if (document.getElementById('action').value == "backup"){
document.getElementById("ifBackup").style.display = "block";
document.getElementById("action_id").required = true;
} else {
document.getElementById("ifBackup").style.display = "none";
document.getElementById("action_id").required = false;
$("#action_id").val(null);
}
}
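// Minimal wiring sketch for the backup toggle above, assuming it runs whenever
// the action <select> changes (the jQuery handler here is an assumption; the
// element ids "action", "ifBackup" and "action_id" appear above):
// $("#action").on("change", function () {
//   const isBackup = this.value === "backup";
//   $("#ifBackup").toggle(isBackup);
//   $("#action_id").prop("required", isBackup);
// });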
function basicAdvanced() {
if (document.getElementById('difficulty').value == "advanced") {

View File

@ -1,5 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<html lang="{{ data.get('lang_page', 'en') }}" class="{{data['user_data'].get('theme', 'default')}}"
data-username="{{data['user_data'].get('username', None)}}">
<head>
<!-- Required meta tags -->
@ -60,6 +61,11 @@
<b>{{ translate('error', 'hereIsTheError', data['lang']) }}: {{data['error']}}</b><br /><br />
That's all the help I can give you - Godspeed
<br /><br />
<a class="d-inline font-weight-medium" href="/panel/dashboard"><button class="btn btn-info">{{
translate('error', 'return',
data['lang'])}}</button></a>
<br>
<br>
<a class="d-inline font-weight-medium" href="https://discord.gg/9VJPhCE"> {{ translate('error',
'contact', data['lang']) }}</a>
</p>

View File

@ -171,7 +171,6 @@
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
console.log(formDataObject)
let res = await fetch(`/login`, {
method: 'POST',
headers: {

View File

@ -64,7 +64,7 @@
<span class="text-warning"><i class="fas fa-exclamation-triangle"></i></span>
</td>
<td id="server_motd_{{ server['stats']['server_id']['server_id'] }}">
<span class="text-warning">Crafty can't get infos from this Server </span>
<span class="text-warning"><i class="fa-solid fa-link-slash"></i> </span>
</td>
<td id="server_version_{{ server['stats']['server_id']['server_id'] }}">
<span class="text-warning"><i class="fas fa-question"></i></i></span>
@ -148,7 +148,7 @@
<div class="row">
<div class="col-12">
<div id="m_server_motd_{{ server['stats']['server_id']['server_id'] }}">
<span class="text-warning"><i class="fas fa-exclamation-triangle"></i> Crafty can't get infos from
<span class="text-warning"><i class="fas fa-exclamation-triangle"></i> Crafty can't get info from
this Server </span>
</div>
<div id="m_server_version_{{ server['stats']['server_id']['server_id'] }}"></div>
@ -223,9 +223,9 @@
}
else {
server_players.innerHTML = `<span class="text-warning"><i class="fas fa-exclamation-triangle"></i></span>`;
server_motd.innerHTML = `<span class="text-warning">Crafty can't get infos from this Server </span>`;
server_motd.innerHTML = `<span class="text-warning"><i class="fa-solid fa-link-slash"></i> </span>`;
server_version.innerHTML = `<span class="text-warning"><i class="fas fa-question"></i></i></span>`;
m_server_motd.innerHTML = `<span class="text-warning"><i class="fas fa-exclamation-triangle"></i> Crafty can't get infos from this Server </span>`;
m_server_motd.innerHTML = `<span class="text-warning"><i class="fas fa-exclamation-triangle"></i> <i class="fa-solid fa-link-slash"></i> </span>`;
}
/* Update Online Status */

View File

@ -3,7 +3,7 @@
{% block title %}Crafty Controller - {{ translate('serverWizard', 'newServer', data['lang']) }}{% end %}
{% block content %}
<link rel="stylesheet" href="../../static/assets/css/vendors/bootstrap-select-1.13.18.css">
<div class="content-wrapper">
<ul class="nav nav-pills tab-simple-styled " role="tablist">
<li class="nav-item term-nav-item">
@ -62,29 +62,24 @@
</div>
<div class="form-group">
<div id="accordion-1">
<div class="card">
<div class="card-header p-2" id="Role-1">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-1" aria-expanded="true"
aria-controls="collapseRole-1">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang']) }}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-1" class="collapse" aria-labelledby="Role-1" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input name="{{ r['role_id'] }}"
type="checkbox">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true" name="server_roles"
multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
<button onclick="eula_confirm()" type="button" class="btn btn-primary mr-2">{{ translate('serverWizard',
'buildServer',
@ -146,30 +141,27 @@
<input type="number" class="form-control" id="port2" name="port" value="19132" step="1" min="1"
max="65535" required>
</div>
<div class="form-group">
<div id="accordion-2">
<div class="card">
<div class="card-header p-2" id="Role-2">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-2" aria-expanded="true"
aria-controls="collapseRole-2">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang']) }}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-2" class="collapse" aria-labelledby="Role-2" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input name="{{ r['role_id'] }}" type="checkbox">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select" data-styleBase="form-control"
data-style="form-control" data-live-search="true" name="server_roles" multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
<button type="submit" class="btn btn-primary mr-2">{{ translate('serverWizard', 'importServerButton',
data['lang']) }}</button>
<button type="reset" class="btn btn-danger mr-2">{{ translate('serverWizard', 'resetForm', data['lang'])
@ -222,34 +214,25 @@
<input type="number" class="form-control" id="port3" name="port" value="19132" step="1" min="1"
max="65535" required>
</div>
<div class="form-group">
<div id="accordion-3">
<div class="card">
<div class="card-header p-2" id="Role-3">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-3" aria-expanded="true"
aria-controls="collapseRole-3">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang'])
}} <small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-3" class="collapse" aria-labelledby="Role-3" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input name="{{ r['role_id'] }}" type="checkbox">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
</div>
<div style="visibility: hidden;">
<div class="form-group">
<input type="text" class="form-control" id="zip_root_path" name="zip_root_path">
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select" data-styleBase="form-control"
data-style="form-control" data-live-search="true" name="server_roles" multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
<div class="modal fade" id="dir_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
aria-hidden="true">
@ -318,8 +301,8 @@
'labelZipFile', data['lang']) }}</label>
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info upload-button" id="upload-button" onclick="sendFile()"
disabled>{{ translate('serverWizard',
<button type="button" class="btn btn-info upload-button" id="upload-button"
onclick="uploadFile('import')" disabled>{{ translate('serverWizard',
'uploadButton', data['lang']) }}</button>
</div>
</div>
@ -353,35 +336,24 @@
</div>
<div class="form-group">
<div id="accordion-3">
<div class="card">
<div class="card-header p-2" id="Role-3">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-3" aria-expanded="true"
aria-controls="collapseRole-3">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole',
data['lang'])
}} <small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-3" class="collapse" aria-labelledby="Role-3" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input name="{{ r['role_id'] }}"
type="checkbox">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
</div>
<div style="visibility: hidden;">
<div class="form-group">
<input type="text" class="form-control" id="zip_root_path" name="zip_root_path">
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true" name="server_roles"
multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
<div class="modal fade" id="dir_upload_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
aria-hidden="true">
@ -523,61 +495,8 @@
{% end %}
{% block js%}
<script src="../../static/assets/js/shared/upload.js"></script>
<script>
var upload;
var file;
function sendFile() {
file = $("#file")[0].files[0]
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>'
let xmlHttpRequest = new XMLHttpRequest();
let token = getCookie("_xsrf")
let fileName = encodeURIComponent(file.name)
let target = '/upload'
let mimeType = file.type
let size = file.size
let type = 'server_import'
xmlHttpRequest.upload.addEventListener('progress', function (e) {
if (e.loaded <= size) {
var percent = Math.round(e.loaded / size * 100);
$(`#upload-progress-bar`).css('width', percent + '%');
$(`#upload-progress-bar`).html(percent + '%');
}
});
xmlHttpRequest.open('POST', target, true);
xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
xmlHttpRequest.setRequestHeader('X-Content-Length', size);
xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', type);
xmlHttpRequest.setRequestHeader('X-FileName', fileName);
xmlHttpRequest.addEventListener('load', (event) => {
if (event.target.responseText == 'success') {
console.log('Upload for file', file.name, 'was successful!')
$("#upload_input").html(`<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%;"><input value="${decodeURIComponent(fileName)}" type="text" id="file-uploaded" disabled></input> 🔒</div>`);
document.getElementById("lower_half").style.visibility = "visible";
}
else {
let response_text = JSON.parse(event.target.responseText);
var x = document.querySelector('.bootbox');
console.log(JSON.parse(event.target.responseText).info)
bootbox.alert({
message: JSON.parse(event.target.responseText).info,
callback: function () {
window.location.reload();
}
});
doUpload = false;
}
}, false);
xmlHttpRequest.addEventListener('error', (e) => {
console.error('Error while uploading file', file.name + '.', 'Event:', e)
}, false);
xmlHttpRequest.send(file);
}
document.getElementById("root_upload_button").addEventListener("click", function (event) {
if (file) {
upload = true;
@ -591,7 +510,7 @@
message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
closeButton: false
});
setTimeout(function(){
setTimeout(function () {
getDirView();
}, 2000);
} else {
@ -640,7 +559,7 @@
message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
closeButton: false
});
setTimeout(function(){
setTimeout(function () {
getDirView();
}, 2000);
} else {
@ -650,17 +569,6 @@
</script>
<script>
function dropDown(event) {
event.target.parentElement.children[1].classList.remove("d-none");
document.getElementById("overlay").classList.remove("d-none");
}
function hide(event) {
var items = document.getElementsByClassName('menu');
for (let i = 0; i < items.length; i++) {
items[i].classList.add("d-none");
}
document.getElementById("overlay").classList.add("d-none");
}
function wait_msg(importing) {
bootbox.alert({
@ -692,13 +600,15 @@
}
}
function calcRoles() {
let role_ids = $('.roles').map(function () {
if ($(this).is(':checked')) {
return $(this).val();
var combinedValues = [];
$('.roles_select').each(function () {
var selectedValues = $(this).val();
console.log(selectedValues)
if (selectedValues) {
combinedValues = combinedValues.concat(selectedValues);
}
}).get();
console.log(role_ids)
return role_ids
});
return combinedValues;
}
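// Minimal sketch of what calcRoles() returns, with illustrative values: two
// role pickers selecting ["1", "4"] and ["7"] combine to ["1", "4", "7"];
// pickers with nothing selected contribute nothing.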
async function send_server(data) {
let token = getCookie("_xsrf")
@ -755,7 +665,6 @@
console.log(formDataJsonString);
send_server(formDataJsonString);
});
@ -796,7 +705,13 @@
wait_msg(true);
e.preventDefault();
let jarForm = document.getElementById("import-zip");
var checkedRadio = $('.root-input:checked');
let zip_root_path = ""
if (checkedRadio.length > 0) {
// Get the value of the checked radio button
var checkedValue = checkedRadio.val();
zip_root_path = checkedValue; // Use the checked value as the zip root path
}
let formData = new FormData(jarForm);
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
@ -813,7 +728,7 @@
"minecraft_bedrock_create_data": {
"create_type": "import_server",
"import_server_create_data": {
"existing_server_path": formDataObject.root_path,
"existing_server_path": zip_root_path,
"executable": formDataObject.server_jar,
}
}
@ -834,6 +749,13 @@
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
console.log(formDataObject);
var checkedRadio = $('.root-input:checked');
let zip_root_path = ""
if (checkedRadio.length > 0) {
// Get the value of the checked radio button
var checkedValue = checkedRadio.val();
zip_root_path = checkedValue; // Use the checked value as the zip root path
}
let send_data = {
"name": formDataObject.name,
"roles": calcRoles(),
@ -846,7 +768,7 @@
"minecraft_bedrock_create_data": {
"create_type": "import_server",
"import_server_create_data": {
"existing_server_path": formDataObject.root_path,
"existing_server_path": zip_root_path,
"executable": formDataObject.server_jar,
}
}
@ -860,4 +782,5 @@
});
</script>
<script type="text/javascript" src="../../static/assets/js/shared/root-dir.js"></script>
<script src="../../static/assets/js/shared/bootstrap-select-1.13.18.js"></script>
{% end %}

View File

@ -4,6 +4,7 @@
{% block content %}
<link rel="stylesheet" href="../../static/assets/css/vendors/bootstrap-select-1.13.18.css">
<div class="content-wrapper">
<ul class="nav nav-pills tab-simple-styled">
<li class="nav-item term-nav-item">
@ -20,11 +21,6 @@
<div class="col-sm-6 grid-margin stretch-card">
<div class="card" id="creation_wizard">
<div class="card-body">
{% if data["server_api"] and data["online"] %}
<a href="https://serverjars.com/" target="_blank" alt="serverjars icon"><img
src="../../static/assets/images/serverjars/ICON.svg"
style="float: right; width: 40px; position: relative;"></a>
{% end %}
<h4>{{ translate('serverWizard', 'newServer', data['lang']) }}</h4>
<br />
@ -67,16 +63,19 @@
{% end %}
{% raw xsrf_form_html() %}
<div class="row">
<div class="col-sm-12">
<div class="form-group">
<label for="server_jar">{{ translate('serverWizard', 'serverType', data['lang'])
}}</label>
<label for="server_jar">{{ translate('serverWizard', 'serverType', data['lang']) }}</label>
<div class="input-group">
<select required class="form-control form-control-lg select-css" id="server_jar"
name="server_jar" onchange="serverJarChange(this)">
<option value="None">{{ translate('serverWizard', 'selectType', data['lang']) }}</option>
<select required class="form-control form-control-lg select-css" id="server_jar" name="type"
onchange="serverJarChange(this)">
<option value="">{{ translate('serverWizard', 'selectServer', data['lang']) }}</option>
{% for s in data['server_types'] %}
<option value="{{ s }}">{{ s.capitalize() }}</option>
{% if data['server_types'][s].get("enabled", False) %}
<option value="{{ s }}">{{ data["server_types"][s].get("friendly_name", s).capitalize() }}
{% end %}
</option>
{% end %}
</select>
{% if data['super_user'] %}
@ -108,7 +107,8 @@
</div>
</div>
<span data-html="true" class="version-hint text-center"
data-content="⚠️ {{ translate('serverWizard', 'unsupported', data['lang']) }} ⚠️" , data-placement="right"></span>
data-content="⚠️ {{ translate('serverWizard', 'unsupported', data['lang']) }} ⚠️" ,
data-placement="right"></span>
<div class="col-sm-12">
<div class="form-group">
<label for="server_name">{{ translate('serverWizard', 'serverName', data['lang']) }}</label>
@ -153,30 +153,24 @@
</div>
<div class="col-sm-12">
<div class="form-group">
<div id="accordion-1">
<div class="card">
<div class="card-header p-2" id="Role-1">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-1"
aria-expanded="true" aria-controls="collapseRole-1">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-1" class="collapse" aria-labelledby="Role-1" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input class="roles" name="{{ r['role_id'] }}"
type="checkbox" value="{{ r['role_id'] }}">&nbsp; {{ r['role_name'].capitalize()
}}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true"
name="server_roles" multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
</div>
@ -191,10 +185,12 @@
{% if not data["server_api"] and data["online"] %}
<div class="api-alert" style="position: absolute; top: -5px; z-index: 100; opacity: .99;">
<p style="color: white !important;"><i class="fas fa-exclamation-triangle" style="color: red;"></i>&nbsp;{{
translate('error', 'serverJars1', data['lang']) }}<a style="color: red;" ;
href="https://status.craftycontrol.com/status/craftycontrol" target="_blank" rel="noopener">&nbsp;{{ translate('error',
translate('error', 'bigBucket1', data['lang']) }}<a style="color: red;" ;
href="https://status.craftycontrol.com/status/craftycontrol" target="_blank" rel="noopener">&nbsp;{{
translate('error',
'craftyStatus', data['lang']) }}</a>
&nbsp;{{ translate('error', 'serverJars2', data['lang']) }}</p>
&nbsp;{{ translate('error', 'bigBucket2', data['lang']) }}</br></br><small>{{ translate('error', 'selfHost',
data['lang'])}}</small></p>
</div>
{% end %}
{% if not data["online"] %}
@ -284,29 +280,24 @@
</div>
<div class="col-sm-12">
<div class="form-group">
<div id="accordion-2">
<div class="card">
<div class="card-header p-2" id="Role-2">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-2" aria-expanded="true"
aria-controls="collapseRole-2">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang']) }}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-2" class="collapse" aria-labelledby="Role-2" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input class="roles" name="{{ r['role_id'] }}"
type="checkbox" value="{{ r['role_id'] }}">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true" name="server_roles"
multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
</div>
</div>
@ -411,34 +402,24 @@
<div class="col-sm-12">
<div class="form-group">
<div id="accordion-3">
<div class="card">
<div class="card-header p-2" id="Role-3">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-3" aria-expanded="true"
aria-controls="collapseRole-3">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole', data['lang'])
}} <small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-3" class="collapse" aria-labelledby="Role-3" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input class="roles" name="{{ r['role_id'] }}"
type="checkbox" value="{{ r['role_id'] }}">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="col-sm-12" style="visibility: hidden;" hidden>
<div class="form-group">
<input type="text" class="form-control" id="zip_root_path" name="zip_root_path">
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true"
name="server_roles" multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
</div>
<div class="modal fade" id="dir_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
@ -513,8 +494,8 @@
'labelZipFile', data['lang']) }}</label>
</div>
<div class="input-group-append">
<button type="button" class="btn btn-info upload-button" id="upload-button" onclick="sendFile()"
disabled>{{ translate('serverWizard',
<button type="button" class="btn btn-info upload-button" id="upload-button"
onclick="uploadFile('import')" disabled>{{ translate('serverWizard',
'uploadButton', data['lang']) }}</button>
</div>
</div>
@ -562,35 +543,24 @@
</div>
<div class="form-group">
<div id="accordion-3">
<div class="card">
<div class="card-header p-2" id="Role-3">
<p class="mb-0 p-0" data-toggle="collapse" data-target="#collapseRole-3" aria-expanded="true"
aria-controls="collapseRole-3">
<i class="fas fa-chevron-down"></i> {{ translate('serverWizard', 'addRole',
data['lang'])
}} <small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
</div>
<div id="collapseRole-3" class="collapse" aria-labelledby="Role-3" data-parent="">
<div class="card-body scroll">
<div class="form-group">
{% for r in data['roles'] %}
<span class="d-block menu-option"><label><input class="roles" name="{{ r['role_id'] }}"
type="checkbox" value="{{ r['role_id'] }}">&nbsp;
{{ r['role_name'].capitalize() }}</label></span>
{% end %}
</div>
</div>
</div>
</div>
</div>
</div>
<div style="visibility: hidden;">
<div class="form-group">
<input type="text" class="form-control" id="zip_root_path" name="zip_root_path">
</div>
<p class="mb-0 p-0" aria-expanded="true">{{ translate('serverWizard', 'addRole', data['lang'])
}}
<small style="text-transform: none;"> - {{ translate('serverWizard', 'autoCreate',
data['lang']) }}</small>
</p>
<select data-container="body"
data-none-selected-text="{{ translate('serverWizard', 'noneRoles', data['lang']) }}"
data-none-results-text="{{ translate('serverWizard', 'noRole', data['lang']) }} {0}"
class="selectpicker form-control form-control-lg select-css roles_select"
data-styleBase="form-control" data-style="form-control" data-live-search="true" name="server_roles"
multiple>
{% for r in data['roles'] %}
<option class="roles" type="checkbox" value="{{ r['role_id'] }}">
&nbsp; {{ r['role_name'].capitalize()
}}
</option>
{% end %}
</select>
</div>
<div class="modal fade" id="dir_upload_select" tabindex="-1" role="dialog" aria-labelledby="dir_select"
aria-hidden="true">
@ -814,6 +784,7 @@
{% end %}
{% block js %}
<script src="../../static/assets/js/shared/upload.js"></script>
<script>
document.getElementById("root_files_button").addEventListener("click", function (event) {
if (document.forms["zip"]["server_path"].value != "") {
@ -827,7 +798,7 @@
message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
closeButton: false
});
setTimeout(function(){
setTimeout(function () {
getDirView();
}, 2000);
} else {
@ -845,61 +816,13 @@
message: '<p class="text-center mb-0"><i class="fa fa-spin fa-cog"></i> Please wait while we gather your files...</p>',
closeButton: false
});
setTimeout(function(){
getDirView();
}, 2000);
setTimeout(function () {
getDirView();
}, 2000);
});
var upload = false;
var file;
function sendFile() {
file = $("#file")[0].files[0]
document.getElementById("upload_input").innerHTML = '<div class="progress" style="width: 100%;"><div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="100" aria-valuemin="0" aria-valuemax="100" style="width: 100%">&nbsp;<i class="fa-solid fa-spinner"></i></div></div>'
let xmlHttpRequest = new XMLHttpRequest();
let token = getCookie("_xsrf")
let fileName = file.name
let target = '/upload'
let mimeType = file.type
let size = file.size
let type = 'server_import'
xmlHttpRequest.upload.addEventListener('progress', function (e) {
if (e.loaded <= size) {
var percent = Math.round(e.loaded / size * 100);
$(`#upload-progress-bar`).css('width', percent + '%');
$(`#upload-progress-bar`).html(percent + '%');
}
});
xmlHttpRequest.open('POST', target, true);
xmlHttpRequest.setRequestHeader('X-Content-Type', mimeType);
xmlHttpRequest.setRequestHeader('X-XSRFToken', token);
xmlHttpRequest.setRequestHeader('X-Content-Length', size);
xmlHttpRequest.setRequestHeader('X-Content-Disposition', 'attachment; filename="' + fileName + '"');
xmlHttpRequest.setRequestHeader('X-Content-Upload-Type', type);
xmlHttpRequest.setRequestHeader('X-FileName', fileName);
xmlHttpRequest.addEventListener('load', (event) => {
if (event.target.responseText == 'success') {
console.log('Upload for file', file.name, 'was successful!')
$("#upload_input").html(`<div class="card-header header-sm d-flex justify-content-between align-items-center" style="width: 100%;"><input value="${fileName}" type="text" id="file-uploaded" disabled></input> 🔒</div>`);
document.getElementById("lower_half").style.visibility = "visible";
document.getElementById("lower_half").hidden = false;
}
else {
console.log(JSON.parse(event.target.responseText).info)
bootbox.alert({
message: JSON.parse(event.target.responseText).info,
callback: function () {
window.location.reload();
}
});
}
}, false);
xmlHttpRequest.addEventListener('error', (e) => {
console.error('Error while uploading file', file.name + '.', 'Event:', e)
}, false);
xmlHttpRequest.send(file);
}
</script>
<script type="text/javascript" src="../../static/assets/js/shared/root-dir.js"></script>
@ -919,13 +842,15 @@
}
}
function calcRoles() {
let role_ids = $('.roles').map(function () {
if ($(this).is(':checked')) {
return $(this).val();
var combinedValues = [];
$('.roles_select').each(function () {
var selectedValues = $(this).val();
console.log(selectedValues)
if (selectedValues) {
combinedValues = combinedValues.concat(selectedValues);
}
}).get();
console.log(role_ids)
return role_ids
});
return combinedValues;
}
async function send_server(data) {
let token = getCookie("_xsrf")
@ -985,7 +910,6 @@
console.log(formDataJsonString);
send_server(formDataJsonString);
});
@ -1034,6 +958,13 @@
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
console.log(formDataObject);
var checkedRadio = $('.root-input:checked');
let zip_root_path = ""
if (checkedRadio.length > 0) {
// Get the value of the checked radio button
var checkedValue = checkedRadio.val();
zip_root_path = checkedValue; // Use the checked value as the zip root path
}
let send_data = {
"name": formDataObject.name,
"roles": calcRoles(),
@ -1046,7 +977,7 @@
"minecraft_java_create_data": {
"create_type": "import_server",
"import_server_create_data": {
"existing_server_path": formDataObject.root_path,
"existing_server_path": zip_root_path,
"jarfile": formDataObject.server_jar,
"mem_min": formDataObject.mem_min,
"mem_max": formDataObject.mem_max,
@ -1070,6 +1001,13 @@
//Create an object from the form data entries
let formDataObject = Object.fromEntries(formData.entries());
console.log(formDataObject);
var checkedRadio = $('.root-input:checked');
let zip_root_path = ""
if (checkedRadio.length > 0) {
// Get the value of the checked radio button
var checkedValue = checkedRadio.val();
zip_root_path = checkedValue; // Use the checked value as the zip root path
}
let send_data = {
"name": formDataObject.name,
"roles": calcRoles(),
@ -1082,7 +1020,7 @@
"minecraft_java_create_data": {
"create_type": "import_server",
"import_server_create_data": {
"existing_server_path": formDataObject.root_path,
"existing_server_path": zip_root_path,
"jarfile": formDataObject.server_jar,
"mem_min": formDataObject.mem_min,
"mem_max": formDataObject.mem_max,
@ -1103,19 +1041,6 @@
$(".tree-reset").on("click", function () {
location.href = "/server/step1";
});
function dropDown(event) {
event.target.parentElement.children[1].classList.remove("d-none");
document.getElementById("overlay").classList.remove("d-none");
}
function hide(event) {
let items = document.getElementsByClassName('menu');
items.forEach(item => {
item.classList.add("d-none");
})
document.getElementById("overlay").classList.add("d-none");
}
$(document).ready(function () {
console.log('ready');
@ -1137,7 +1062,7 @@
function wait_msg(importing) {
bootbox.alert({
title: importing ? '{% raw translate("serverWizard", "importing", data["lang"]) %}' : '{% raw translate("serverWizard", "downloading", data["lang"]) %}',
message: importing ? '<i class="fas fa-cloud-download"></i> {% raw translate("serverWizard", "bePatient", data["lang"]) %}' : '<i class="fas fa-cloud-download"></i> {% raw translate("serverWizard", "bePatient", data["lang"]) %}<br><br><a href="https://serverjars.com" target="_blank" style="text-align: center;"><img src="../../static/assets/images/serverjars/FULL-WHITE.svg" alt="Powered by serverjars.com" width="40%"></a>',
message: importing ? '<i class="fas fa-cloud-download"></i> {% raw translate("serverWizard", "bePatient", data["lang"]) %}' : '<i class="fas fa-cloud-download"></i> {% raw translate("serverWizard", "bePatient", data["lang"]) %}',
});
}
@ -1197,33 +1122,28 @@
*/
function serverTypeChange(selectObj) {
// get the index of the selected option
let idx = document.getElementById('server_type').selectedIndex;
let idx = document.getElementById('server_type').value;
let idx_list = idx.split("|");
// get the value of the selected option
let cSelect = document.getElementById("server");
let which = {};
try {
which = document.getElementById('server_type').options[idx].value;
versions = Object.keys(serverTypesLists[idx_list[0]]["types"][idx_list[1]]["versions"]);
} catch {
while (cSelect.options.length > 0) {
cSelect.remove(0);
}
return;
}
let server_type = which.split('|')[0];
let server = which.split('|')[1];
// use the selected option value to retrieve the list of items from the serverTypesLists array
let cList = serverTypesLists[server_type];
// get the country select element via its known id
cSelect = document.getElementById("server");
// remove the current options from the country select
while (cSelect.options.length > 0) {
cSelect.remove(0);
}
let newOption;
$("#server option").each(function () {
$(this).remove()
})
// create new options ordered by ascending
cList[server].forEach(type => {
versions.forEach(type => {
newOption = document.createElement("option");
newOption.value = which + "|" + type; // assumes option string and value are the same
newOption.value = idx + "|" + type; // assumes option string and value are the same
newOption.text = type;
// add the new option
try {
@ -1234,17 +1154,18 @@
}
})
}
$("#server").change(function (){
$("#server").change(function () {
let selected_version = $("#server :selected").text().split(".");
if(parseInt(selected_version[0]) === 1 && parseInt(selected_version[1]) < 8 ){
if (parseInt(selected_version[0]) === 1 && parseInt(selected_version[1]) < 8) {
$('[data-toggle="popover"]').popover();
if ($(window).width() < 1000) {
$('.version-hint').attr("data-placement", "top")
if ($(window).width() < 1000) {
$('.version-hint').attr("data-placement", "top")
} else {
$('.version-hint').attr("data-placement", "right")
}
$('.version-hint').popover("show");
} else {
$('.version-hint').attr("data-placement", "right")
}
$('.version-hint').popover("show");
}else{
$('.version-hint').popover("hide");
}
});
@ -1253,6 +1174,15 @@
const type_select = document.getElementById('server_jar')
const tidx = type_select.selectedIndex;
const val = type_select.options[tidx].value;
if (!val) {
$("#server_type option").each(function () {
$(this).remove()
})
$("#server option").each(function () {
$(this).remove()
})
return;
}
let jcSelect = {};
if (val == 'None') {
jcSelect = document.getElementById("server_type");
@ -1267,7 +1197,7 @@
// get the value of the selected option
let jwhich = selectObj.options[jidx].value;
// use the selected option value to retrieve the list of items from the serverTypesLists array
let jcList = Object.keys(serverTypesLists[jwhich]);
let jcList = Object.keys(serverTypesLists[jwhich]["types"]);
// get the country select element via its known id
jcSelect = document.getElementById("server_type");
// remove the current options from the country select
@ -1291,4 +1221,6 @@
serverTypeChange(selectObj);
}
</script>
<script src="../../static/assets/js/shared/bootstrap-select-1.13.18.js"></script>
{% end %}

View File

@ -1,10 +1,11 @@
import peewee
import datetime
from app.classes.shared.helpers import Helpers
def migrate(migrator, database, **kwargs):
migrator.add_columns(
"users", valid_tokens_from=peewee.DateTimeField(default=datetime.datetime.now)
"users", valid_tokens_from=peewee.DateTimeField(default=Helpers.get_utc_now)
)
migrator.drop_columns("users", ["api_token"])

View File

@ -6,7 +6,6 @@ import logging
from app.classes.shared.console import Console
from app.classes.shared.migration import Migrator, MigrateHistory
from app.classes.models.management import (
AuditLog,
Webhooks,
Schedules,
Backups,
@ -61,17 +60,6 @@ def migrate(migrator: Migrator, database, **kwargs):
peewee.CharField(primary_key=True, default=str(uuid.uuid4())),
)
# Changes on Audit Log Table
migrator.alter_column_type(
AuditLog,
"server_id",
peewee.ForeignKeyField(
Servers,
backref="audit_server",
null=True,
field=peewee.CharField(primary_key=True, default=str(uuid.uuid4())),
),
)
# Changes on Webhook Table
migrator.alter_column_type(
Webhooks,
@ -109,13 +97,6 @@ def rollback(migrator: Migrator, database, **kwargs):
peewee.AutoField(),
)
# Changes on Audit Log Table
migrator.alter_column_type(
AuditLog,
"server_id",
peewee.IntegerField(default=None, index=True),
)
# Changes on Webhook Table
migrator.alter_column_type(
Webhooks,

View File

@ -5,14 +5,7 @@ import logging
from app.classes.shared.console import Console
from app.classes.shared.migration import Migrator, MigrateHistory
from app.classes.models.management import (
AuditLog,
Webhooks,
Schedules,
Backups,
)
from app.classes.models.server_permissions import RoleServers
from app.classes.models.base_model import BaseModel
from app.classes.models.roles import Roles
logger = logging.getLogger(__name__)
@ -54,6 +47,78 @@ def migrate(migrator: Migrator, database, **kwargs):
table_name = "servers"
database = db
# **********************************************************************************
# Role Servers Class
# **********************************************************************************
class RoleServers(peewee.Model):
role_id = peewee.ForeignKeyField(Roles, backref="role_server")
server_id = peewee.ForeignKeyField(Servers, backref="role_server")
permissions = peewee.CharField(default="00000000")
class Meta:
table_name = "role_servers"
primary_key = peewee.CompositeKey("role_id", "server_id")
database = db
# **********************************************************************************
# Webhooks Class
# **********************************************************************************
class Webhooks(peewee.Model):
id = peewee.AutoField()
server_id = peewee.ForeignKeyField(Servers, backref="webhook_server", null=True)
name = peewee.CharField(default="Custom Webhook", max_length=64)
url = peewee.CharField(default="")
webhook_type = peewee.CharField(default="Custom")
bot_name = peewee.CharField(default="Crafty Controller")
trigger = peewee.CharField(default="server_start,server_stop")
body = peewee.CharField(default="")
color = peewee.CharField(default="#005cd1")
enabled = peewee.BooleanField(default=True)
class Meta:
table_name = "webhooks"
database = db
# **********************************************************************************
# Schedules Class
# **********************************************************************************
class Schedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
server_id = peewee.ForeignKeyField(Servers, backref="schedule_server")
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
interval_type = peewee.CharField()
start_time = peewee.CharField(null=True)
command = peewee.CharField(null=True)
name = peewee.CharField()
one_time = peewee.BooleanField(default=False)
cron_string = peewee.CharField(default="")
parent = peewee.IntegerField(null=True)
delay = peewee.IntegerField(default=0)
next_run = peewee.CharField(default="")
class Meta:
table_name = "schedules"
database = db
# **********************************************************************************
# Backups Class
# **********************************************************************************
class Backups(peewee.Model):
excluded_dirs = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
server_id = peewee.ForeignKeyField(Servers, backref="backups_server")
compress = peewee.BooleanField(default=False)
shutdown = peewee.BooleanField(default=False)
before = peewee.CharField(default="")
after = peewee.CharField(default="")
class Meta:
table_name = "backups"
database = db
this_migration = MigrateHistory.get_or_none(
MigrateHistory.name == "20240217_rework_servers_uuid_part2"
)
@ -71,22 +136,8 @@ def migrate(migrator: Migrator, database, **kwargs):
return
try:
logger.info("Migrating Data from Int to UUID (Foreign Keys)")
Console.info("Migrating Data from Int to UUID (Foreign Keys)")
# Changes on Audit Log Table
for audit_log in AuditLog.select():
old_server_id = audit_log.server_id_id
if old_server_id == "0" or old_server_id is None:
server_uuid = None
else:
try:
server = Servers.get_by_id(old_server_id)
server_uuid = server.server_uuid
except:
server_uuid = old_server_id
AuditLog.update(server_id=server_uuid).where(
AuditLog.audit_id == audit_log.audit_id
).execute()
logger.debug("Migrating Data from Int to UUID (Foreign Keys)")
Console.debug("Migrating Data from Int to UUID (Foreign Keys)")
# Changes on Webhooks Log Table
for webhook in Webhooks.select():
@ -137,8 +188,8 @@ def migrate(migrator: Migrator, database, **kwargs):
and RoleServers.server_id == old_server_id
).execute()
logger.info("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS")
Console.info("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS")
logger.debug("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS")
Console.debug("Migrating Data from Int to UUID (Foreign Keys) : SUCCESS")
except Exception as ex:
logger.error("Error while migrating Data from Int to UUID (Foreign Keys)")
@ -150,16 +201,16 @@ def migrate(migrator: Migrator, database, **kwargs):
return
try:
logger.info("Migrating Data from Int to UUID (Primary Keys)")
Console.info("Migrating Data from Int to UUID (Primary Keys)")
logger.debug("Migrating Data from Int to UUID (Primary Keys)")
Console.debug("Migrating Data from Int to UUID (Primary Keys)")
# Migrating servers from the old id type to the new one
for server in Servers.select():
Servers.update(server_id=server.server_uuid).where(
Servers.server_id == server.server_id
).execute()
logger.info("Migrating Data from Int to UUID (Primary Keys) : SUCCESS")
Console.info("Migrating Data from Int to UUID (Primary Keys) : SUCCESS")
logger.debug("Migrating Data from Int to UUID (Primary Keys) : SUCCESS")
Console.debug("Migrating Data from Int to UUID (Primary Keys) : SUCCESS")
except Exception as ex:
logger.error("Error while migrating Data from Int to UUID (Primary Keys)")
@ -218,9 +269,81 @@ def rollback(migrator: Migrator, database, **kwargs):
table_name = "servers"
database = db
# **********************************************************************************
# Role Servers Class
# **********************************************************************************
class RoleServers(peewee.Model):
role_id = peewee.ForeignKeyField(Roles, backref="role_server")
server_id = peewee.ForeignKeyField(Servers, backref="role_server")
permissions = peewee.CharField(default="00000000")
class Meta:
table_name = "role_servers"
primary_key = peewee.CompositeKey("role_id", "server_id")
database = db
# **********************************************************************************
# Webhooks Class
# **********************************************************************************
class Webhooks(peewee.Model):
id = peewee.AutoField()
server_id = peewee.ForeignKeyField(Servers, backref="webhook_server", null=True)
name = peewee.CharField(default="Custom Webhook", max_length=64)
url = peewee.CharField(default="")
webhook_type = peewee.CharField(default="Custom")
bot_name = peewee.CharField(default="Crafty Controller")
trigger = peewee.CharField(default="server_start,server_stop")
body = peewee.CharField(default="")
color = peewee.CharField(default="#005cd1")
enabled = peewee.BooleanField(default=True)
class Meta:
table_name = "webhooks"
database = db
# **********************************************************************************
# Schedules Class
# **********************************************************************************
class Schedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
server_id = peewee.ForeignKeyField(Servers, backref="schedule_server")
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
interval_type = peewee.CharField()
start_time = peewee.CharField(null=True)
command = peewee.CharField(null=True)
name = peewee.CharField()
one_time = peewee.BooleanField(default=False)
cron_string = peewee.CharField(default="")
parent = peewee.IntegerField(null=True)
delay = peewee.IntegerField(default=0)
next_run = peewee.CharField(default="")
class Meta:
table_name = "schedules"
database = db
# **********************************************************************************
# Backups Class
# **********************************************************************************
class Backups(peewee.Model):
excluded_dirs = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
server_id = peewee.ForeignKeyField(Servers, backref="backups_server")
compress = peewee.BooleanField(default=False)
shutdown = peewee.BooleanField(default=False)
before = peewee.CharField(default="")
after = peewee.CharField(default="")
class Meta:
table_name = "backups"
database = db
try:
logger.info("Migrating Data from UUID to Int (Primary Keys)")
Console.info("Migrating Data from UUID to Int (Primary Keys)")
logger.debug("Migrating Data from UUID to Int (Primary Keys)")
Console.debug("Migrating Data from UUID to Int (Primary Keys)")
# Migrating servers from the old id type to the new one
new_id = 0
for server in Servers.select():
@ -232,8 +355,8 @@ def rollback(migrator: Migrator, database, **kwargs):
Servers.server_id == server.server_id
).execute()
logger.info("Migrating Data from UUID to Int (Primary Keys) : SUCCESS")
Console.info("Migrating Data from UUID to Int (Primary Keys) : SUCCESS")
logger.debug("Migrating Data from UUID to Int (Primary Keys) : SUCCESS")
Console.debug("Migrating Data from UUID to Int (Primary Keys) : SUCCESS")
except Exception as ex:
logger.error("Error while migrating Data from UUID to Int (Primary Keys)")
@ -245,23 +368,8 @@ def rollback(migrator: Migrator, database, **kwargs):
return
try:
logger.info("Migrating Data from UUID to Int (Foreign Keys)")
Console.info("Migrating Data from UUID to Int (Foreign Keys)")
# Changes on Audit Log Table
for audit_log in AuditLog.select():
old_server_id = audit_log.server_id_id
if old_server_id is None:
new_server_id = 0
else:
try:
server = Servers.get_or_none(Servers.server_uuid == old_server_id)
new_server_id = server.server_id
except:
new_server_id = old_server_id
AuditLog.update(server_id=new_server_id).where(
AuditLog.audit_id == audit_log.audit_id
).execute()
logger.debug("Migrating Data from UUID to Int (Foreign Keys)")
Console.debug("Migrating Data from UUID to Int (Foreign Keys)")
# Changes on Webhooks Log Table
for webhook in Webhooks.select():
old_server_id = webhook.server_id_id
@ -311,8 +419,8 @@ def rollback(migrator: Migrator, database, **kwargs):
and RoleServers.server_id == old_server_id
).execute()
logger.info("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS")
Console.info("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS")
logger.debug("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS")
Console.debug("Migrating Data from UUID to Int (Foreign Keys) : SUCCESS")
except Exception as ex:
logger.error("Error while migrating Data from UUID to Int (Foreign Keys)")

View File

@ -0,0 +1,266 @@
import os
import datetime
import uuid
import peewee
import logging
from app.classes.shared.helpers import Helpers
from app.classes.shared.console import Console
from app.classes.shared.migration import Migrator
from app.classes.shared.file_helpers import FileHelpers
logger = logging.getLogger(__name__)
def is_valid_backup(backup, all_servers):
try:
return str(backup.server_id) in all_servers
except (TypeError, peewee.DoesNotExist):
return False
def migrate(migrator: Migrator, database, **kwargs):
"""
Write your migrations here.
"""
db = database
Console.info("Starting Backups migrations")
Console.info(
"Migrations: Adding columns [backup_id, "
"backup_name, backup_location, enabled, default, action_id, backup_status]"
)
migrator.add_columns(
"backups",
backup_id=peewee.CharField(default=Helpers.create_uuid),
)
migrator.add_columns("backups", backup_name=peewee.CharField(default="Default"))
migrator.add_columns("backups", backup_location=peewee.CharField(default=""))
migrator.add_columns("backups", enabled=peewee.BooleanField(default=True))
migrator.add_columns("backups", default=peewee.BooleanField(default=False))
migrator.add_columns(
"backups",
status=peewee.CharField(default='{"status": "Standby", "message": ""}'),
)
migrator.add_columns(
"schedules", action_id=peewee.CharField(null=True, default=None)
)
class Servers(peewee.Model):
server_id = peewee.CharField(primary_key=True, default=str(uuid.uuid4()))
created = peewee.DateTimeField(default=datetime.datetime.now)
server_name = peewee.CharField(default="Server", index=True)
path = peewee.CharField(default="")
backup_path = peewee.CharField(default="")
executable = peewee.CharField(default="")
log_path = peewee.CharField(default="")
execution_command = peewee.CharField(default="")
auto_start = peewee.BooleanField(default=0)
auto_start_delay = peewee.IntegerField(default=10)
crash_detection = peewee.BooleanField(default=0)
stop_command = peewee.CharField(default="stop")
executable_update_url = peewee.CharField(default="")
server_ip = peewee.CharField(default="127.0.0.1")
server_port = peewee.IntegerField(default=25565)
logs_delete_after = peewee.IntegerField(default=0)
type = peewee.CharField(default="minecraft-java")
show_status = peewee.BooleanField(default=1)
created_by = peewee.IntegerField(default=-100)
shutdown_timeout = peewee.IntegerField(default=60)
ignored_exits = peewee.CharField(default="0")
class Meta:
table_name = "servers"
database = db
class Backups(peewee.Model):
backup_id = peewee.CharField(primary_key=True, default=Helpers.create_uuid)
backup_name = peewee.CharField(default="New Backup")
backup_location = peewee.CharField(default="")
excluded_dirs = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
server_id = peewee.ForeignKeyField(Servers, backref="backups_server")
compress = peewee.BooleanField(default=False)
shutdown = peewee.BooleanField(default=False)
before = peewee.CharField(default="")
after = peewee.CharField(default="")
default = peewee.BooleanField(default=False)
status = peewee.CharField(default='{"status": "Standby", "message": ""}')
enabled = peewee.BooleanField(default=True)
class Meta:
table_name = "backups"
database = db
class NewBackups(peewee.Model):
backup_id = peewee.CharField(primary_key=True, default=Helpers.create_uuid)
backup_name = peewee.CharField(default="New Backup")
backup_location = peewee.CharField(default="")
excluded_dirs = peewee.CharField(null=True)
max_backups = peewee.IntegerField()
server_id = peewee.ForeignKeyField(Servers, backref="backups_server")
compress = peewee.BooleanField(default=False)
shutdown = peewee.BooleanField(default=False)
before = peewee.CharField(default="")
after = peewee.CharField(default="")
default = peewee.BooleanField(default=False)
status = peewee.CharField(default='{"status": "Standby", "message": ""}')
enabled = peewee.BooleanField(default=True)
class Meta:
table_name = "new_backups"
database = db
class Schedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
server_id = peewee.ForeignKeyField(Servers, backref="schedule_server")
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
interval_type = peewee.CharField()
start_time = peewee.CharField(null=True)
command = peewee.CharField(null=True)
action_id = peewee.CharField(null=True)
name = peewee.CharField()
one_time = peewee.BooleanField(default=False)
cron_string = peewee.CharField(default="")
parent = peewee.IntegerField(null=True)
delay = peewee.IntegerField(default=0)
next_run = peewee.CharField(default="")
class Meta:
table_name = "schedules"
database = db
class NewSchedules(peewee.Model):
schedule_id = peewee.IntegerField(unique=True, primary_key=True)
server_id = peewee.ForeignKeyField(Servers, backref="schedule_server")
enabled = peewee.BooleanField()
action = peewee.CharField()
interval = peewee.IntegerField()
interval_type = peewee.CharField()
start_time = peewee.CharField(null=True)
command = peewee.CharField(null=True)
action_id = peewee.CharField(null=True)
name = peewee.CharField()
one_time = peewee.BooleanField(default=False)
cron_string = peewee.CharField(default="")
parent = peewee.IntegerField(null=True)
delay = peewee.IntegerField(default=0)
next_run = peewee.CharField(default="")
class Meta:
table_name = "new_schedules"
database = db
migrator.create_table(NewBackups)
migrator.create_table(NewSchedules)
migrator.run()
all_servers = [
row.server_id for row in Servers.select(Servers.server_id).distinct()
]
all_backups = Backups.select()
Console.info("Cleaning up orphan backups for all servers")
valid_backups = [
backup for backup in all_backups if is_valid_backup(backup, all_servers)
]
# Copy data from the existing backups table to the new one
for backup in valid_backups:
Console.info(f"Trying to get server for backup migration {backup.server_id}")
# Fetch the related server entry from the Servers table
server = Servers.get(Servers.server_id == backup.server_id)
Console.info(f"Migrations: Migrating backup for server {server.server_name}")
# Create a new backup entry with data from the
# old backup entry and related server
new_backup = NewBackups.create(
backup_name=f"{server.server_name} Backup",
# Set backup_location equal to backup_path
backup_location=server.backup_path,
excluded_dirs=backup.excluded_dirs,
max_backups=backup.max_backups,
server_id=server.server_id,
compress=backup.compress,
shutdown=backup.shutdown,
before=backup.before,
after=backup.after,
default=True,
enabled=True,
)
Console.info(
f"New backup table created for {server.server_name} with id {new_backup.backup_id}"
)
Helpers.ensure_dir_exists(
os.path.join(server.backup_path, new_backup.backup_id)
)
try:
Console.info(
f"Moving old backups to new backup dir for {server.server_name}"
)
for file in os.listdir(server.backup_path):
if not os.path.isdir(
os.path.join(os.path.join(server.backup_path, file))
):
FileHelpers.move_file(
os.path.join(server.backup_path, file),
os.path.join(server.backup_path, new_backup.backup_id, file),
)
except FileNotFoundError as why:
logger.error(
f"Could not move backups for {server.server_name} to new location with error {why}"
)
Console.debug("Migrations: Dropping old backup table")
# Drop the existing backups table
migrator.drop_table("backups")
Console.debug("Migrations: Renaming new_backups to backups")
# Rename the new table to backups
migrator.rename_table("new_backups", "backups")
Console.debug("Migrations: Dropping backup_path from servers table")
migrator.drop_columns("servers", ["backup_path"])
for schedule in Schedules.select():
action_id = None
if schedule.command == "backup_server":
Console.info(
f"Migrations: Adding backup ID to task with name {schedule.name}"
)
backup = NewBackups.get(NewBackups.server_id == schedule.server_id)
action_id = backup.backup_id
NewSchedules.create(
schedule_id=schedule.schedule_id,
server_id=schedule.server_id,
enabled=schedule.enabled,
action=schedule.action,
interval=schedule.interval,
interval_type=schedule.interval_type,
start_time=schedule.start_time,
command=schedule.command,
action_id=action_id,
name=schedule.name,
one_time=schedule.one_time,
cron_string=schedule.cron_string,
parent=schedule.parent,
delay=schedule.delay,
next_run=schedule.next_run,
)
Console.debug("Migrations: dropping old schedules table")
# Drop the existing backups table
migrator.drop_table("schedules")
Console.debug("Migrations: renaming new_schedules to schedules")
# Rename the new table to backups
migrator.rename_table("new_schedules", "schedules")
def rollback(migrator: Migrator, database, **kwargs):
"""
Write your rollback migrations here.
"""
db = database
migrator.drop_columns("backups", ["name", "backup_id", "backup_location"])
migrator.add_columns("servers", backup_path=peewee.CharField(default=""))

View File

@ -0,0 +1,17 @@
# Generated by database migrator
import peewee
def migrate(migrator, database, **kwargs):
"""
Write your migrations here.
"""
migrator.rename_column("api_keys", "superuser", "full_access")
def rollback(migrator, database, **kwargs):
"""
Write your rollback migrations here.
"""
migrator.rename_column("api_keys", "full_access", "superuser")

View File

@ -0,0 +1,34 @@
import peewee
import datetime
from peewee import (
AutoField,
DateTimeField,
CharField,
IntegerField,
ForeignKeyField,
TextField,
)
from app.classes.shared.server import Servers
def migrate(migrator, db):
migrator.drop_table("audit_log")
def rollback(migrator, db):
class AuditLog(peewee.Model):
audit_id = AutoField()
created = DateTimeField(default=datetime.datetime.now)
user_name = CharField(default="")
user_id = IntegerField(default=0, index=True)
source_ip = CharField(default="127.0.0.1")
server_id = ForeignKeyField(
Servers, backref="audit_server", null=True
) # When auditing global events, use server ID null
log_msg = TextField(default="")
class Meta:
table_name = "audit_log"
migrator.create_table(AuditLog)

View File

@ -20,6 +20,7 @@
"created": "Vytvořen",
"deleteKeyConfirmation": "Chcete tento API klíč odstranit? Tuto akci nelze vrátit zpět.",
"deleteKeyConfirmationTitle": "Odstranit klíč API ${keyId}?",
"fullAccess": "všechno",
"getToken": "Získat token",
"name": "Jméno",
"nameDesc": "Jak chcete nazvat tento token API? ",
@ -192,11 +193,14 @@
},
"thousands": " ",
"zeroRecords": "Nebyly nalezeny žádné odpovídající záznamy"
}
},
"loadingRecords": "Načítání..."
},
"error": {
"agree": "Souhlasím",
"bedrockError": "Stažení Bedrocku není dostupné. Prosím zkontrolujte",
"bigBucket1": "Big Bucket Kontrola stavu selhala. Prosím zkontrolujte jej",
"bigBucket2": "pro nejnovější informace.",
"cancel": "Zrušit",
"contact": "Kontaktujte podporu Crafty přes Discord",
"craftyStatus": "Crafty stav systémů",
@ -218,6 +222,8 @@
"not-downloaded": "Zdá se, že nemůžeme najít váš spustitelný soubor. Bylo jeho stahování dokončeno? Jsou oprávnění nastavena na spustitelný soubor?",
"portReminder": "Zjistili jsme, že server {} byl spuštěn poprvé. Ujistěte se, že jste přesměrovali port {} přes váš směrovač/firewall, aby byl tento port vzdáleně přístupný z internetu.",
"privMsg": "a ",
"return": "vrátit se na hlavní stránku",
"selfHost": "Pokud Hostujete sami toto uložiště prosím zkontrolujte adresu nebo si přečtěte náš průvodce odstraňováním problémů.",
"serverJars1": "Server JAR api je nepřístupná. Prosím zkontrolujte",
"serverJars2": "pro aktualní informace.",
"start-error": "Server {} se nepodařilo spustit s kódem chyby: {}",
@ -315,10 +321,12 @@
"serversDesc": "servery, ke kterým má tato role přístup"
},
"serverBackups": {
"actions": "Akce",
"after": "Spustit příkaz po záloze",
"backupAtMidnight": "Automatické zálohování o půlnoci?",
"backupNow": "Zálohovat nyní!",
"backupTask": "Bylo spuštěno zálohování.",
"backups": "Zálohy serverů",
"before": "Spustit příkaz před zálohou",
"cancel": "Zrušit",
"clickExclude": "Kliknutím vyberete výjimku",
@ -327,21 +335,34 @@
"confirmDelete": "Chcete tuto zálohu odstranit? Tuto akci nelze vrátit zpět.",
"confirmRestore": "Jste si jisti, že chcete provést obnovu z této zálohy. Všechny aktuální soubory serveru se změní na stav zálohy a nebude možné je obnovit.",
"currentBackups": "Aktuální zálohy",
"default": "Defaultní záloha",
"defaultExplain": "Tuto zálohu Crafty používalo před aktualizací. Nemůžete ji změnit nebo smazat",
"delete": "Smazat",
"destroyBackup": "Zničit zálohu \" + file_to_del + \"?",
"download": "Stáhnout",
"edit": "upravit",
"enabled": "Povoleno",
"excludedBackups": "Vyloučené cesty: ",
"excludedChoose": "Vyberte cesty, které chcete ze zálohování vyloučit.",
"exclusionsTitle": "Vyloučení ze zálohování",
"failed": "Selhalo",
"maxBackups": "Maximální počet záloh",
"maxBackupsDesc": "Crafty neuloží více než N záloh a odstraní nejstarší (zadejte 0 pro zachování všech).",
"myBackup": "Moje nová záloha",
"name": "Jméno",
"newBackup": "Vytvořit novou zálohu",
"no-backup": "Žádné zálohy. Pro vytvoření nové zálohy zmáčkněte prosím. Vytvořit novou zálohu",
"options": "Nastavení",
"path": "Cesta",
"restore": "Obnovit",
"restoring": "Obnovení zálohy. To může chvíli trvat. Buďte prosím trpěliví.",
"run": "Nastartovat zálohu",
"save": "Uložit",
"shutdown": "Vypnout server po dobu zálohování",
"size": "Velikost",
"standby": "V pohotovosti",
"status": "Stav",
"storage": "Lokace uložiště",
"storageLocation": "Umístění úložiště",
"storageLocationDesc": "Kam chcete ukládat zálohy?"
},
@ -506,6 +527,7 @@
},
"serverSchedules": {
"action": "Akce",
"actionId": "Vyberte zálohu na které se to má potvrdit!",
"areYouSure": "Odstranění naplánované úlohy?",
"cancel": "Zrušit",
"cannotSee": "Nevidíte všechno?",
@ -583,6 +605,8 @@
"minMem": "Minimální paměť",
"myNewServer": "Nový server",
"newServer": "Vytvořit nový server",
"noRole": "Žádna role nebyla nalezena z tím to vstupem",
"noneRoles": "Žádne role vybrány",
"quickSettings": "Rychlé nastavení",
"quickSettingsDescription": "Nebojte se, můžete je změnit později.",
"resetForm": "Obnovit nastavení formuláře",
@ -612,13 +636,13 @@
"credits": "Zásluhy",
"dashboard": "Ovládací panel",
"documentation": "Dokumentace",
"inApp": "V app dokumentaci",
"navigation": "Navigace",
"inApp": "V lokalní dokumentaci",
"newServer": "Vytvořit nový server",
"servers": "Servery"
},
"startup": {
"almost": "Dokončuji. Držte se...",
"cache": "Znovu načítam mezipaměť Big Bucket",
"internals": "Nastavuji a startuji Crafty interní komponenty",
"internet": "Kontroluju připojení k internetu",
"server": "Konfigurace ",
@ -666,6 +690,9 @@
"userTheme": "Motiv UI",
"uses": "Počet povolených použití (-1==bez omezení)"
},
"validators": {
"passLength": "Heslo je příliš krátké. Minimální délka je 8 znaků"
},
"webhooks": {
"areYouSureDel": "Seš si jistý že chceš smazat tento webhook?",
"areYouSureRun": "Seš si jistý že chceš otestovat tento webhook?",

View File

@ -20,6 +20,7 @@
"created": "Erstellt",
"deleteKeyConfirmation": "Möchten Sie diesen API Schlüssel löschen? Diese Aktion kann nicht rückgängig gemacht werden.",
"deleteKeyConfirmationTitle": "Folgenden API Schlüssel löschen: ${keyId}?",
"fullAccess": "Vollzugriff",
"getToken": "Schlüssel erhalten",
"name": "Name",
"nameDesc": "Wie soll der API Schlüssel genannt werden? ",
@ -177,11 +178,14 @@
},
"thousands": ".",
"zeroRecords": "Keine passenden Einträge gefunden"
}
},
"loadingRecords": "Laden..."
},
"error": {
"agree": "Zustimmen",
"bedrockError": "Bedrock-Downloads sind nicht verfügbar. Bitte überprüfen Sie",
"bigBucket1": "Big Bucket Zustandsprüfung fehlgeschlagen. Bitte Überprüfen",
"bigBucket2": "für die aktuellsten Informationen.",
"cancel": "Abbrechen",
"contact": "Kontaktieren Sie den Crafty Control Support über Discord",
"craftyStatus": "Crafty-Statusseite",
@ -203,6 +207,8 @@
"not-downloaded": "Crafty kann die auszuführende Datei nicht finden. Ist der Download abgeschlossen? Sind die Berechtigungen für Crafty korrekt?",
"portReminder": "Wir haben festgestellt, dass dies das erste Mal ist, dass {} ausgeführt wurde. Stellen Sie sicher, dass Sie Port {} durch Ihren Router/Firewall weiterleiten, um den Fernzugriff aus dem Internet zu ermöglichen.",
"privMsg": "und der/die/das ",
"return": "Zurück zum Dashboard",
"selfHost": "Wenn Sie dieses Repo selbst hosten, überprüfen Sie bitte Ihre Adresse oder konsultieren Sie unsere Anleitung zur Fehlerbehebung.",
"serverJars1": "Server-JAR-API nicht erreichbar. Bitte überprüfen Sie ",
"serverJars2": "um die aktuellsten Informationen zu erhalten.",
"start-error": "Der Server {} konnte wegen dem Fehlercode: {} nicht gestartet werden",
@ -295,10 +301,12 @@
"serversDesc": "Server, auf die Nutzer mit dieser Rolle zugreifen darf"
},
"serverBackups": {
"actions": "Aktionen",
"after": "Befehl nach dem Backup ausführen",
"backupAtMidnight": "Automatisches Backup um 24:00 Uhr?",
"backupNow": "Jetzt sichern!",
"backupTask": "Ein Backup-Auftrag wurde gestartet.",
"backups": "Server-Backups",
"before": "Befehl vor dem Backup ausführen",
"cancel": "Abbrechen",
"clickExclude": "Auswählen, um Ausnahmen zu markieren",
@ -307,21 +315,34 @@
"confirmDelete": "Möchten Sie diese Backup-Datei löschen? Dies kann nicht rückgängig gemacht werden.",
"confirmRestore": "Sicher, dass dieses Backup wiederherstellgestellt werden soll? Alle aktuellen Serverdateien werden in den Zustand von diesem Backup versetzt und können nicht wiederhergestellt werden.",
"currentBackups": "Aktuelle Backups",
"default": "Standard-Backup",
"defaultExplain": "Das Backup, welches Crafty vor Updates verwendet. Dies kann nicht geändert oder gelöscht werden.",
"delete": "Löschen",
"destroyBackup": "Backup löschen \" + file_to_del + \"?",
"download": "Herunterladen",
"edit": "Bearbeiten",
"enabled": "Aktiviert",
"excludedBackups": "Ausgeschlossene Verzeichnisse: ",
"excludedChoose": "Verzeichnisse auswählen, die nicht gesichert werden sollen",
"exclusionsTitle": "Backup Ausnahmen",
"failed": "Fehlgeschlagen",
"maxBackups": "Maximale Backups",
"maxBackupsDesc": "Crafty speichert nicht mehr als N Backups, wodurch das älteste gelöscht wird (geben Sie 0 ein, um alle zu behalten)",
"myBackup": "Mein Neues Backup",
"name": "Name",
"newBackup": "Neues Backup erstellen",
"no-backup": "Keine Backups. Um eine neue Backup-Konfiguration zu erstellen, bitte auf 'Neues Backup erstellen' klicken.",
"options": "Optionen",
"path": "Pfad",
"restore": "Wiederherstellen",
"restoring": "Backup wiederherstellen. Dies kann eine Weile dauern.",
"run": "Backup erstellen",
"save": "Speichern",
"shutdown": "Server für die Dauer des Backups stoppen",
"size": "Größe",
"standby": "Bereitschaft",
"status": "Status",
"storage": "Speicherort",
"storageLocation": "Speicherort",
"storageLocationDesc": "Wo wollen Sie die Backups speichern?"
},
@ -486,6 +507,7 @@
},
"serverSchedules": {
"action": "Aktion",
"actionId": "Aktion auswählen",
"areYouSure": "Geplante Aufgabe löschen?",
"cancel": "Abbrechen",
"cannotSee": "Nicht alles sichtbar?",
@ -564,6 +586,8 @@
"minMem": "Minimaler RAM",
"myNewServer": "Mein neuer Server",
"newServer": "Neuen Server erstellen",
"noRole": "Keine Rolle mit aktuellem Suchparameter gefunden",
"noneRoles": "Keine Rollen ausgewählt",
"quickSettings": "Schnelleinstellungen",
"quickSettingsDescription": "Keine Sorge, Änderungen können später immer noch vorgenommen werden.",
"resetForm": "Konfiguration zurücksetzen",
@ -594,12 +618,12 @@
"dashboard": "Dashboard",
"documentation": "Dokumentation",
"inApp": "In-App-Dokumentation",
"navigation": "Navigation",
"newServer": "Neuen Server erstellen",
"servers": "Server"
},
"startup": {
"almost": "Nur noch einen Moment, fast geschafft",
"cache": "Aktualisieren der Big Bucket-Cache-Datei",
"internals": "Crafty's interne Komponneten initialisieren und starten",
"internet": "Verbindung zum Internet überprüfen",
"server": "initialisieren ",
@ -647,6 +671,9 @@
"userTheme": "Design für die Benutzeroberfläche",
"uses": "Anzahl der erlaubten Verwendungen (-1==Keine Begrenzung)"
},
"validators": {
"passLength": "Passwort zu kurz. Mindestlänge: 8"
},
"webhooks": {
"areYouSureDel": "Sind Sie sicher, dass Sie diesen Webhook löschen möchten?",
"areYouSureRun": "Sind Sie sicher, dass Sie diesen Webhook testen möchten?",

View File

@ -20,6 +20,7 @@
"created": "Created",
"deleteKeyConfirmation": "Do you want to delete this API key? This cannot be undone.",
"deleteKeyConfirmationTitle": "Remove API key ${keyId}?",
"fullAccess": "Full Access",
"getToken": "Get A Token",
"name": "Name",
"nameDesc": "What would you like to call this API token? ",
@ -28,7 +29,6 @@
"permName": "Permission Name",
"perms": "Permissions",
"server": "Server: ",
"superUser": "Super User",
"yes": "Yes"
},
"base": {
@ -177,11 +177,14 @@
},
"thousands": ",",
"zeroRecords": "No matching records found"
}
},
"loadingRecords": "Loading..."
},
"error": {
"agree": "Agree",
"bedrockError": "Bedrock downloads unavailable. Please check",
"bigBucket1": "Big Bucket Health Check Failed. Please check",
"bigBucket2": "for the most up to date information.",
"cancel": "Cancel",
"contact": "Contact Crafty Control Support via Discord",
"craftyStatus": "Crafty's status page",
@ -203,8 +206,8 @@
"not-downloaded": "We can't seem to find your executable file. Has it finished downloading? Are the permissions set to executable?",
"portReminder": "We have detected this is the first time {} has been run. Make sure to forward port {} through your router/firewall to make this remotely accessible from the internet.",
"privMsg": "and the ",
"serverJars1": "Server JARs API unreachable. Please check",
"serverJars2": "for the most up to date information.",
"return": "Return to Dashboard",
"selfHost": "If you are self-hosting this repo please check your address or consult our troubleshooting guide.",
"start-error": "Server {} failed to start with error code: {}",
"superError": "You must be a super user to complete this action.",
"terribleFailure": "What a Terrible Failure!"
@ -295,10 +298,12 @@
"serversDesc": "servers this role is allowed to access"
},
"serverBackups": {
"actions": "Actions",
"after": "Run command after backup",
"backupAtMidnight": "Auto-backup at midnight?",
"backupNow": "Backup Now!",
"backupTask": "A backup task has been started.",
"backups": "Server Backups",
"before": "Run command before backup",
"cancel": "Cancel",
"clickExclude": "Click to select Exclusions",
@ -307,21 +312,34 @@
"confirmDelete": "Do you want to delete this backup? This cannot be undone.",
"confirmRestore": "Are you sure you want to restore from this backup. All current server files will changed to backup state and will be unrecoverable.",
"currentBackups": "Current Backups",
"default": "Default Backup",
"defaultExplain": "The backup that Crafty will use before updates. This cannot be changed or deleted.",
"delete": "Delete",
"destroyBackup": "Destroy backup \" + file_to_del + \"?",
"download": "Download",
"edit": "Edit",
"enabled": "Enabled",
"excludedBackups": "Excluded Paths: ",
"excludedChoose": "Choose the paths you wish to exclude from your backups",
"exclusionsTitle": "Backup Exclusions",
"failed": "Failed",
"maxBackups": "Max Backups",
"maxBackupsDesc": "Crafty will not store more than N backups, deleting the oldest (enter 0 to keep all)",
"myBackup": "My New Backup",
"name": "Name",
"newBackup": "Create New Backup",
"no-backup": "No Backups. To make a new backup configuration please press. New Backup",
"options": "Options",
"path": "Path",
"restore": "Restore",
"restoring": "Restoring Backup. This may take a while. Please be patient.",
"run": "Run Backup",
"save": "Save",
"shutdown": "Shutdown server for duration of backup",
"size": "Size",
"standby": "Standby",
"status": "Status",
"storage": "Storage Location",
"storageLocation": "Storage Location",
"storageLocationDesc": "Where do you want to store backups?"
},
@ -486,6 +504,7 @@
},
"serverSchedules": {
"action": "Action",
"actionId": "Select Action Child",
"areYouSure": "Delete Scheduled Task?",
"cancel": "Cancel",
"cannotSee": "Not seeing everything?",
@ -563,6 +582,8 @@
"minMem": "Minimum Memory",
"myNewServer": "My New Server",
"newServer": "Create New Server",
"noRole": "No role found with current search parameter",
"noneRoles": "No Roles Selected",
"quickSettings": "Quick Settings",
"quickSettingsDescription": "Don't worry, you can change these later",
"resetForm": "Reset Form",
@ -593,12 +614,12 @@
"dashboard": "Dashboard",
"documentation": "Documentation",
"inApp": "In App Docs",
"navigation": "Navigation",
"newServer": "Create New Server",
"servers": "Servers"
},
"startup": {
"almost": "Finishing up. Hang on tight...",
"cache": "Refreshing Big Bucket cache file",
"internals": "Configuring and starting Crafty's internal components",
"internet": "Checking for internet connection",
"server": "Initializing ",
@ -646,6 +667,9 @@
"userTheme": "UI Theme",
"uses": "Number of uses allowed (-1==No Limit)"
},
"validators": {
"passLength": "Password Too Short. Minimum Length: 8"
},
"webhooks": {
"areYouSureDel": "Are you sure you want to delete this webhook?",
"areYouSureRun": "Are you sure you want to test this webhook?",

View File

@ -20,6 +20,7 @@
"created": "Creado",
"deleteKeyConfirmation": "¿Quieres eliminar esta clave de API? Esto no se puede deshacer.",
"deleteKeyConfirmationTitle": "¿Eliminar la clave API ${keyId}?",
"fullAccess": "Acceso completo",
"getToken": "Conseguir un Token",
"name": "Nombre",
"nameDesc": "¿Como te gustaría llamar a este Token de API? ",
@ -177,11 +178,14 @@
},
"thousands": ",",
"zeroRecords": "No se encontraron registros que coincidan"
}
},
"loadingRecords": "Cargando..."
},
"error": {
"agree": "Aceptar",
"bedrockError": "Descargas de Bedrock no disponibles. por favor, compruebe",
"bigBucket1": "La verificación de estado de Big Bucket ha fallado. Por favor, verifica",
"bigBucket2": "para obtener la información más actualizada.",
"cancel": "Cancelar",
"contact": "Contacta el soporte de Crafty Control desde Discord",
"craftyStatus": "Página de estados de Crafty",
@ -203,6 +207,8 @@
"not-downloaded": "No podemos encontrar el archivo ejecutable. ¿Ha terminado de descargarse? ¿Están los permisos puestos como ejecutable?",
"portReminder": "Detectamos que es la primera vez que se inicia {}. Asegúrese de configurar el puerto {} a través de su router/firewall para hacer el servidor accesible por Internet.",
"privMsg": "y el ",
"return": "Volver al panel de control",
"selfHost": "Si estás autoalojando este repositorio, revisa tu dirección o consulta nuestra guía de solución de problemas.",
"serverJars1": "API de Servidor JAR no disponible. por favor, compruebe",
"serverJars2": "para la información más actualizada.",
"start-error": "Servidor {} fallo al iniciar con código de error: {}",
@ -222,7 +228,7 @@
"login": "Iniciar Sesión",
"password": "Contraseña",
"username": "Usuario",
"viewStatus": "View Public Status Page"
"viewStatus": "Ver página de estado público"
},
"notify": {
"activityLog": "Registros de actividad",
@ -295,10 +301,12 @@
"serversDesc": "Servidores a los que este grupo puede acceder"
},
"serverBackups": {
"actions": "Acciones",
"after": "Comando ejecutado después del respaldo",
"backupAtMidnight": "¿Copia de seguridad automática a medianoche?",
"backupNow": "¡Respalde ahora!",
"backupTask": "Se ha iniciado una tarea de copia de seguridad.",
"backups": "Copias de seguridad del servidor",
"before": "Comando ejecutado antes del respaldo",
"cancel": "Cancelar",
"clickExclude": "Click para seleccionar las Exclusiones",
@ -307,21 +315,34 @@
"confirmDelete": "¿Quieres eliminar esta copia de seguridad? Esto no se puede deshacer.",
"confirmRestore": "¿Seguro que quiere restaurar desde este respaldo?. Todos los archivos del servidor actuales serán cambiados al estado del respaldo y serán irrecuperables.",
"currentBackups": "Copias de seguridad actuales",
"default": "Copia de seguridad predeterminada",
"defaultExplain": "La copia de seguridad que Crafty usará antes de actualizar. No se puede cambiar ni eliminar.",
"delete": "Eliminar",
"destroyBackup": "¿Destruir copia de seguridad \" + file_to_del + \"?",
"download": "Descargar",
"edit": "Editar",
"enabled": "Habilitado",
"excludedBackups": "Rutas Excluidas: ",
"excludedChoose": "Elige las rutas que desea excluir de los respaldos",
"exclusionsTitle": "Exclusiones en respaldos.",
"failed": "Fallido",
"maxBackups": "Cantidad máxima de respaldos",
"maxBackupsDesc": "Crafty no almacenará más de N copias de seguridad, eliminando la más antigua. (Sin límite: 0)",
"myBackup": "Mi Nueva Copia",
"name": "Nombre",
"newBackup": "Crear Nueva Copia de Seguridad",
"no-backup": "No hay copias de seguridad. Para crear una nueva configuración de copias de seguridad, presiona Crear nueva copia",
"options": "Opciones",
"path": "Ruta",
"restore": "Restaurar",
"restoring": "Restaurando copia de seguridad. Esto puede tomar un tiempo. Sea paciente.",
"run": "Ejecutar Copia de seguridad",
"save": "Guardar",
"shutdown": "Apagar el servidor durante la duración de la copia del respaldo.",
"size": "Tamaño",
"standby": "En espera",
"status": "Estado",
"storage": "Ubicación del almacenamiento",
"storageLocation": "Ubicación de almacenamiento",
"storageLocationDesc": "¿Dónde quieres almacenar las copias de seguridad?"
},
@ -486,6 +507,7 @@
},
"serverSchedules": {
"action": "Acción",
"actionId": "Seleccionar acción secundaria",
"areYouSure": "¿Borrar tarea programada?",
"cancel": "Cancelar",
"cannotSee": "¿No puede ver todo?",
@ -564,6 +586,8 @@
"minMem": "Memoria mínima",
"myNewServer": "Mi nuevo Servidor",
"newServer": "Crear Servidor",
"noRole": "No se encontró ningún rol con el parámetro de búsqueda actual",
"noneRoles": "No hay roles seleccionados",
"quickSettings": "Ajustes rápidos",
"quickSettingsDescription": "No te preocupes, puedes cambiarlos más tarde.",
"resetForm": "Limpiar formulario",
@ -594,12 +618,12 @@
"dashboard": "Panel de control",
"documentation": "Documentación",
"inApp": "Documentación de la Aplicación",
"navigation": "Navegación",
"newServer": "Crear nuevo Servidor",
"servers": "Servidores"
},
"startup": {
"almost": "Terminando. Espera un momento...",
"cache": "Actualizando el archivo de caché de Big Bucket",
"internals": "Configurando e inicializando los componentes internos de Crafty",
"internet": "Verificando conexion a internet",
"server": "Inicializando ",
@ -647,6 +671,9 @@
"userTheme": "Tema de Interfaz",
"uses": "Número de usos permitidos. (Sin límite: -1)"
},
"validators": {
"passLength": "Contraseña demasiado corta. Longitud mínima: 8"
},
"webhooks": {
"areYouSureDel": "¿Estás seguro de que quieres eliminar este webhook?",
"areYouSureRun": "¿Estás seguro de que quieres probar este webhook?",

View File

@ -100,6 +100,7 @@
"welcome": "Tervetuloa Crafty Controller"
},
"datatables": {
"loadingRecords": "Ladataan...",
"i18n": {
"aria": {
"sortAscending": ": lajittele sarake nousevasti",
@ -519,7 +520,6 @@
"credits": "Hyvitykset",
"dashboard": "Kojelauta",
"documentation": "Dokumentaatio",
"navigation": "Navigaatio",
"newServer": "Luo uusi palvelin",
"servers": "Palvelimet"
},
@ -560,4 +560,4 @@
"userSettings": "Käyttäjäasetukset",
"uses": "Sallittujen käyttäkertojen määtä (-1 == Ei rajaa)"
}
}
}

View File

@ -20,6 +20,7 @@
"created": "Crée",
"deleteKeyConfirmation": "Es-tu sûr de vouloir supprimer cette clé API? Tu ne pourras plus revenir en arrière.",
"deleteKeyConfirmationTitle": "Supprimer la clé API ${keyId}?",
"fullAccess": "Accès Complet",
"getToken": "Obtenir un Jeton",
"name": "Nom",
"nameDesc": "Comment appeler ce Jeton d'API ? ",
@ -177,11 +178,14 @@
},
"thousands": ",",
"zeroRecords": "Aucun enregistrement correspondant trouvcé"
}
},
"loadingRecords": "Chargement ..."
},
"error": {
"agree": "Agree",
"bedrockError": "Téléchargement Bedrock non disponible. Merci de vérifier",
"bigBucket1": "Echec de vérification de l'état de Big Bucket. Veuillez vérifier",
"bigBucket2": " pour l'information la plus à jour.",
"cancel": "Annuler",
"contact": "Contacter le Support de Crafty Control via Discord",
"craftyStatus": "Page de statut de Crafty",
@ -203,6 +207,8 @@
"not-downloaded": "Nous ne parvenons pas à trouver le fichier exécutable. A-t-il fini de Télécharger ? Les permissions permettent elles l'exécution ?",
"portReminder": "Nous avons détecté que c'est la première fois que {} est exécuté. Assurez-vous de transférer le port {} via votre routeur/pare-feu pour le rendre accessible à distance depuis Internet.",
"privMsg": "et le ",
"return": "Revenir au Tableau de Bord",
"selfHost": "Si vous hébergez vous-même ce repo, veuillez vérifier votre adresse et votre guide de dépannage.",
"serverJars1": "l'API Server JARs est inaccessible. Merci de vérifier",
"serverJars2": "pour les informations les plus à jour.",
"start-error": "Le serveur {} n'a pas pu démarrer avec le code d'erreur : {}",
@ -295,10 +301,12 @@
"serversDesc": "Les serveurs auquels ce rôle a accès"
},
"serverBackups": {
"actions": "Actions",
"after": "Exécuter une commande après la sauvegarde",
"backupAtMidnight": "Sauvegarde Automatique à minuit ?",
"backupNow": "Sauvegarder Maintenant !",
"backupTask": "Une sauvegarde vient de démarrer.",
"backups": "Sauvegarde de Serveur",
"before": "Exécuter une commande avant la sauvegarde",
"cancel": "Annuler",
"clickExclude": "Cliquer pour sélectionner les Exclusions",
@ -307,21 +315,34 @@
"confirmDelete": "Es-tu sûr de vouloir supprimer cette sauvegarde ? Tu ne pourras pas revenir en arrière.",
"confirmRestore": "Êtes-vous sûr de vouloir restaurer à partir de cette sauvegarde. Tous les fichiers du serveur actuel passeront à l'état de sauvegarde et seront irrécupérables.",
"currentBackups": "Sauvegardes Actuelles",
"default": "Sauvegarde par Défaut",
"defaultExplain": "La sauvegarde que Crafty utilisera avant la mise à jour. Cela ne peut être changé ou modifié.",
"delete": "Supprimer",
"destroyBackup": "Supprimer la sauvegarde \" + file_to_del + \" ?",
"download": "Télécharger",
"edit": "Modifier",
"enabled": "Activé",
"excludedBackups": "Dossiers Exclus : ",
"excludedChoose": "Choisir les dossiers à exclure de la sauvegarde",
"exclusionsTitle": "Exclusions de Sauvegarde",
"failed": "Echec",
"maxBackups": "Sauvergardes Max",
"maxBackupsDesc": "Crafty ne fera pas plus de N sauvegardes, supprimant les plus anciennes (entrer 0 pour toutes les garder)",
"myBackup": "Ma Nouvelle Sauvegarde",
"name": "Nom",
"newBackup": "Créer une Nouvelle Sauvegarde",
"no-backup": "Aucune Sauvegarde. Pour aouter une nouvelle configuration de sauvegarde, il faut clicker sur ",
"options": "Options",
"path": "Chemin",
"restore": "Restaurer",
"restoring": "Restauration de la sauvegarde. Cela peut prendre un peu de temps. S'il vous plaît soyez patient.",
"run": "Lancer la Sauvegarde",
"save": "Sauvegarder",
"shutdown": "Extinction du serveur pendant la durée de la sauvegarde",
"size": "Taille",
"standby": "Attente",
"status": "Statut",
"storage": "Emplacement de la Sauvegarde",
"storageLocation": "Emplacement de Sauvegarde",
"storageLocationDesc": "Où veux-tu enregister tes sauvegardes ?"
},
@ -486,6 +507,7 @@
},
"serverSchedules": {
"action": "Action",
"actionId": "Sélectionner une configuration de sauvegarde",
"areYouSure": "Supprimer la Tâche Planifiée ?",
"cancel": "Annuler",
"cannotSee": "Tu ne peux pas tout voir ?",
@ -536,7 +558,7 @@
"importing": "Importation ...",
"installing": "Installation ...",
"restart": "Redémarrer",
"sendCommand": "Envoiyer commande",
"sendCommand": "Envoyer commande",
"start": "Démarrer",
"starting": "Démarrage retardé",
"stop": "Arrêter",
@ -564,8 +586,10 @@
"minMem": "Mémoire Minimum",
"myNewServer": "Mon Nouveau Serveur",
"newServer": "Créer un Nouveau Serveur",
"noRole": "Aucun rôle trouvé avec les paramètres de recherche suivants",
"noneRoles": "Aucun Rôle Sélectionné ",
"quickSettings": "Paramètres Rapides",
"quickSettingsDescription": "Pas d'Inquiétude, tu peux changer tout ça polus tard",
"quickSettingsDescription": "Pas d'Inquiétude, tu peux changer tout ça plus tard",
"resetForm": "Réinitialiser Formulaire",
"save": "Sauvegarder",
"selectRole": "Sélectionnez le rôle(s)",
@ -575,7 +599,7 @@
"selectVersion": "Selectionner une Version",
"selectZipDir": "Selectionner le dossier de l'archive depuis lequel extraire les fichiers",
"serverJar": "Fichier Jar du Serveur",
"serverName": "Non du Serveur",
"serverName": "Nom du Serveur",
"serverPath": "Chemin du Serveur",
"serverPort": "Port du Serveur",
"serverSelect": "Sélectionner un Serveur",
@ -594,12 +618,12 @@
"dashboard": "Tableau de Bord",
"documentation": "Documentation",
"inApp": "Documentation Interne",
"navigation": "Navigation",
"newServer": "Créer un Nouveau Serveur",
"servers": "Serveurs"
},
"startup": {
"almost": "Finalisation. Patienter ...",
"cache": "Mise à jour du fichier cache de Big Bucket",
"internals": "Configuration et Démarrage des composants internes de Crafty",
"internet": "Vérification de la connexion à Internet",
"server": "Initialisation ",
@ -647,6 +671,9 @@
"userTheme": "Theme d'Interface Utilisateur",
"uses": "Nombre d'utilisation Authorisé (-1 == Illimité)"
},
"validators": {
"passLength": "Mot de passe trop court. Longueur minimum : 8"
},
"webhooks": {
"areYouSureDel": "Es-tu sûr de vouloir supprimer ce webhook ?",
"areYouSureRun": "Es-tu sûr de vouloir tester ce webhook ?",

View File

@ -99,6 +99,7 @@
"welcome": "Wolkom by Crafty Controller"
},
"datatables": {
"loadingRecords": "Laden...",
"i18n": {
"aria": {
"sortAscending": ": aktivearje om kolom oprinnend te sortearjen",
@ -488,7 +489,6 @@
"credits": "Credits",
"dashboard": "Dashboard",
"documentation": "Dokumintaasje",
"navigation": "Navigaasje",
"newServer": "Nije server oanmeitsje",
"servers": "Servers"
},
@ -529,4 +529,4 @@
"userSettings": "Brûkersynstellingen",
"uses": "Oantal gebrûk tastien (-1==Gjin limyt)"
}
}
}

View File

@ -20,6 +20,7 @@
"created": "נוצר",
"deleteKeyConfirmation": "האם ברצונך למחוק מפתח API זה? אי אפשר לבטל את זה.",
"deleteKeyConfirmationTitle": "? ${keyId} API-להסיר את מפתח ה",
"fullAccess": "גישה מלאה להכל",
"getToken": "קבלת אסימון",
"name": "שם",
"nameDesc": "הזה API-איך תרצו לקרוא לאסימון ה",
@ -177,11 +178,14 @@
},
"thousands": ",",
"zeroRecords": "לא נמצאו תוצאות תואמות"
}
},
"loadingRecords": "...טוען"
},
"error": {
"agree": "מסכים",
"bedrockError": "הורדות Bedrock אינן זמינות. אנא בדוק",
"bigBucket1": "בדיקת הבריאות של Big Bucket נכשלה. אנא בדוק",
"bigBucket2": "כדי לקבל את המידע המעודכן ביותר.",
"cancel": "בטל",
"contact": "בבקשה צרו קשר עם תמיכת פאנל קראפטי באמצעות דיסקורד",
"craftyStatus": "דף המצב של Crafty",
@ -203,6 +207,8 @@
"not-downloaded": "לא הצלחנו למצוא את קובץ ההפעלה שלך. האם זה סיים להוריד? האם ההרשאות מוגדרות בשביל הפעלה?",
"portReminder": "זיהינו שזו הפעם הראשונה ש-{} מופעל. הקפידו להעביר את היציאה {} דרך הנתב/חומת האש שלכם כדי להפוך אותה לנגישה מרחוק מהאינטרנט.",
"privMsg": "וה",
"return": "חזרה לפאנל",
"selfHost": "אם אתה מארח בעצמך את הריפו הזה, אנא בדוק את הכתובת שלך או התייעץ עם מדריך פתרון הבעיות שלנו.",
"serverJars1": "API של צנצנות השרת אינו נגיש. אנא בדוק",
"serverJars2": "למידע מעודכן ביותר.",
"start-error": "השרת {} לא הצליח להתחיל עם קוד שגיאה: {}",
@ -295,10 +301,12 @@
"serversDesc": "לשרתים מותר לגשת לתפקיד זה"
},
"serverBackups": {
"actions": "פעולות",
"after": "הרץ פקודה לאחר הגיבוי",
"backupAtMidnight": "גיבוי אוטומטי בחצות?",
"backupNow": "!גיבוי עכשיו",
"backupTask": "החלה משימת גיבוי.",
"backups": "גיבויי שרת",
"before": "הרץ פקודה לפני הגיבוי",
"cancel": "לבטל",
"clickExclude": "לחצו כדי לבחור מה לא יהיה בגיבוי",
@ -307,21 +315,34 @@
"confirmDelete": "האם ברצונכם למחוק את הגיבוי הזה? אי אפשר לבטל את זה.",
"confirmRestore": "האם אתם בטוחים שברצונכם לשחזר מגיבוי זה. כל קבצי השרת הנוכחיים ישתנו למצב גיבוי ולא יהיה אפשר לשחזר.",
"currentBackups": "גיבויים נוכחיים",
"default": "גיבוי ברירת מחדל",
"defaultExplain": "הגיבוי ש-Crafty ישתמש בו לפני עדכונים. לא ניתן לשנות או למחוק.",
"delete": "למחוק",
"destroyBackup": "?\" + file_to_del + \" להרוס גיבוי",
"download": "הורדה",
"edit": "ערוך",
"enabled": "מופעל",
"excludedBackups": "נתיבים שלא נכללו: ",
"excludedChoose": "בחרו את הנתיבים שברצונכם לא לכלול בגיבויים",
"exclusionsTitle": "אי הכללות גיבוי",
"failed": "נכשל",
"maxBackups": "מקסימום גיבויים",
"maxBackupsDesc": "גיבויים, ימחק את הישן ביותר (הזן 0 כדי לשמור את כולם) N-קראפטי לא יאחסן יותר מ",
"myBackup": "הגיבוי החדש שלי",
"name": "שם",
"newBackup": "צור גיבוי חדש",
"no-backup": "אין גיבויים. כדי ליצור תצורת גיבוי חדשה אנא לחץ על גיבוי חדש",
"options": "אפשרויות",
"path": "נתיב",
"restore": "לשחזר",
"restoring": "שחזור גיבוי. זה עשוי לקחת זמן. אנא חכו בסבלנות.",
"run": "הפעל גיבוי",
"save": "שמירה",
"shutdown": "כיבוי שרת למשך הגיבוי",
"size": "גודל",
"standby": "בהמתנה",
"status": "סטטוס",
"storage": "מיקום אחסון",
"storageLocation": "מקום איחסון",
"storageLocationDesc": "איפו אתם רוצים לאחסן גיבויים?"
},
@ -486,6 +507,7 @@
},
"serverSchedules": {
"action": "פעולה",
"actionId": "בחר פעולה משנית",
"areYouSure": "למחוק משימה מתוזמנת?",
"cancel": "לבטל",
"cannotSee": "לא רואים הכל?",
@ -564,6 +586,8 @@
"minMem": "מינימום זיכרון",
"myNewServer": "השרת החדש שלי",
"newServer": "צור שרת חדש",
"noRole": "לא נמצא תפקיד עם פרמטר החיפוש הנוכחי",
"noneRoles": "לא נבחרו תפקידים",
"quickSettings": "הגדרות מהירות",
"quickSettingsDescription": "אל תדאג, אתה יכול לשנות את אלה מאוחר יותר",
"resetForm": "אפס טופס",
@ -594,12 +618,12 @@
"dashboard": "פאנל",
"documentation": "ויקיפדייה",
"inApp": "מסמכים באפליקציה",
"navigation": "ניווט",
"newServer": "צור שרת חדש",
"servers": "שרתים"
},
"startup": {
"almost": "מסיימים. תחזיקו חזק...",
"cache": "מרענן את קובץ המטמון של Big Bucket",
"internals": "הגדרה והפעלה של הרכיבים הפנימיים של Crafty",
"internet": "בודק את חיבור האינטרנט",
"server": "אתחול ",
@ -647,6 +671,9 @@
"userTheme": "ערכת נושא UI",
"uses": "מספר השימושים המותרים (-1==ללא הגבלה)"
},
"validators": {
"passLength": "סיסמא קצרה מדי. אורך מינימלי: 8"
},
"webhooks": {
"areYouSureDel": "האם אתה בטוח שברצונך למחוק את ה-Webhook הזה?",
"areYouSureRun": "האם אתה בטוח שברצונך לבדוק את ה-Webhook הזה?",

View File

@ -99,6 +99,7 @@
"welcome": "Dobrodošli u Crafty Controller"
},
"datatables": {
"loadingRecords": "Učitavanje...",
"i18n": {
"aria": {
"sortAscending": ": aktiviraj za sortiranje stupca uzlazno",
@ -488,7 +489,6 @@
"credits": "Zasluge",
"dashboard": "Upravljačka ploča",
"documentation": "Dokumentacija",
"navigation": "Navigacija",
"newServer": "Stvorite novi poslužitelj",
"servers": "Poslužitelji"
},
@ -529,4 +529,4 @@
"userSettings": "Korisničke postavke",
"uses": "Broj dopuštenih upotreba (-1==Bez ograničenja)"
}
}
}

Some files were not shown because too many files have changed in this diff.