import json
import threading
import time
import shutil
import logging
from datetime import datetime

import requests

from app.classes.controllers.servers_controller import ServersController
from app.classes.models.server_permissions import PermissionsServers
from app.classes.shared.websocket_manager import WebSocketManager

logger = logging.getLogger(__name__)
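
# Server types whose versions and downloads come from the PaperMC API
# rather than serverjars.com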
PAPERJARS = ["paper", "folia"]


class ServerJars:
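    """Wrapper around the serverjars.com and PaperMC download APIs.

    Fetches the available server types and versions, caches them in a local
    JSON file, and downloads server jars on a background thread."""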

    def __init__(self, helper):
        self.helper = helper
        self.base_url = "https://serverjars.com"
        self.paper_base = "https://api.papermc.io"

    @staticmethod
    def get_paper_jars():
        return PAPERJARS

    def _get_api_result(self, call_url: str):
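        """Call a serverjars.com API endpoint and return its 'response'
        payload, or an empty dict if the request fails or the API reports a
        non-success status."""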
        full_url = f"{self.base_url}{call_url}"

        try:
            response = requests.get(full_url, timeout=2)
            response.raise_for_status()
            api_data = json.loads(response.content)
        except Exception as e:
            logger.error(f"Unable to load {full_url} api due to error: {e}")
            return {}

        api_result = api_data.get("status")
        api_response = api_data.get("response", {})

        if api_result != "success":
            logger.error(f"Api returned a failed status: {api_result}")
            return {}

        return api_response

    def get_paper_versions(self, project):
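        """Return the Minecraft versions the PaperMC API lists for `project`,
        reversed so the newest versions come first, or {} on failure."""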
        try:
            response = requests.get(
                f"{self.paper_base}/v2/projects/{project}/", timeout=2
            )
            response.raise_for_status()
            api_data = json.loads(response.content)
        except Exception as e:
            logger.error(
                f"Unable to load https://api.papermc.io/v2/projects/{project}/"
                f" api due to error: {e}"
            )
            return {}

        versions = api_data.get("versions", [])
        versions.reverse()
        return versions

    def get_paper_build(self, project, version):
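        """Return the latest build entry the PaperMC API lists for the given
        project and version, or an empty dict if the lookup fails."""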
        try:
            response = requests.get(
                f"{self.paper_base}/v2/projects/{project}/versions/{version}/builds/",
                timeout=2,
            )
            response.raise_for_status()
            api_data = json.loads(response.content)
        except Exception as e:
            logger.error(
                f"Unable to load https://api.papermc.io/v2/projects/{project}/"
                f" api due to error: {e}"
            )
            return {}

        builds = api_data.get("builds", [])
        # guard against an empty build list instead of raising IndexError
        return builds[-1] if builds else {}

    def _read_cache(self):
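        """Load the cached serverjars data from disk, returning an empty dict
        if the cache file is missing or unreadable."""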
        cache_file = self.helper.serverjar_cache
        cache = {}
        try:
            with open(cache_file, "r", encoding="utf-8") as f:
                cache = json.load(f)
        except Exception as e:
            logger.error(f"Unable to read serverjars.com cache file: {e}")

        return cache

    def get_serverjar_data(self):
        data = self._read_cache()
        return data.get("types")

    def _check_api_alive(self):
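        """Return True if serverjars.com answers its fetchTypes endpoint,
        False otherwise."""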
        logger.info("Checking serverjars.com API status")

        check_url = f"{self.base_url}/api/fetchTypes"
        try:
            response = requests.get(check_url, timeout=2)

            if response.status_code in [200, 201]:
                logger.info("Serverjars.com API is alive")
                return True
        except Exception as e:
            logger.error(f"Unable to connect to serverjars.com API due to error: {e}")
            return False

        logger.error("Unable to contact serverjars.com API")
        return False

    def manual_refresh_cache(self):
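        """Rebuild the serverjars cache file immediately, regardless of how
        old the existing cache is."""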
        cache_file = self.helper.serverjar_cache

        # if the API is down... we bomb out
        if not self._check_api_alive():
            return False

        logger.info("Manual Refresh requested.")
        now = datetime.now()
        data = {
            "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
            "types": {},
        }

        jar_types = self._get_server_type_list()
        data["types"].update(jar_types)
        for s in data["types"]:
            data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
            for j in data["types"].get(s):
                versions = self._get_jar_details(j, s)
                data["types"][s].update({j: versions})
        for item in PAPERJARS:
            data["types"]["servers"][item] = self.get_paper_versions(item)

        # save our cache
        try:
            with open(cache_file, "w", encoding="utf-8") as f:
                f.write(json.dumps(data, indent=4))
                logger.info("Cache file refreshed")
        except Exception as e:
            logger.error(f"Unable to update serverjars.com cache file: {e}")

    def refresh_cache(self):
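        """Rebuild the serverjars cache file, but only if the existing cache
        is more than a day old."""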
        cache_file = self.helper.serverjar_cache
        cache_old = self.helper.is_file_older_than_x_days(cache_file)

        # debug override
        # cache_old = True

        # if the API is down... we bomb out
        if not self._check_api_alive():
            return False

        logger.info("Checking Cache file age")
        # if file is older than 1 day
        if cache_old:
            logger.info("Cache file is over 1 day old, refreshing")
            now = datetime.now()
            data = {
                "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"),
                "types": {},
            }

            jar_types = self._get_server_type_list()
            data["types"].update(jar_types)
            for s in data["types"]:
                data["types"].update({s: dict.fromkeys(data["types"].get(s), {})})
                for j in data["types"].get(s):
                    versions = self._get_jar_details(j, s)
                    data["types"][s].update({j: versions})
            for item in PAPERJARS:
                data["types"]["servers"][item] = self.get_paper_versions(item)

            # save our cache
            try:
                with open(cache_file, "w", encoding="utf-8") as f:
                    f.write(json.dumps(data, indent=4))
                    logger.info("Cache file refreshed")
            except Exception as e:
                logger.error(f"Unable to update serverjars.com cache file: {e}")

    def _get_jar_details(self, server_type, jar_type="servers"):
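        """Return the version strings serverjars.com offers for one server
        type, sleeping briefly so repeated calls do not hammer the API."""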
        url = f"/api/fetchAll/{jar_type}/{server_type}"
        response = self._get_api_result(url)
        temp = []
        for v in response:
            temp.append(v.get("version"))
        time.sleep(0.5)
        return temp

    def _get_server_type_list(self):
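        """Return the server type catalogue from serverjars.com with the
        bedrock category removed."""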
        url = "/api/fetchTypes/"
        response = self._get_api_result(url)
        if "bedrock" in response.keys():
            # remove pocketmine from options
            del response["bedrock"]
        return response

    def download_jar(self, jar, server, version, path, server_id):
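        """Start the jar download on a daemon thread so the caller is not
        blocked while the file transfers."""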
        update_thread = threading.Thread(
            name=f"server_download-{server_id}-{server}-{version}",
            target=self.a_download_jar,
            daemon=True,
            args=(jar, server, version, path, server_id),
        )
        update_thread.start()

    def a_download_jar(self, jar, server, version, path, server_id):
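        """Download the requested jar from serverjars.com or the PaperMC API,
        save it to `path`, mark the server import finished, and notify the
        server's users over websockets."""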
        # delaying download for server register to finish
        time.sleep(3)
        if server not in PAPERJARS:
            fetch_url = f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}"
        else:
            build = self.get_paper_build(server, version).get("build", None)
            if not build:
                return
            fetch_url = (
                f"{self.paper_base}/v2/projects"
                f"/{server}/versions/{version}/builds/{build}/downloads/"
                f"{server}-{version}-{build}.jar"
            )
        server_users = PermissionsServers.get_server_user_list(server_id)

        # We need to make sure the server is registered before
        # we submit a db update for its stats.
        while True:
            try:
                ServersController.set_import(server_id)
                for user in server_users:
                    WebSocketManager().broadcast_user(user, "send_start_reload", {})

                break
            except Exception as ex:
                logger.debug(f"Server not registered yet. Delaying download - {ex}")

        # open a file stream
        with requests.get(fetch_url, timeout=2, stream=True) as r:
            success = False
            try:
                with open(path, "wb") as output:
                    shutil.copyfileobj(r.raw, output)
                    # If this is the newer forge version we will run the installer
                    if server == "forge":
                        ServersController.finish_import(server_id, True)
                    else:
                        ServersController.finish_import(server_id)

                    success = True
            except Exception as e:
                logger.error(f"Unable to save jar to {path} due to error: {e}")
                ServersController.finish_import(server_id)
            server_users = PermissionsServers.get_server_user_list(server_id)

            for user in server_users:
                WebSocketManager().broadcast_user(
                    user, "notification", "Executable download finished"
                )
                time.sleep(3)
                WebSocketManager().broadcast_user(user, "send_start_reload", {})
            return success