From 0fab5cd0f399b185d5eb5c6180cd01be4399794c Mon Sep 17 00:00:00 2001 From: Zedifus Date: Fri, 5 Apr 2024 20:01:37 +0100 Subject: [PATCH 1/4] Refactor for temporary hardcoded types, use new health check response. api.serverjars.com not providing fetchTypes route currently, Will revert when available. NOTE: New api does not incl Spigot / Bukkit. Announce this. --- app/classes/minecraft/serverjars.py | 66 +++++++++++++++++++-------- app/classes/shared/main_controller.py | 2 +- app/classes/web/server_handler.py | 2 +- 3 files changed, 48 insertions(+), 22 deletions(-) diff --git a/app/classes/minecraft/serverjars.py b/app/classes/minecraft/serverjars.py index 83731b52..903ce046 100644 --- a/app/classes/minecraft/serverjars.py +++ b/app/classes/minecraft/serverjars.py @@ -12,13 +12,15 @@ from app.classes.shared.file_helpers import FileHelpers from app.classes.shared.websocket_manager import WebSocketManager logger = logging.getLogger(__name__) +# Temp type var, see line(s) #215 #257 #285 +SERVERJARS_TYPES = ["modded", "proxies", "servers", "vanilla"] PAPERJARS = ["paper", "folia"] class ServerJars: def __init__(self, helper): self.helper = helper - self.base_url = "https://serverjars.com" + self.base_url = "https://api.serverjars.com" self.paper_base = "https://api.papermc.io" @staticmethod @@ -168,21 +170,28 @@ class ServerJars: data = self._read_cache() return data.get("types") - def _check_api_alive(self): + def _check_sjars_api_alive(self): logger.info("Checking serverjars.com API status") - check_url = f"{self.base_url}/api/fetchTypes" + check_url = f"{self.base_url}" try: response = requests.get(check_url, timeout=2) - - if response.status_code in [200, 201]: - logger.info("Serverjars.com API is alive") + response_json = response.json() + + if ( + response.status_code in [200, 201] + and response_json.get("status") == "success" + and response_json.get("response", {}).get("status") == "ok" + ): + logger.info("Serverjars.com API is alive and responding as expected") return True except Exception as e: - logger.error(f"Unable to connect to serverjar.com api due to error: {e}") - return {} + logger.error(f"Unable to connect to serverjar.com API due to error: {e}") + return False - logger.error("unable to contact serverjars.com api") + logger.error( + "Serverjars.com API is not responding as expected or unable to contact" + ) return False def manual_refresh_cache(self): @@ -192,7 +201,7 @@ class ServerJars: # cache_old = True # if the API is down... we bomb out - if not self._check_api_alive(): + if not self._check_sjars_api_alive(): return False logger.info("Manual Refresh requested.") @@ -202,7 +211,14 @@ class ServerJars: "types": {}, } - jar_types = self._get_server_type_list() + # jar_types = self._get_server_type_list() + jar_types = { + type_: ( + {paperjar: [] for paperjar in PAPERJARS} if type_ == "servers" else {} + ) + for type_ in SERVERJARS_TYPES + } + data["types"].update(jar_types) for s in data["types"]: data["types"].update({s: dict.fromkeys(data["types"].get(s), {})}) @@ -228,7 +244,7 @@ class ServerJars: # cache_old = True # if the API is down... 
we bomb out - if not self._check_api_alive(): + if not self._check_sjars_api_alive(): return False logger.info("Checking Cache file age") @@ -242,7 +258,16 @@ class ServerJars: "types": {}, } - jar_types = self._get_server_type_list() + # jar_types = self._get_server_type_list() + jar_types = { + type_: ( + {paperjar: [] for paperjar in PAPERJARS} + if type_ == "servers" + else {} + ) + for type_ in SERVERJARS_TYPES + } + data["types"].update(jar_types) for s in data["types"]: data["types"].update({s: dict.fromkeys(data["types"].get(s), {})}) @@ -269,13 +294,14 @@ class ServerJars: time.sleep(0.5) return temp - def _get_server_type_list(self): - url = "/api/fetchTypes/" - response = self._get_api_result(url) - if "bedrock" in response.keys(): - # remove pocketmine from options - del response["bedrock"] - return response + # Disabled temporarily until api.serverjars.com resolve their fetchTypes route + # def _get_server_type_list(self): + # url = "/api/fetchTypes/" + # response = self._get_api_result(url) + # if "bedrock" in response.keys(): + # # remove pocketmine from options + # del response["bedrock"] + # return response def download_jar(self, jar, server, version, path, server_id): update_thread = threading.Thread( diff --git a/app/classes/shared/main_controller.py b/app/classes/shared/main_controller.py index b6e824a5..47e4f6ce 100644 --- a/app/classes/shared/main_controller.py +++ b/app/classes/shared/main_controller.py @@ -575,7 +575,7 @@ class Controller: ): server_obj = self.servers.get_server_obj(new_server_id) url = ( - "https://serverjars.com/api/fetchJar/" + "https://api.serverjars.com/api/fetchJar/" f"{create_data['category']}" f"/{create_data['type']}/{create_data['version']}" ) diff --git a/app/classes/web/server_handler.py b/app/classes/web/server_handler.py index 545029aa..62b76f3c 100644 --- a/app/classes/web/server_handler.py +++ b/app/classes/web/server_handler.py @@ -147,7 +147,7 @@ class ServerHandler(BaseHandler): page_data["server_api"] = False if page_data["online"]: page_data["server_api"] = self.helper.check_address_status( - "https://serverjars.com/api/fetchTypes" + "https://api.serverjars.com" ) page_data["server_types"] = self.controller.server_jars.get_serverjar_data() page_data["js_server_types"] = json.dumps( -- GitLab From cc67ebef76cbb0af9e89c4122a342839a940ebf1 Mon Sep 17 00:00:00 2001 From: Zedifus Date: Sat, 6 Apr 2024 01:38:45 +0100 Subject: [PATCH 2/4] A further clean up and refactor of server jars cache logic Reordered code, and tidy'd based on DRY (Don't Repeat Yourself) --- app/classes/minecraft/serverjars.py | 315 ++++++++++++++-------------- 1 file changed, 157 insertions(+), 158 deletions(-) diff --git a/app/classes/minecraft/serverjars.py b/app/classes/minecraft/serverjars.py index 903ce046..4a9883a9 100644 --- a/app/classes/minecraft/serverjars.py +++ b/app/classes/minecraft/serverjars.py @@ -12,7 +12,7 @@ from app.classes.shared.file_helpers import FileHelpers from app.classes.shared.websocket_manager import WebSocketManager logger = logging.getLogger(__name__) -# Temp type var, see line(s) #215 #257 #285 +# Temp type var until sjars restores generic fetchTypes SERVERJARS_TYPES = ["modded", "proxies", "servers", "vanilla"] PAPERJARS = ["paper", "folia"] @@ -84,76 +84,6 @@ class ServerJars: builds = api_data.get("builds", []) return builds[-1] if builds else None - def get_fetch_url(self, jar, server, version): - """ - Constructs the URL for downloading a server JAR file based on the server type. 
- - Supports two main types of server JAR sources: - - ServerJars API for servers not in PAPERJARS. - - Paper API for servers available through the Paper project. - - Parameters: - jar (str): Name of the JAR file. - server (str): Server software name (e.g., "paper"). - version (str): Server version. - - Returns: - str or None: URL for downloading the JAR file, or None if URL cannot be - constructed or an error occurs. - """ - try: - # Check if the server type is not specifically handled by Paper. - if server not in PAPERJARS: - return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}" - - # For Paper servers, attempt to get the build for the specified version. - paper_build_info = self.get_paper_build(server, version) - if paper_build_info is None: - # Log an error or handle the case where paper_build_info is None - logger.error( - "Error: Unable to get build information for server:" - f" {server}, version: {version}" - ) - return None - - build = paper_build_info.get("build") - if not build: - # Log an error or handle the case where build is None or not found - logger.error( - f"Error: Build number not found for server:" - f" {server}, version: {version}" - ) - return None - - # Construct and return the URL for downloading the Paper server JAR. - return ( - f"{self.paper_base}/v2/projects/{server}/versions/{version}/" - f"builds/{build}/downloads/{server}-{version}-{build}.jar" - ) - except Exception as e: - logger.error(f"An error occurred while constructing fetch URL: {e}") - return None - - def _get_api_result(self, call_url: str): - full_url = f"{self.base_url}{call_url}" - - try: - response = requests.get(full_url, timeout=2) - response.raise_for_status() - api_data = json.loads(response.content) - except Exception as e: - logger.error(f"Unable to load {full_url} api due to error: {e}") - return {} - - api_result = api_data.get("status") - api_response = api_data.get("response", {}) - - if api_result != "success": - logger.error(f"Api returned a failed status: {api_result}") - return {} - - return api_response - def _read_cache(self): cache_file = self.helper.serverjar_cache cache = {} @@ -194,114 +124,183 @@ class ServerJars: ) return False - def manual_refresh_cache(self): - cache_file = self.helper.serverjar_cache + def _fetch_projects_for_type(self, server_type): + """ + Fetches projects for a given server type from the ServerJars API. + """ + try: + response = requests.get( + f"{self.base_url}/api/fetchTypes/{server_type}", timeout=5 + ) + response.raise_for_status() # Ensure HTTP errors are caught + data = response.json() + if data.get("status") == "success": + return data["response"].get("servers", []) + except requests.RequestException as e: + print(f"Error fetching projects for type {server_type}: {e}") + return [] + + def _get_server_type_list(self): + """ + Builds the type structure with projects fetched for each type. 
+ """ + type_structure = {} + for server_type in SERVERJARS_TYPES: + projects = self._fetch_projects_for_type(server_type) + type_structure[server_type] = {project: [] for project in projects} + return type_structure - # debug override - # cache_old = True + def _get_jar_versions(self, server_type, project_name): + """ + Grabs available versions per project + """ + url = f"{self.base_url}/api/fetchAll/{server_type}/{project_name}" + try: + response = requests.get(url, timeout=5) + response.raise_for_status() # Ensure HTTP errors are caught + data = response.json() + logger.debug(f"Received data for {server_type}/{project_name}: {data}") + + if data.get("status") == "success": + versions = [ + item.get("version") + for item in data.get("response", []) + if "version" in item + ] + logger.debug(f"Versions extracted: {versions}") + return versions + except requests.RequestException as e: + logger.error( + f"Error fetching jar versions for {server_type}/{project_name}: {e}" + ) - # if the API is down... we bomb out - if not self._check_sjars_api_alive(): - return False + return [] - logger.info("Manual Refresh requested.") + def _refresh_cache(self): + """ + Contains the shared logic for refreshing the cache. + This method is called by both manual_refresh_cache and refresh_cache methods. + """ now = datetime.now() - data = { + cache_data = { "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"), - "types": {}, + "types": self._get_server_type_list(), } - # jar_types = self._get_server_type_list() - jar_types = { - type_: ( - {paperjar: [] for paperjar in PAPERJARS} if type_ == "servers" else {} + for server_type, projects in cache_data["types"].items(): + for project_name in projects: + versions = self._get_jar_versions(server_type, project_name) + cache_data["types"][server_type][project_name] = versions + + for paper_project in PAPERJARS: + cache_data["types"]["servers"][paper_project] = self.get_paper_versions( + paper_project ) - for type_ in SERVERJARS_TYPES - } - data["types"].update(jar_types) - for s in data["types"]: - data["types"].update({s: dict.fromkeys(data["types"].get(s), {})}) - for j in data["types"].get(s): - versions = self._get_jar_details(j, s) - data["types"][s].update({j: versions}) - for item in PAPERJARS: - data["types"]["servers"][item] = self.get_paper_versions(item) - # save our cache - try: - with open(cache_file, "w", encoding="utf-8") as f: - f.write(json.dumps(data, indent=4)) - logger.info("Cache file refreshed") + return cache_data + def manual_refresh_cache(self): + """ + Manually triggers the cache refresh process. + """ + if not self._check_sjars_api_alive(): + logger.error("ServerJars API is not available.") + return False + + logger.info("Manual cache refresh requested.") + cache_data = self._refresh_cache() + + # Save the updated cache data + try: + with open(self.helper.serverjar_cache, "w", encoding="utf-8") as cache_file: + json.dump(cache_data, cache_file, indent=4) + logger.info("Cache file successfully refreshed manually.") except Exception as e: - logger.error(f"Unable to update serverjars.com cache file: {e}") + logger.error(f"Failed to update cache file manually: {e}") def refresh_cache(self): - cache_file = self.helper.serverjar_cache - cache_old = self.helper.is_file_older_than_x_days(cache_file) + """ + Automatically trigger cache refresh process based age. + + This method checks if the cache file is older than a specified number of days + before deciding to refresh. 
+ """ + cache_file_path = self.helper.serverjar_cache + + # Determine if the cache is old and needs refreshing + cache_old = self.helper.is_file_older_than_x_days(cache_file_path) # debug override # cache_old = True - # if the API is down... we bomb out if not self._check_sjars_api_alive(): + logger.error("ServerJars API is not available.") return False - logger.info("Checking Cache file age") - # if file is older than 1 day - - if cache_old: - logger.info("Cache file is over 1 day old, refreshing") - now = datetime.now() - data = { - "last_refreshed": now.strftime("%m/%d/%Y, %H:%M:%S"), - "types": {}, - } - - # jar_types = self._get_server_type_list() - jar_types = { - type_: ( - {paperjar: [] for paperjar in PAPERJARS} - if type_ == "servers" - else {} + if not cache_old: + logger.info("Cache file is not old enough to require automatic refresh.") + return False + + logger.info("Automatic cache refresh initiated due to old cache.") + cache_data = self._refresh_cache() + + # Save the updated cache data + try: + with open(cache_file_path, "w", encoding="utf-8") as cache_file: + json.dump(cache_data, cache_file, indent=4) + logger.info("Cache file successfully refreshed automatically.") + except Exception as e: + logger.error(f"Failed to update cache file automatically: {e}") + + def get_fetch_url(self, jar, server, version): + """ + Constructs the URL for downloading a server JAR file based on the server type. + + Supports two main types of server JAR sources: + - ServerJars API for servers not in PAPERJARS. + - Paper API for servers available through the Paper project. + + Parameters: + jar (str): Name of the JAR file. + server (str): Server software name (e.g., "paper"). + version (str): Server version. + + Returns: + str or None: URL for downloading the JAR file, or None if URL cannot be + constructed or an error occurs. + """ + try: + # Check if the server type is not specifically handled by Paper. + if server not in PAPERJARS: + return f"{self.base_url}/api/fetchJar/{jar}/{server}/{version}" + + # For Paper servers, attempt to get the build for the specified version. 
+ paper_build_info = self.get_paper_build(server, version) + if paper_build_info is None: + # Log an error or handle the case where paper_build_info is None + logger.error( + "Error: Unable to get build information for server:" + f" {server}, version: {version}" ) - for type_ in SERVERJARS_TYPES - } - - data["types"].update(jar_types) - for s in data["types"]: - data["types"].update({s: dict.fromkeys(data["types"].get(s), {})}) - for j in data["types"].get(s): - versions = self._get_jar_details(j, s) - data["types"][s].update({j: versions}) - for item in PAPERJARS: - data["types"]["servers"][item] = self.get_paper_versions(item) - # save our cache - try: - with open(cache_file, "w", encoding="utf-8") as f: - f.write(json.dumps(data, indent=4)) - logger.info("Cache file refreshed") - - except Exception as e: - logger.error(f"Unable to update serverjars.com cache file: {e}") - - def _get_jar_details(self, server_type, jar_type="servers"): - url = f"/api/fetchAll/{jar_type}/{server_type}" - response = self._get_api_result(url) - temp = [] - for v in response: - temp.append(v.get("version")) - time.sleep(0.5) - return temp - - # Disabled temporarily until api.serverjars.com resolve their fetchTypes route - # def _get_server_type_list(self): - # url = "/api/fetchTypes/" - # response = self._get_api_result(url) - # if "bedrock" in response.keys(): - # # remove pocketmine from options - # del response["bedrock"] - # return response + return None + + build = paper_build_info.get("build") + if not build: + # Log an error or handle the case where build is None or not found + logger.error( + f"Error: Build number not found for server:" + f" {server}, version: {version}" + ) + return None + + # Construct and return the URL for downloading the Paper server JAR. + return ( + f"{self.paper_base}/v2/projects/{server}/versions/{version}/" + f"builds/{build}/downloads/{server}-{version}-{build}.jar" + ) + except Exception as e: + logger.error(f"An error occurred while constructing fetch URL: {e}") + return None def download_jar(self, jar, server, version, path, server_id): update_thread = threading.Thread( -- GitLab From 20d5f856020273152a7f00ae17ea515fb0935473 Mon Sep 17 00:00:00 2001 From: Zedifus Date: Sat, 6 Apr 2024 20:20:45 +0100 Subject: [PATCH 3/4] Increase max versions returned --- app/classes/minecraft/serverjars.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/app/classes/minecraft/serverjars.py b/app/classes/minecraft/serverjars.py index 4a9883a9..d0e90024 100644 --- a/app/classes/minecraft/serverjars.py +++ b/app/classes/minecraft/serverjars.py @@ -12,7 +12,7 @@ from app.classes.shared.file_helpers import FileHelpers from app.classes.shared.websocket_manager import WebSocketManager logger = logging.getLogger(__name__) -# Temp type var until sjars restores generic fetchTypes +# Temp type var until sjars restores generic fetchTypes0 SERVERJARS_TYPES = ["modded", "proxies", "servers", "vanilla"] PAPERJARS = ["paper", "folia"] @@ -150,11 +150,19 @@ class ServerJars: type_structure[server_type] = {project: [] for project in projects} return type_structure - def _get_jar_versions(self, server_type, project_name): + def _get_jar_versions(self, server_type, project_name, max_ver=50): """ - Grabs available versions per project + Grabs available versions for specified project + + Args: + server_type (str): Server Type Category (modded, servers, etc) + project_name (str): Target project (paper, forge, magma, etc) + max (int, optional): Max versions returned. 
Defaults to 50. + + Returns: + list: An array of versions """ - url = f"{self.base_url}/api/fetchAll/{server_type}/{project_name}" + url = f"{self.base_url}/api/fetchAll/{server_type}/{project_name}?max={max_ver}" try: response = requests.get(url, timeout=5) response.raise_for_status() # Ensure HTTP errors are caught -- GitLab From 5d9a3b57ec458cb67a7dce904a740679a642a4b1 Mon Sep 17 00:00:00 2001 From: Zedifus Date: Sat, 6 Apr 2024 21:08:10 +0100 Subject: [PATCH 4/4] Update changelog !744 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b0c03e23..b8450118 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## --- [4.3.2] - 2024/TBD ### New features TBD +### Refactor +- Refactor ServerJars caching and move to api.serverjars.com ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/744)) ### Bug fixes - Fix migrator issue when jumping versions ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/734)) - Fix backend issue causing error when restoring backups in 4.3.x ([Merge Request](https://gitlab.com/crafty-controller/crafty-4/-/merge_requests/736)) -- GitLab
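
For reference alongside PATCH 1/4: a minimal, standalone sketch of the new health probe that replaces the old /api/fetchTypes check. The response shape ({"status": "success", "response": {"status": "ok"}}) is taken from the diff itself; treat it as an assumption about the new api.serverjars.com root endpoint rather than a documented contract.

import requests

def serverjars_api_alive(base_url="https://api.serverjars.com", timeout=2):
    """Return True only when the API root reports a healthy status."""
    try:
        resp = requests.get(base_url, timeout=timeout)
        data = resp.json()
    except (requests.RequestException, ValueError):
        # A network failure or a non-JSON body both count as "not alive".
        return False
    return (
        resp.status_code in (200, 201)
        and data.get("status") == "success"
        and data.get("response", {}).get("status") == "ok"
    )

if __name__ == "__main__":
    print("serverjars API alive:", serverjars_api_alive())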
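
PATCH 2/4 centralises cache building in _refresh_cache(), which nests data as type -> project -> versions and then overwrites the Paper/Folia entries with versions fetched from the PaperMC API. A sketch of the resulting cache file layout; the project names and version strings below are illustrative only, not real API output.

# Illustrative layout of helper.serverjar_cache after a refresh.
example_cache = {
    "last_refreshed": "04/06/2024, 20:20:45",
    "types": {
        "modded":  {"forge": ["1.20.4", "1.20.1"]},      # from fetchTypes/modded + fetchAll
        "proxies": {"velocity": ["3.3.0"]},
        "servers": {
            # PAPERJARS entries are filled from the PaperMC API, not serverjars.
            "paper": ["1.20.4", "1.20.2"],
            "folia": ["1.20.4"],
        },
        "vanilla": {"vanilla": ["1.20.4"]},
    },
}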
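
PATCH 3/4 adds a ?max= query parameter to the fetchAll call so that more than the API's default number of versions comes back. A hedged sketch of that request/parse step; the parameter name and response shape mirror the diff, while the API's actual default and upper limits are assumptions.

import requests

def fetch_versions(server_type, project, max_ver=50,
                   base_url="https://api.serverjars.com"):
    """Return the list of version strings for one project, or [] on failure."""
    url = f"{base_url}/api/fetchAll/{server_type}/{project}?max={max_ver}"
    resp = requests.get(url, timeout=5)
    resp.raise_for_status()  # surface HTTP errors instead of parsing an error page
    payload = resp.json()
    if payload.get("status") != "success":
        return []
    return [item["version"] for item in payload.get("response", []) if "version" in item]

# e.g. fetch_versions("modded", "fabric", max_ver=50)  # hypothetical project name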
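
get_fetch_url (relocated, not behaviourally changed, in PATCH 2/4) resolves to one of two URL families. A small worked example of both branches; the jar/server/version/build values are hypothetical, only the URL templates come from the diff.

base_url = "https://api.serverjars.com"
paper_base = "https://api.papermc.io"

# Non-Paper jars download straight from serverjars' fetchJar route
# (category/type/version, matching the main_controller call site):
jar, server, version = "servers", "purpur", "1.20.4"
serverjars_url = f"{base_url}/api/fetchJar/{jar}/{server}/{version}"

# Paper-family jars need a build number from the PaperMC API first:
server, version, build = "paper", "1.20.4", 496  # build number is hypothetical
paper_url = (
    f"{paper_base}/v2/projects/{server}/versions/{version}/"
    f"builds/{build}/downloads/{server}-{version}-{build}.jar"
)

print(serverjars_url)
print(paper_url)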