From: Roland Häder Date: Wed, 6 Sep 2023 08:58:23 +0000 (+0200) Subject: Continued: X-Git-Url: https://git.mxchange.org/?a=commitdiff_plain;h=c9d13a7ea6337b861a893e041c423f2f14be10f1;p=fba.git Continued: - added last_response_time which is a float that stores the last response time - you have to run following SQL statement on your blocks.db: ALTER TABLE instances ADD last_response_time FLOAT NULL DEFAULT NULL; --- diff --git a/blocks_empty.db b/blocks_empty.db index 56402d5..e6c495f 100644 Binary files a/blocks_empty.db and b/blocks_empty.db differ diff --git a/daemon.py b/daemon.py index bfa0e41..b16e9e9 100755 --- a/daemon.py +++ b/daemon.py @@ -438,7 +438,7 @@ def infos(request: Request, domain: str): tformat = config.get("timestamp_format") instance = dict() for key in domain_data.keys(): - if key in ["last_nodeinfo", "last_blocked", "first_seen", "last_updated", "last_instance_fetch"] and isinstance(domain_data[key], float): + if key in ["last_nodeinfo", "last_blocked", "first_seen", "last_updated", "last_instance_fetch", "last_response_time"] and isinstance(domain_data[key], float): # Timestamps instance[key] = datetime.utcfromtimestamp(domain_data[key]).strftime(tformat) else: diff --git a/fba/commands.py b/fba/commands.py index b7c967e..cb3200c 100644 --- a/fba/commands.py +++ b/fba/commands.py @@ -290,18 +290,18 @@ def fetch_blocks(args: argparse.Namespace) -> int: # Re-check single software logger.debug("Querying database for args.software='%s' ...", args.software) database.cursor.execute( - "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC", [args.software] + "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? 
AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC", [args.software] ) elif args.force: # Re-check all logger.debug("Re-checking all instances ...") database.cursor.execute( - "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC" + "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC" ) else: # Re-check after "timeout" (aka. minimum interval) database.cursor.execute( - "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND (last_blocked IS NULL OR last_blocked < ?) AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC", [time.time() - config.get("recheck_block")] + "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND (last_blocked IS NULL OR last_blocked < ?) AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_block")] ) rows = database.cursor.fetchall() @@ -960,7 +960,7 @@ def fetch_instances(args: argparse.Namespace) -> int: # Loop through some instances database.cursor.execute( - "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) 
ORDER BY total_peers DESC, last_updated ASC", [time.time() - config.get("recheck_instance")] + "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_instance")] ) rows = database.cursor.fetchall() diff --git a/fba/http/csrf.py b/fba/http/csrf.py index eadbc0e..f629cb8 100644 --- a/fba/http/csrf.py +++ b/fba/http/csrf.py @@ -43,8 +43,9 @@ def determine(domain: str, headers: dict) -> dict: # Fetch / to check for meta tag indicating csrf logger.debug("Fetching / from domain='%s' for CSRF check ...", domain) - response = reqto.get( - f"https://{domain}/", + response = network.fetch_response( + domain, + "/", headers=network.web_headers, timeout=(config.get("connection_timeout"), config.get("read_timeout")) ) diff --git a/fba/http/network.py b/fba/http/network.py index 2f307fe..982a1b8 100644 --- a/fba/http/network.py +++ b/fba/http/network.py @@ -18,6 +18,7 @@ import logging import reqto import requests +import time import urllib3 from fba import utils @@ -75,6 +76,7 @@ def post_json_api(domain: str, path: str, data: str = "", headers: dict = dict() try: logger.debug("Sending POST to domain='%s',path='%s',data='%s',headers(%d)='%s'", domain, path, data, len(headers), headers) + start = time.perf_counter() response = reqto.post( f"https://{domain}{path}", data=data, @@ -83,8 +85,12 @@ def post_json_api(domain: str, path: str, data: str = "", headers: dict = dict() cookies=cookies.get_all(domain), allow_redirects=False ) + response_time = time.perf_counter() - start + logger.debug("response_time=%s", response_time) - logger.debug("response.ok='%s',response.status_code=%d,response.reason='%s'", response.ok, 
response.status_code, response.reason) + instances.set_last_response_time(domain, response_time) + + logger.debug("response.ok='%s',response.status_code=%d,response.reason='%s',response_time=%s", response.ok, response.status_code, response.reason, response_time) if response.ok and response.status_code == 200: logger.debug("Parsing JSON response from domain='%s',path='%s' ...", domain, path) json_reply["json"] = json_helper.from_response(response) @@ -253,6 +259,7 @@ def fetch_response(domain: str, path: str, headers: dict, timeout: tuple, allow_ try: logger.debug("Sending GET request to '%s%s' ...", domain, path) + start = time.perf_counter() response = reqto.get( f"https://{domain}{path}", headers=headers, @@ -260,6 +267,12 @@ def fetch_response(domain: str, path: str, headers: dict, timeout: tuple, allow_ cookies=cookies.get_all(domain), allow_redirects=allow_redirects ) + response_time = time.perf_counter() - start + logger.debug("response_time=%s", response_time) + + instances.set_last_response_time(domain, response_time) + + logger.debug("response.ok='%s',response.status_code=%d,response.reason='%s',response_time=%s", response.ok, response.status_code, response.reason, response_time) except exceptions as exception: logger.debug("Fetching path='%s' from domain='%s' failed. 
exception[%s]='%s'", path, domain, type(exception), str(exception)) diff --git a/fba/models/instances.py b/fba/models/instances.py index fe72a66..52d88f3 100644 --- a/fba/models/instances.py +++ b/fba/models/instances.py @@ -62,6 +62,8 @@ _pending = { "last_blocked" : {}, # Last nodeinfo (fetched) "last_nodeinfo" : {}, + # Last response time + "last_response_time" : {}, # Last status code "last_status_code" : {}, # Last error details @@ -398,6 +400,19 @@ def set_last_instance_fetch(domain: str): _set_data("last_instance_fetch", domain, time.time()) logger.debug("EXIT!") +def set_last_response_time(domain: str, response_time: float): + logger.debug("domain='%s',response_time=%s - CALLED!", domain, response_time) + domain_helper.raise_on(domain) + + if not isinstance(response_time, float): + raise ValueError(f"response_time[]='{type(response_time)}' is not of type 'float'") + elif response_time < 0: + raise ValueError(f"response_time={response_time} is below zero") + + # Set response time (a duration in seconds, not a timestamp) + _set_data("last_response_time", domain, response_time) + logger.debug("EXIT!") + def set_total_peers(domain: str, peers: list): logger.debug("domain='%s',peers()=%d - CALLED!", domain, len(peers)) domain_helper.raise_on(domain)