From 176b6a5b76fdb96087f23b88b567bba33e3d3353 Mon Sep 17 00:00:00 2001
From: =?utf8?q?Roland=20H=C3=A4der?=
Date: Fri, 25 Aug 2023 03:02:21 +0200
Subject: [PATCH] Continued:

- renamed update_data() to update()
---
 fba/commands.py           | 20 ++++++++++----------
 fba/helpers/processing.py |  4 ++--
 fba/http/federation.py    |  6 +++---
 fba/models/instances.py   |  6 +++---
 fba/networks/pleroma.py   |  2 +-
 5 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/fba/commands.py b/fba/commands.py
index 1e8da89..e21a057 100644
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -444,7 +444,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
         logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
         if instances.has_pending(blocker):
             logger.debug("Flushing updates for blocker='%s' ...", blocker)
-            instances.update_data(blocker)
+            instances.update(blocker)
 
     logger.debug("Invoking commit() ...")
     database.connection.commit()
@@ -645,7 +645,7 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int:
     logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
     if instances.has_pending(blocker):
         logger.debug("Flushing updates for blocker='%s' ...", blocker)
-        instances.update_data(blocker)
+        instances.update(blocker)
 
     logger.debug("Success! - EXIT!")
     return 0
@@ -752,7 +752,7 @@ def fetch_cs(args: argparse.Namespace):
     logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
     if instances.has_pending(blocker):
         logger.debug("Flushing updates for blocker='%s' ...", blocker)
-        instances.update_data(blocker)
+        instances.update(blocker)
 
     logger.debug("Success! - EXIT!")
     return 0
@@ -944,7 +944,7 @@ def fetch_instances(args: argparse.Namespace) -> int:
     except network.exceptions as exception:
         logger.warning("Exception '%s' during fetching instances (fetch_instances) from args.domain='%s'", type(exception), args.domain)
         instances.set_last_error(args.domain, exception)
-        instances.update_data(args.domain)
+        instances.update(args.domain)
         return 100
 
     if args.single:
@@ -1115,7 +1115,7 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
             logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
             if instances.has_pending(block["blocker"]):
                 logger.debug("Flushing updates for block[blocker]='%s' ...", block["blocker"])
-                instances.update_data(block["blocker"])
+                instances.update(block["blocker"])
 
     logger.debug("Invoking commit() ...")
     database.connection.commit()
@@ -1498,7 +1498,7 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
 
     if instances.has_pending(blocker):
         logger.debug("Flushing updates for blocker='%s' ...", blocker)
-        instances.update_data(blocker)
+        instances.update(blocker)
 
     logger.debug("Invoking commit() ...")
     database.connection.commit()
@@ -1621,7 +1621,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
 
         if instances.has_pending(row["domain"]):
             logger.debug("Flushing updates for blocker='%s' ...", row["domain"])
-            instances.update_data(row["domain"])
+            instances.update(row["domain"])
 
     logger.debug("Invoking commit() ...")
     database.connection.commit()
@@ -1764,7 +1764,7 @@ def update_nodeinfo(args: argparse.Namespace) -> int:
             instances.set_last_error(row["domain"], exception)
 
         instances.set_last_nodeinfo(row["domain"])
-        instances.update_data(row["domain"])
+        instances.update(row["domain"])
         cnt = cnt + 1
 
     logger.debug("Success! - EXIT!")
@@ -1884,7 +1884,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
             logger.warning("Exception '%s' during fetching from relay '%s': '%s'", type(exception), row["domain"], str(exception))
            instances.set_last_error(row["domain"], exception)
             instances.set_last_instance_fetch(row["domain"])
-            instances.update_data(row["domain"])
+            instances.update(row["domain"])
             continue
 
         doc = bs4.BeautifulSoup(raw, features="html.parser")
@@ -1991,7 +1991,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
         instances.set_total_peers(row["domain"], peers)
 
         logger.debug("Flushing data for row[domain]='%s'", row["domain"])
-        instances.update_data(row["domain"])
+        instances.update(row["domain"])
 
     logger.info("Checking %d domains ...", len(domains))
     for row in domains:
diff --git a/fba/helpers/processing.py b/fba/helpers/processing.py
index 3567292..fd7be65 100644
--- a/fba/helpers/processing.py
+++ b/fba/helpers/processing.py
@@ -45,7 +45,7 @@ def domain(name: str, blocker: str, command: str) -> bool:
         logger.debug("name='%s' - DEOBFUSCATED!", name)
         if instances.has_pending(blocker):
             logger.debug("Flushing updates for blocker='%s' ...", blocker)
-            instances.update_data(blocker)
+            instances.update(blocker)
 
     if not domain_helper.is_wanted(name):
         logger.debug("name='%s' is not wanted - SKIPPED!", name)
@@ -66,7 +66,7 @@ def domain(name: str, blocker: str, command: str) -> bool:
     logger.debug("Checking if name='%s' has pending updates ...", name)
     if instances.has_pending(name):
         logger.debug("Flushing updates for name='%s' ...", name)
-        instances.update_data(name)
+        instances.update(name)
 
     logger.debug("processed='%s' - EXIT!", processed)
     return processed
diff --git a/fba/http/federation.py b/fba/http/federation.py
index ba6ac67..8440f40 100644
--- a/fba/http/federation.py
+++ b/fba/http/federation.py
@@ -107,7 +107,7 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
 
     if instances.has_pending(domain):
         logger.debug("Flushing updates for domain='%s' ...", domain)
-        instances.update_data(domain)
+        instances.update(domain)
 
     logger.debug("Invoking cookies.clear(%s) ...", domain)
     cookies.clear(domain)
@@ -151,7 +151,7 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
         logger.debug("Checking if domain='%s' has pending updates ...", domain)
         if instances.has_pending(domain):
             logger.debug("Flushing updates for domain='%s' ...", domain)
-            instances.update_data(domain)
+            instances.update(domain)
 
         logger.debug("instance='%s',origin='%s',_DEPTH=%d reached!", instance, origin, _DEPTH)
         if _DEPTH <= config.get("max_crawl_depth") and len(peerlist) >= config.get("min_peers_length"):
@@ -167,7 +167,7 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
     logger.debug("Checking if domain='%s' has pending updates ...", domain)
     if instances.has_pending(domain):
         logger.debug("Flushing updates for domain='%s' ...", domain)
-        instances.update_data(domain)
+        instances.update(domain)
 
     _DEPTH = _DEPTH - 1
     logger.debug("EXIT!")
diff --git a/fba/models/instances.py b/fba/models/instances.py
index 1bd2884..36191bb 100644
--- a/fba/models/instances.py
+++ b/fba/models/instances.py
@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)
 
 # Found info from node, such as nodeinfo URL, detection mode that needs to be
 # written to database. Both arrays must be filled at the same time or else
-# update_data() will fail
+# update() will fail
 _pending = {
     # Detection mode
     # NULL means all detection methods have failed (maybe still reachable instance)
@@ -103,7 +103,7 @@ def has_pending(domain: str) -> bool:
     logger.debug("has='%s' - EXIT!", has)
     return has
 
-def update_data(domain: str):
+def update(domain: str):
     logger.debug("domain='%s' - CALLED!", domain)
     domain_helper.raise_on(domain)
     if not has_pending(domain):
@@ -217,7 +217,7 @@ def add(domain: str, origin: str, command: str, path: str = None, software: str
 
     logger.debug("Checking if domain='%s' has pending updates ...", domain)
     if has_pending(domain):
         logger.debug("Flushing updates for domain='%s' ...", domain)
-        update_data(domain)
+        update(domain)
 
     logger.debug("EXIT!")
diff --git a/fba/networks/pleroma.py b/fba/networks/pleroma.py
index bd0ae04..2a447a4 100644
--- a/fba/networks/pleroma.py
+++ b/fba/networks/pleroma.py
@@ -68,7 +68,7 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
     if "error_message" in rows:
         logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s'", rows["error_message"], domain, nodeinfo_url)
         instances.set_last_error(domain, rows)
-        instances.update_data(domain)
+        instances.update(domain)
 
         logger.debug("Returning empty list ... - EXIT!")
         return list()
-- 
2.39.5