From 27ce6a581c57c698b0e9424fe05ea03ec9ec022c Mon Sep 17 00:00:00 2001
From: =?utf8?q?Roland=20H=C3=A4der?=
Date: Thu, 22 Jun 2023 21:25:50 +0200
Subject: [PATCH] Continued:

- these fetch_blocks() must return a list()
---
 fba/commands.py           | 2 +-
 fba/networks/friendica.py | 4 ++--
 fba/networks/lemmy.py     | 2 +-
 fba/networks/mastodon.py  | 2 +-
 fba/networks/misskey.py   | 2 +-
 fba/networks/peertube.py  | 2 +-
 fba/networks/pleroma.py   | 8 ++++----
 7 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/fba/commands.py b/fba/commands.py
index e62970d..83934f3 100644
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -209,7 +209,7 @@ def fetch_blocks(args: argparse.Namespace):
         else:
             logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)

-        logger.info("Checking %s entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
+        logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
         for block in blocking:
             logger.debug("blocked='%s',block_level='%s',reason='%s'", block['blocked'], block['block_level'], block['reason'])

diff --git a/fba/networks/friendica.py b/fba/networks/friendica.py
index b8fcdf3..83ab193 100644
--- a/fba/networks/friendica.py
+++ b/fba/networks/friendica.py
@@ -56,12 +56,12 @@ def fetch_blocks(domain: str) -> list:
     except network.exceptions as exception:
         logger.warning("Exception '%s' during fetching instances from domain='%s'", type(exception), domain)
         instances.set_last_error(domain, exception)
-        return blocklist
+        return list()

     # Prevents exceptions:
     if block_tag is None:
         logger.debug("Instance has no block list: domain='%s'", domain)
-        return blocklist
+        return list()

     table = block_tag.find("table")

diff --git a/fba/networks/lemmy.py b/fba/networks/lemmy.py
index a48b3c0..b4e4ea1 100644
--- a/fba/networks/lemmy.py
+++ b/fba/networks/lemmy.py
@@ -51,7 +51,7 @@ def fetch_peers(domain: str) -> list:
     except network.exceptions as exception:
         logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
         instances.set_last_error(domain, exception)
-        return peers
+        return list()

     try:
         logger.debug("Fetching '/api/v3/site' from domain='%s' ...", domain)
diff --git a/fba/networks/mastodon.py b/fba/networks/mastodon.py
index 47f19c0..b705ea6 100644
--- a/fba/networks/mastodon.py
+++ b/fba/networks/mastodon.py
@@ -101,7 +101,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
     logger.debug("doc[]='%s'", type(doc))
     if doc is None:
         logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
-        return blocklist
+        return list()

     for header in doc.find_all("h3"):
         header_text = tidyup.reason(header.text)
diff --git a/fba/networks/misskey.py b/fba/networks/misskey.py
index 85bfc46..58612ae 100644
--- a/fba/networks/misskey.py
+++ b/fba/networks/misskey.py
@@ -50,7 +50,7 @@ def fetch_peers(domain: str) -> list:
     except network.exceptions as exception:
         logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
         instances.set_last_error(domain, exception)
-        return peers
+        return list()

     # iterating through all "suspended" (follow-only in its terminology)
     # instances page-by-page, since that troonware doesn't support
diff --git a/fba/networks/peertube.py b/fba/networks/peertube.py
index d89e7dd..67b59b7 100644
--- a/fba/networks/peertube.py
+++ b/fba/networks/peertube.py
@@ -45,7 +45,7 @@ def fetch_peers(domain: str) -> list:
     except network.exceptions as exception:
         logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
         instances.set_last_error(domain, exception)
-        return peers
+        return list()

     for mode in ["followers", "following"]:
         logger.debug(f"domain='{domain}',mode='{mode}'")
diff --git a/fba/networks/pleroma.py b/fba/networks/pleroma.py
index a9d8adb..8bc2883 100644
--- a/fba/networks/pleroma.py
+++ b/fba/networks/pleroma.py
@@ -62,13 +62,13 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:

     if rows is None:
         logger.warning("Could not fetch nodeinfo from domain='%s'", domain)
-        return
+        return list()
     elif "metadata" not in rows:
         logger.warning("rows()=%d does not have key 'metadata', domain='%s'", len(rows), domain)
-        return
+        return list()
     elif "federation" not in rows["metadata"]:
         logger.warning("rows()=%d does not have key 'federation', domain='%s'", len(rows['metadata']), domain)
-        return
+        return list()

     data = rows["metadata"]["federation"]
     found = False
@@ -484,7 +484,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
     logger.debug("doc[]='%s'", type(doc))
     if doc is None:
         logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
-        return blocklist
+        return list()

     for header in doc.find_all("h2"):
         header_text = tidyup.reason(header.text)
-- 
2.39.5
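
Note on the rationale behind the change: the caller in fba/commands.py calls len() on the
returned value and then iterates over it, so a bare "return" (returning None) or a return of
a variable with another type breaks that contract with a TypeError. The snippet below is a
minimal, self-contained sketch of that contract only; the stub function and the simulated
failure are hypothetical and merely mirror the shape of the patched fetch_peers()/fetch_blocks()
functions, they are not part of this patch.

    def fetch_blocks(domain: str) -> list:
        # Error path: hand back an empty list, never None, so the caller
        # can always call len() on the result and iterate over it.
        try:
            raise ConnectionError("simulated network failure")  # hypothetical failure
        except ConnectionError:
            return list()  # was a bare "return" / "return blocklist" before this patch

    blocking = fetch_blocks("example.com")
    print("Checking %d entries from blocker='%s' ..." % (len(blocking), "example.com"))
    for block in blocking:  # zero iterations instead of a TypeError on None
        print(block)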