git.mxchange.org Git - fba.git/commitdiff
Continued:
author: Roland Häder <roland@mxchange.org>
Sat, 7 Oct 2023 12:42:50 +0000 (14:42 +0200)
committer: Roland Häder <roland@mxchange.org>
Sat, 7 Oct 2023 12:42:50 +0000 (14:42 +0200)
- moved fba/deprecated.py to deprecated/commands.py
- fixed many issues pylint has reported

deprecated/commands.py [new file with mode: 0644]
fba/commands.py
fba/deprecated.py [deleted file]
fba/helpers/processing.py
fba/http/federation.py
fba/http/network.py
fba/models/instances.py
fba/networks/mastodon.py
fba/networks/misskey.py

diff --git a/deprecated/commands.py b/deprecated/commands.py
new file mode 100644 (file)
index 0000000..d79d36f
--- /dev/null
@@ -0,0 +1,194 @@
+# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
+# Copyright (C) 2023 Free Software Foundation
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+def fetch_joinfediverse(args: argparse.Namespace) -> int:
+    logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    source_domain = "joinfediverse.wiki"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 1
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
+    raw = utils.fetch_url(
+        f"https://{source_domain}/FediBlock",
+        network.web_headers,
+        (config.get("connection_timeout"), config.get("read_timeout"))
+    ).text
+    logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
+
+    doc = bs4.BeautifulSoup(raw, "html.parser")
+    logger.debug("doc[]='%s'", type(doc))
+
+    tables = doc.findAll("table", {"class": "wikitable"})
+
+    logger.info("Analyzing %d table(s) ...", len(tables))
+    blocklist = list()
+    for table in tables:
+        logger.debug("table[]='%s'", type(table))
+
+        rows = table.findAll("tr")
+        logger.info("Checking %d row(s) ...", len(rows))
+        block_headers = dict()
+        for row in rows:
+            logger.debug("row[%s]='%s'", type(row), row)
+
+            headers = row.findAll("th")
+            logger.debug("Found headers()=%d header(s)", len(headers))
+            if len(headers) > 1:
+                block_headers = dict()
+                cnt = 0
+                for header in headers:
+                    cnt = cnt + 1
+                    logger.debug("header[]='%s',cnt=%d", type(header), cnt)
+                    text = header.contents[0]
+
+                    logger.debug("text[]='%s'", type(text))
+                    if not isinstance(text, str):
+                        logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
+                        continue
+                    elif validators.domain(text.strip()):
+                        logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
+                        continue
+
+                    text = tidyup.domain(text.strip())
+                    logger.debug("text='%s' - AFTER!", text)
+                    if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
+                        logger.debug("Found header: '%s'=%d", text, cnt)
+                        block_headers[cnt] = text
+
+            elif len(block_headers) == 0:
+                logger.debug("row is not scrapable - SKIPPED!")
+                continue
+            elif len(block_headers) > 0:
+                logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
+                cnt = 0
+                block = dict()
+
+                for element in row.find_all(["th", "td"]):
+                    cnt = cnt + 1
+                    logger.debug("element[]='%s',cnt=%d", type(element), cnt)
+                    if cnt in block_headers:
+                        logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
+
+                        text = element.text.strip()
+                        key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
+
+                        logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
+                        if key in ["domain", "instance"]:
+                            block[key] = text
+                        elif key == "reason":
+                            block[key] = tidyup.reason(text)
+                        elif key == "subdomain(s)":
+                            block[key] = list()
+                            if text != "":
+                                block[key] = text.split("/")
+                        else:
+                            logger.debug("key='%s'", key)
+                            block[key] = text
+
+                logger.debug("block()=%d ...", len(block))
+                if len(block) > 0:
+                    logger.debug("Appending block()=%d ...", len(block))
+                    blocklist.append(block)
+
+    logger.debug("blocklist()=%d", len(blocklist))
+
+    database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
+    domains = database.cursor.fetchall()
+
+    logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
+    blocking = list()
+    for block in blocklist:
+        logger.debug("block='%s'", block)
+        if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
+            origin = block["blocked"]
+            logger.debug("origin='%s'", origin)
+            for subdomain in block["subdomain(s)"]:
+                block["blocked"] = subdomain + "." + origin
+                logger.debug("block[blocked]='%s'", block["blocked"])
+                blocking.append(block)
+        else:
+            blocking.append(block)
+
+    logger.debug("blocking()=%d", blocking)
+    for block in blocking:
+        logger.debug("block[]='%s'", type(block))
+        if "blocked" not in block:
+            raise KeyError(f"block()={len(block)} does not have element 'blocked'")
+
+        block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
+        logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
+        if block["blocked"] == "":
+            logger.debug("block[blocked] is empty - SKIPPED!")
+            continue
+        elif not domain_helper.is_wanted(block["blocked"]):
+            logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
+            continue
+        elif instances.is_recent(block["blocked"]):
+            logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
+            continue
+
+        logger.debug("Proccessing blocked='%s' ...", block["blocked"])
+        processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
+
+    blockdict = list()
+    for blocker in domains:
+        blocker = blocker[0]
+        logger.debug("blocker[%s]='%s'", type(blocker), blocker)
+        instances.set_last_blocked(blocker)
+
+        for block in blocking:
+            logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
+            block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
+
+            logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
+            if block["blocked"] == "":
+                logger.debug("block[blocked] is empty - SKIPPED!")
+                continue
+            elif not domain_helper.is_wanted(block["blocked"]):
+                logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+                continue
+
+            logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
+            if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
+                logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
+                blockdict.append({
+                    "blocked": block["blocked"],
+                    "reason" : block["reason"],
+                })
+
+        if instances.has_pending(blocker):
+            logger.debug("Flushing updates for blocker='%s' ...", blocker)
+            instances.update(blocker)
+
+        logger.debug("Invoking commit() ...")
+        database.connection.commit()
+
+        logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
+        if config.get("bot_enabled") and len(blockdict) > 0:
+            logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", blocker, len(blockdict))
+            network.send_bot_post(blocker, blockdict)
+
+    logger.debug("Success! - EXIT!")
+    return 0
index 5589af7198a8d2629a5708581f7e37dba3ef125e..2142bccddaffb4b784cc880bcef0e6e47c9f42dc 100644 (file)
@@ -383,7 +383,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
                 obfuscated = obfuscated + 1
 
                 # Some friendica servers also obscure domains without hash
-                row = instances.deobfuscate("*", block["blocked"], block["hash"] if "hash" in block else None)
+                row = instances.deobfuscate("*", block["blocked"], block["digest"] if "digest" in block else None)
 
                 logger.debug("row[]='%s'", type(row))
                 if row is None:
@@ -400,7 +400,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
                 obfuscated = obfuscated + 1
 
                 # Some obscure them with question marks, not sure if that's dependent on version or not
-                row = instances.deobfuscate("?", block["blocked"], block["hash"] if "hash" in block else None)
+                row = instances.deobfuscate("?", block["blocked"], block["digest"] if "digest" in block else None)
 
                 logger.debug("row[]='%s'", type(row))
                 if row is None:
@@ -541,7 +541,7 @@ def fetch_observer(args: argparse.Namespace) -> int:
         for item in items:
             logger.debug("item[]='%s'", type(item))
             domain = item.decode_contents()
-            domain = tidyup.domain(domain) if domain != None and domain != "" else None
+            domain = tidyup.domain(domain) if domain not in [None, ""] else None
             logger.debug("domain='%s' - AFTER!", domain)
 
             if domain is None or domain == "":
@@ -799,7 +799,7 @@ def fetch_fba_rss(args: argparse.Namespace) -> int:
         for item in rss.items:
             logger.debug("item[%s]='%s'", type(item), item)
             domain = item.link.split("=")[1]
-            domain = tidyup.domain(domain) if domain != None and domain != "" else None
+            domain = tidyup.domain(domain) if domain not in[None, ""] else None
 
             logger.debug("domain='%s' - AFTER!", domain)
             if domain is None or domain == "":
@@ -883,7 +883,7 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
                 logger.debug("element[]='%s'", type(element))
                 for href in element["href"].split(","):
                     logger.debug("href[%s]='%s' - BEFORE!", type(href), href)
-                    domain = tidyup.domain(href) if href != None and href != "" else None
+                    domain = tidyup.domain(href) if href not in [None, ""] else None
 
                     logger.debug("domain='%s' - AFTER!", domain)
                     if domain is None or domain == "":
@@ -1078,7 +1078,7 @@ def fetch_txt(args: argparse.Namespace) -> int:
             logger.info("Processing %d domains ...", len(domains))
             for domain in domains:
                 logger.debug("domain='%s' - BEFORE!", domain)
-                domain = tidyup.domain(domain) if domain != None and domain != "" else None
+                domain = tidyup.domain(domain) if domain not in[None, ""] else None
 
                 logger.debug("domain='%s' - AFTER!", domain)
                 if domain is None or domain == "":
@@ -1134,7 +1134,7 @@ def fetch_fedipact(args: argparse.Namespace) -> int:
         logger.info("Checking %d row(s) ...", len(rows))
         for row in rows:
             logger.debug("row[]='%s'", type(row))
-            domain = tidyup.domain(row.contents[0]) if row.contents[0] != None and row.contents[0] != "" else None
+            domain = tidyup.domain(row.contents[0]) if row.contents[0] not in [None, ""] else None
 
             logger.debug("domain='%s' - AFTER!", domain)
             if domain is None or domain == "":
@@ -1331,7 +1331,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
             elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
                 logger.debug("block='%s' is obfuscated.", block["blocked"])
                 obfuscated = obfuscated + 1
-                blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+                blocked = utils.deobfuscate(block["blocked"], row["domain"], block["digest"] if "digest" in block else None)
             elif not domain_helper.is_wanted(block["blocked"]):
                 logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
                 continue
@@ -1432,7 +1432,7 @@ def fetch_fedilist(args: argparse.Namespace) -> int:
             continue
 
         logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
-        domain = tidyup.domain(row["hostname"]) if row["hostname"] != None and row["hostname"] != "" else None
+        domain = tidyup.domain(row["hostname"]) if row["hostname"] not in [None, ""] else None
         logger.debug("domain='%s' - AFTER!", domain)
 
         if domain is None or domain == "":
@@ -1572,7 +1572,7 @@ def fetch_instances_social(args: argparse.Namespace) -> int:
     logger.info("Checking %d row(s) ...", len(rows))
     for row in rows:
         logger.debug("row[]='%s'", type(row))
-        domain = tidyup.domain(row["name"]) if row["name"] != None and row["name"] != "" else None
+        domain = tidyup.domain(row["name"]) if row["name"] not in [None, ""] else None
         logger.debug("domain='%s' - AFTER!", domain)
 
         if domain is None and domain == "":
@@ -1644,7 +1644,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                     instances.set_last_instance_fetch(row["domain"])
                     instances.update(row["domain"])
                     continue
-                elif not "json" in raw:
+                elif "json" not in raw:
                     logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
                     continue
                 elif not "metadata" in raw["json"]:
@@ -1700,7 +1700,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                         continue
 
                     logger.debug("domain='%s' - BEFORE!", domain)
-                    domain = tidyup.domain(domain) if domain != None and domain != "" else None
+                    domain = tidyup.domain(domain) if domain not in[None, ""] else None
                     logger.debug("domain='%s' - AFTER!", domain)
 
                     if domain is None or domain == "":
@@ -1741,7 +1741,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                 domain = components.netloc.lower().split(":")[0]
 
                 logger.debug("domain='%s' - BEFORE!", domain)
-                domain = tidyup.domain(domain) if domain != None and domain != "" else None
+                domain = tidyup.domain(domain) if domain not in[None, ""] else None
                 logger.debug("domain='%s' - AFTER!", domain)
 
                 if domain is None or domain == "":
@@ -1764,7 +1764,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
             logger.debug("Checking %d peer(s) row[domain]='%s' ...", len(raw["json"]["metadata"]["peers"]), row["domain"])
             for domain in raw["json"]["metadata"]["peers"]:
                 logger.debug("domain='%s' - BEFORE!", domain)
-                domain = tidyup.domain(domain) if domain != None and domain != "" else None
+                domain = tidyup.domain(domain) if domain not in[None, ""] else None
                 logger.debug("domain='%s' - AFTER!", domain)
 
                 if domain is None or domain == "":
diff --git a/fba/deprecated.py b/fba/deprecated.py
deleted file mode 100644 (file)
index d79d36f..0000000
+++ /dev/null
@@ -1,194 +0,0 @@
-# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
-# Copyright (C) 2023 Free Software Foundation
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-def fetch_joinfediverse(args: argparse.Namespace) -> int:
-    logger.debug("args[]='%s' - CALLED!", type(args))
-
-    logger.debug("Invoking locking.acquire() ...")
-    locking.acquire()
-
-    source_domain = "joinfediverse.wiki"
-    if sources.is_recent(source_domain):
-        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
-        return 1
-    else:
-        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
-        sources.update(source_domain)
-
-    logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
-    raw = utils.fetch_url(
-        f"https://{source_domain}/FediBlock",
-        network.web_headers,
-        (config.get("connection_timeout"), config.get("read_timeout"))
-    ).text
-    logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
-
-    doc = bs4.BeautifulSoup(raw, "html.parser")
-    logger.debug("doc[]='%s'", type(doc))
-
-    tables = doc.findAll("table", {"class": "wikitable"})
-
-    logger.info("Analyzing %d table(s) ...", len(tables))
-    blocklist = list()
-    for table in tables:
-        logger.debug("table[]='%s'", type(table))
-
-        rows = table.findAll("tr")
-        logger.info("Checking %d row(s) ...", len(rows))
-        block_headers = dict()
-        for row in rows:
-            logger.debug("row[%s]='%s'", type(row), row)
-
-            headers = row.findAll("th")
-            logger.debug("Found headers()=%d header(s)", len(headers))
-            if len(headers) > 1:
-                block_headers = dict()
-                cnt = 0
-                for header in headers:
-                    cnt = cnt + 1
-                    logger.debug("header[]='%s',cnt=%d", type(header), cnt)
-                    text = header.contents[0]
-
-                    logger.debug("text[]='%s'", type(text))
-                    if not isinstance(text, str):
-                        logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
-                        continue
-                    elif validators.domain(text.strip()):
-                        logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
-                        continue
-
-                    text = tidyup.domain(text.strip())
-                    logger.debug("text='%s' - AFTER!", text)
-                    if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
-                        logger.debug("Found header: '%s'=%d", text, cnt)
-                        block_headers[cnt] = text
-
-            elif len(block_headers) == 0:
-                logger.debug("row is not scrapable - SKIPPED!")
-                continue
-            elif len(block_headers) > 0:
-                logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
-                cnt = 0
-                block = dict()
-
-                for element in row.find_all(["th", "td"]):
-                    cnt = cnt + 1
-                    logger.debug("element[]='%s',cnt=%d", type(element), cnt)
-                    if cnt in block_headers:
-                        logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
-
-                        text = element.text.strip()
-                        key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
-
-                        logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
-                        if key in ["domain", "instance"]:
-                            block[key] = text
-                        elif key == "reason":
-                            block[key] = tidyup.reason(text)
-                        elif key == "subdomain(s)":
-                            block[key] = list()
-                            if text != "":
-                                block[key] = text.split("/")
-                        else:
-                            logger.debug("key='%s'", key)
-                            block[key] = text
-
-                logger.debug("block()=%d ...", len(block))
-                if len(block) > 0:
-                    logger.debug("Appending block()=%d ...", len(block))
-                    blocklist.append(block)
-
-    logger.debug("blocklist()=%d", len(blocklist))
-
-    database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
-    domains = database.cursor.fetchall()
-
-    logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
-    blocking = list()
-    for block in blocklist:
-        logger.debug("block='%s'", block)
-        if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
-            origin = block["blocked"]
-            logger.debug("origin='%s'", origin)
-            for subdomain in block["subdomain(s)"]:
-                block["blocked"] = subdomain + "." + origin
-                logger.debug("block[blocked]='%s'", block["blocked"])
-                blocking.append(block)
-        else:
-            blocking.append(block)
-
-    logger.debug("blocking()=%d", blocking)
-    for block in blocking:
-        logger.debug("block[]='%s'", type(block))
-        if "blocked" not in block:
-            raise KeyError(f"block()={len(block)} does not have element 'blocked'")
-
-        block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
-        logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
-
-        if block["blocked"] == "":
-            logger.debug("block[blocked] is empty - SKIPPED!")
-            continue
-        elif not domain_helper.is_wanted(block["blocked"]):
-            logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
-            continue
-        elif instances.is_recent(block["blocked"]):
-            logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
-            continue
-
-        logger.debug("Proccessing blocked='%s' ...", block["blocked"])
-        processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
-
-    blockdict = list()
-    for blocker in domains:
-        blocker = blocker[0]
-        logger.debug("blocker[%s]='%s'", type(blocker), blocker)
-        instances.set_last_blocked(blocker)
-
-        for block in blocking:
-            logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
-            block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
-
-            logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
-            if block["blocked"] == "":
-                logger.debug("block[blocked] is empty - SKIPPED!")
-                continue
-            elif not domain_helper.is_wanted(block["blocked"]):
-                logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
-                continue
-
-            logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
-            if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
-                logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
-                blockdict.append({
-                    "blocked": block["blocked"],
-                    "reason" : block["reason"],
-                })
-
-        if instances.has_pending(blocker):
-            logger.debug("Flushing updates for blocker='%s' ...", blocker)
-            instances.update(blocker)
-
-        logger.debug("Invoking commit() ...")
-        database.connection.commit()
-
-        logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
-        if config.get("bot_enabled") and len(blockdict) > 0:
-            logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", blocker, len(blockdict))
-            network.send_bot_post(blocker, blockdict)
-
-    logger.debug("Success! - EXIT!")
-    return 0
index 0face7f3317b19b2a178cad29c81a59a81981bfe..d70280781502b53d135ab69a22425df92837aa85 100644 (file)
@@ -147,11 +147,11 @@ def csv_block(blocker: str, url: str, command: str):
         reject_media = reject_reports = False
 
         if "#domain" in row:
-            domain = tidyup.domain(row["#domain"]) if row["#domain"] != None and row["#domain"] != "" else None
+            domain = tidyup.domain(row["#domain"]) if row["#domain"] not in [None, ""] else None
         elif "domain" in row:
-            domain = tidyup.domain(row["domain"]) if row["domain"] != None and row["domain"] != "" else None
+            domain = tidyup.domain(row["domain"]) if row["domain"] not in [None, ""] else None
         elif "Domain" in row:
-            domain = tidyup.domain(row["Domain"]) if row["Domain"] != None and row["Domain"] != "" else None
+            domain = tidyup.domain(row["Domain"]) if row["Domain"] not in [None, ""] else None
         else:
             logger.warning("row='%s' does not contain domain column - SKIPPED!", row)
             continue
@@ -165,11 +165,11 @@ def csv_block(blocker: str, url: str, command: str):
             severity = "reject"
 
         if "reason" in row:
-            reason = tidyup.reason(row["reason"]) if row["reason"] != None and row["reason"] != "" else None
+            reason = tidyup.reason(row["reason"]) if row["reason"] not in [None, ""] else None
         elif "comment" in row:
-            reason = tidyup.reason(row["comment"]) if row["comment"] != None and row["comment"] != "" else None
+            reason = tidyup.reason(row["comment"]) if row["comment"] not in [None, ""] else None
         elif "Comment" in row:
-            reason = tidyup.reason(row["Comment"]) if row["Comment"] != None and row["Comment"] != "" else None
+            reason = tidyup.reason(row["Comment"]) if row["Comment"] not in [None, ""] else None
         else:
             logger.debug("row='%s' has no reason/comment key provided", row)
 
index c18d5430deca9166494dbbb00f3aef9d50ec4338..46c1da60d70966f45592c250fe21026e0c883d6c 100644 (file)
@@ -87,7 +87,7 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
         logger.debug("software='%s'", software)
         if software_helper.is_relay(software):
             logger.debug("software='%s' is a relay software - EXIT!", software)
-            return list()
+            return
 
     logger.debug("Updating last_instance_fetch for domain='%s' ...", domain)
     instances.set_last_instance_fetch(domain)
@@ -618,7 +618,7 @@ def fetch_blocks(domain: str) -> list:
                 blocklist.append({
                     "blocker"    : domain,
                     "blocked"    : block["domain"],
-                    "hash"       : block["digest"] if "digest" in block else None,
+                    "digest"     : block["digest"] if "digest" in block else None,
                     "reason"     : reason,
                     "block_level": blocks.alias_block_level(block["severity"]),
                 })
index 982a1b8ce103da62d650ac3ade56b2d7ea3b4dda..3b5542ee3a9da47c4fd45c9b119ee0b00ee6f2c2 100644 (file)
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 import logging
+import time
 
 import reqto
 import requests
-import time
 import urllib3
 
 from fba import utils
index 4a10f8bb669377db59185e761af41c06d7e93021..f8e28b3f1608dade747fc687c73a4de8844966d1 100644 (file)
@@ -410,9 +410,9 @@ def set_last_response_time(domain: str, response_time: float):
     domain_helper.raise_on(domain)
 
     if not isinstance(response_time, float):
-        raise ValueException(f"response_time[]='{type(response_time)}' is not of type 'float'")
+        raise ValueError(f"response_time[]='{type(response_time)}' is not of type 'float'")
     elif response_time < 0:
-        raise ValueException(f"response_time={response_time} is below zero")
+        raise ValueError(f"response_time={response_time} is below zero")
 
     # Set timestamp
     _set_data("last_response_time", domain, response_time)
index 88fd35fdd8648ab0fd5f4a96cd10ded3de40d89d..85cd5c714c0c26e479758529ce4f9b18ee7ec381 100644 (file)
@@ -23,7 +23,6 @@ from fba.helpers import config
 from fba.helpers import domain as domain_helper
 from fba.helpers import tidyup
 
-from fba.http import federation
 from fba.http import network
 
 from fba.models import blocks
@@ -116,7 +115,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
             # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
             for line in header.find_all_next("table")[0].find_all("tr")[1:]:
                 domain = line.find("span").text
-                hash   = line.find("span")["title"][9:]
+                digest = line.find("span")["title"][9:]
                 reason = line.find_all("td")[1].text
 
                 logger.debug("domain='%s',reason='%s' - BEFORE!", domain, reason)
@@ -128,10 +127,10 @@ def fetch_blocks_from_about(domain: str) -> dict:
                     logger.warning("domain='%s' is empty,line='%s' - SKIPPED!", domain, line)
                     continue
 
-                logger.debug("Appending domain='%s',hash='%s',reason='%s' to blocklist header_text='%s' ...", domain, hash, reason, blocklist)
+                logger.debug("Appending domain='%s',digest='%s',reason='%s' to blocklist header_text='%s' ...", domain, digest, reason, blocklist)
                 blocklist[header_text].append({
                     "domain": domain,
-                    "hash"  : hash,
+                    "digest": digest,
                     "reason": reason,
                 })
         else:
@@ -188,7 +187,7 @@ def fetch_blocks(domain: str) -> list:
             blocklist.append({
                 "blocker"    : domain,
                 "blocked"    : block["domain"],
-                "hash"       : block["digest"] if "digest" in block else None,
+                "digest"     : block["digest"] if "digest" in block else None,
                 "reason"     : reason,
                 "block_level": blocks.alias_block_level(block["severity"]),
             })
index 7217b860db2c6f6b50c2310487374bc8ad351168..b57c252711c7e02fe55e1d1f15301076449ea351 100644 (file)
@@ -203,8 +203,8 @@ def fetch_blocks(domain: str) -> list:
                     logger.warning("instance(%d)='%s' has no key 'host' - SKIPPED!", len(instance), instance)
                     continue
                 elif instance["host"] is None or instance["host"] == "":
-                   logger.debug("instance[host]='%s' is None or empty - SKIPPED!", instance["host"])
-                   continue
+                    logger.debug("instance[host]='%s' is None or empty - SKIPPED!", instance["host"])
+                    continue
 
                 logger.debug("instance[host]='%s' - BEFORE!", instance["host"])
                 blocked = tidyup.domain(instance["host"])