]> git.mxchange.org Git - fba.git/blobdiff - fba/commands.py
Continued:
[fba.git] / fba / commands.py
index b000e3dcc980073a2ac4a34ba4b50d6dd876fefb..7886d379a30c87f1c0dd0189560690a64ff9cba5 100644 (file)
@@ -148,8 +148,8 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
             if "domain" not in row:
                 logger.warning("row='%s' does not contain element 'domain' - SKIPPED!", row)
                 continue
-            elif row["domain"] == "":
-                logger.debug("row[domain] is empty - SKIPPED!")
+            elif row["domain"] is None or row["domain"] == "":
+                logger.debug("row[domain]='%s' is empty - SKIPPED!", row["domain"])
                 continue
 
             logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
@@ -224,8 +224,8 @@ def fetch_bkali(args: argparse.Namespace) -> int:
             if "domain" not in entry:
                 logger.warning("entry()=%d does not contain 'domain' - SKIPPED!", len(entry))
                 continue
-            elif entry["domain"] == "":
-                logger.debug("entry[domain] is empty - SKIPPED!")
+            elif entry["domain"] is None or entry["domain"] == "":
+                logger.debug("entry[domain]='%s' is empty - SKIPPED!", entry["domain"])
                 continue
             elif not domain_helper.is_wanted(entry["domain"]):
                 logger.debug("entry[domain]='%s' is not wanted - SKIPPED!", entry["domain"])
@@ -365,12 +365,15 @@ def fetch_blocks(args: argparse.Namespace) -> int:
             block["reason"]  = tidyup.reason(block["reason"]) if block["reason"] is not None and block["reason"] != "" else None
             logger.debug("blocked='%s',reason='%s' - AFTER!", block["blocked"], block["reason"])
 
-            if block["blocked"] == "":
-                logger.warning("blocked is empty, blocker='%s'", blocker)
+            if block["blocked"] is None or block["blocked"] == "":
+                logger.warning("block[blocked]='%s' is empty, blocker='%s'", block["blocked"], blocker)
                 continue
             elif block["blocked"].endswith(".onion"):
                 logger.debug("blocked='%s' is a TOR .onion domain - SKIPPED", block["blocked"])
                 continue
+            elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true":
+                logger.debug("blocked='%s' is an I2P domain - SKIPPED", block["blocked"])
+                continue
             elif block["blocked"].endswith(".arpa"):
                 logger.debug("blocked='%s' is a reverse IP address - SKIPPED", block["blocked"])
                 continue
@@ -412,9 +415,9 @@ def fetch_blocks(args: argparse.Namespace) -> int:
                 origin           = row["origin"]
                 nodeinfo_url     = row["nodeinfo_url"]
 
-            logger.debug("Looking up instance by domainm, blocked='%s'", block["blocked"])
-            if block["blocked"] == "":
-                logger.debug("block[blocked] is empty - SKIPPED!")
+            logger.debug("Looking up instance by domain, blocked='%s'", block["blocked"])
+            if block["blocked"] is None or block["blocked"] == "":
+                logger.debug("block[blocked]='%s' is empty - SKIPPED!", block["blocked"])
                 continue
 
             logger.debug("block[blocked]='%s' - BEFORE!", block["blocked"])
@@ -541,6 +544,7 @@ def fetch_observer(args: argparse.Namespace) -> int:
         for item in items:
             logger.debug("item[]='%s'", type(item))
             domain = item.decode_contents()
+            logger.debug("domain[%s]='%s'", type(domain), domain)
             domain = tidyup.domain(domain) if domain not in [None, ""] else None
             logger.debug("domain='%s' - AFTER!", domain)
 
@@ -878,6 +882,7 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
             logger.debug("entry[]='%s'", type(entry))
             doc = bs4.BeautifulSoup(entry.content.value, "html.parser")
             logger.debug("doc[]='%s'", type(doc))
+
             for element in doc.findAll("a"):
                 logger.debug("element[]='%s'", type(element))
                 for href in element["href"].split(","):
@@ -1258,11 +1263,11 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
     locking.acquire()
 
     if isinstance(args.domain, str) and args.domain != "" and domain_helper.is_wanted(args.domain):
-        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND domain = ?", [args.domain])
+        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND domain = ?", [args.domain])
     elif isinstance(args.software, str) and args.software != "" and validators.domain(args.software) == args.software:
-        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND software = ?", [args.software])
+        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND software = ?", [args.software])
     else:
-        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1")
+        database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 OR has_obfuscation IS NULL")
 
     rows = database.cursor.fetchall()
     logger.info("Checking %d domains ...", len(rows))
@@ -1313,15 +1318,18 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
             if block["blocked"] == "":
                 logger.debug("block[blocked] is empty - SKIPPED!")
                 continue
+            elif block["blocked"].endswith(".onion"):
+                logger.debug("blocked='%s' is a TOR onion domain name - SKIPPED!", block["blocked"])
+                continue
+            elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true":
+                logger.debug("blocked='%s' is an I2P domain name - SKIPPED!", block["blocked"])
+                continue
             elif block["blocked"].endswith(".arpa"):
                 logger.debug("blocked='%s' is a reversed IP address - SKIPPED!", block["blocked"])
                 continue
             elif block["blocked"].endswith(".tld"):
                 logger.debug("blocked='%s' is a fake domain name - SKIPPED!", block["blocked"])
                 continue
-            elif block["blocked"].endswith(".onion"):
-                logger.debug("blocked='%s' is a TOR onion domain name - SKIPPED!", block["blocked"])
-                continue
             elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
                 logger.debug("block='%s' is obfuscated.", block["blocked"])
                 obfuscated = obfuscated + 1
@@ -1359,6 +1367,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
                     })
 
         logger.debug("Setting obfuscated=%d for row[domain]='%s' ...", obfuscated, row["domain"])
+        instances.set_has_obfuscation(row["domain"], (obfuscated > 0))
         instances.set_obfuscated_blocks(row["domain"], obfuscated)
 
         logger.info("domain='%s' has %d obfuscated domain(s)", row["domain"], obfuscated)
@@ -1545,8 +1554,8 @@ def fetch_instances_social(args: argparse.Namespace) -> int:
     fetched = network.get_json_api(
         source_domain,
         "/api/1.0/instances/list?count=0&sort_by=name",
-        headers,
-        (config.get("connection_timeout"), config.get("read_timeout"))
+        headers=headers,
+        timeout=(config.get("connection_timeout"), config.get("read_timeout"))
     )
     logger.debug("fetched[]='%s'", type(fetched))
 
@@ -1639,8 +1648,8 @@ def fetch_relaylist(args: argparse.Namespace) -> int:
     for row in fetched["json"]:
         logger.debug("row[]='%s'", type(row))
         domain = urlparse(row["url"]).netloc.lower().split(":")[0]
-
         logger.debug("domain='%s' - AFTER!", domain)
+
         if domain is None and domain == "":
             logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
             continue
@@ -1776,6 +1785,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                         logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
                         peers.append(domain)
 
+                    logger.debug("domains()=%d,domain='%s'", len(domains), domain)
                     if dict_helper.has_key(domains, "domain", domain):
                         logger.debug("domain='%s' already added", domain)
                         continue
@@ -1817,6 +1827,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                     logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
                     peers.append(domain)
 
+                logger.debug("domains()=%d,domain='%s'", len(domains), domain)
                 if dict_helper.has_key(domains, "domain", domain):
                     logger.debug("domain='%s' already added", domain)
                     continue
@@ -1840,11 +1851,12 @@ def fetch_relays(args: argparse.Namespace) -> int:
                     logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
                     peers.append(domain)
 
+                logger.debug("domains()=%d,domain='%s'", len(domains), domain)
                 if dict_helper.has_key(domains, "domain", domain):
                     logger.debug("domain='%s' already added", domain)
                     continue
 
-                logger.debug("Appending domain='%s',origin='%s',software='%s'", domain, row["domain"], row["software"])
+                logger.debug("Appending domain='%s',origin='%s',software='%s' ...", domain, row["domain"], row["software"])
                 domains.append({
                     "domain": domain,
                     "origin": row["domain"],