]> git.mxchange.org Git - fba.git/commitdiff
Continued:
authorRoland Häder <roland@mxchange.org>
Sat, 24 Jun 2023 07:24:01 +0000 (09:24 +0200)
committerRoland Häder <roland@mxchange.org>
Sat, 24 Jun 2023 07:24:01 +0000 (09:24 +0200)
- multi-fetch for fetch_txt
- always write pending updates after a domain has been worked on

fba/commands.py
fba/http/federation.py
fba/networks/friendica.py
fba/utils.py

index 0c88e21cbcfbd11f6bffc35514b9776293500430..5ba69abc1afdd0ae671cde9c91a6c4bdb660e022 100644 (file)
@@ -370,7 +370,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
 def fetch_observer(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
     types = [
-        "akoma",
+        "akkoma",
         "birdsitelive",
         "bookwyrm",
         "calckey",
@@ -825,14 +825,15 @@ def fetch_txt(args: argparse.Namespace) -> int:
     locking.acquire()
 
     # Static URLs
-    urls = (
-        "https://seirdy.one/pb/bsl.txt",
-    )
+    urls = ({
+        "blocker": "seirdy.one",
+        "url"    : "https://seirdy.one/pb/bsl.txt",
+    },)
 
     logger.info("Checking %d text file(s) ...", len(urls))
-    for url in urls:
-        logger.debug("Fetching url='%s' ...", url)
-        response = utils.fetch_url(url, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+    for row in urls:
+        logger.debug("Fetching row[url]='%s' ...", row['url'])
+        response = utils.fetch_url(row['url'], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
         if response.ok and response.status_code < 300 and response.text != "":
@@ -849,8 +850,8 @@ def fetch_txt(args: argparse.Namespace) -> int:
                     logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
                     continue
 
-                logger.debug("domain='%s'", domain)
-                processed = utils.process_domain(domain, 'seirdy.one', inspect.currentframe().f_code.co_name)
+                logger.debug("domain='%s',row[blocker]='%s'", domain, row['blocker'])
+                processed = utils.process_domain(domain, row['blocker'], inspect.currentframe().f_code.co_name)
 
                 logger.debug("processed='%s'", processed)
                 if not processed:
index ee2fadcb747e9eba73ae1c9014d37769eacfae40..e3b93b1e5c6bf9d46da084ff76a98d70c7d867ce 100644 (file)
@@ -91,12 +91,13 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
         logger.debug("Invoking instances.set_total_peerlist(%s,%d) ...", domain, len(peerlist))
         instances.set_total_peers(domain, peerlist)
 
+    if instances.has_pending(domain):
+        logger.debug("domain='%s' has pending nodeinfo data, flushing ...", domain)
+        instances.update_data(domain)
+
     if peerlist is None:
         logger.warning("Cannot fetch peers: domain='%s'", domain)
         return
-    elif instances.has_pending(domain):
-        logger.debug("domain='%s' has pending nodeinfo data, flushing ...", domain)
-        instances.update_data(domain)
 
     logger.info("Checking %d instance(s) from domain='%s',software='%s' ...", len(peerlist), domain, software)
     for instance in peerlist:
index e8c5b4ed44d3e60d9fce624c918353bd9cdf8dbf..459f94872aca8057cfbbf09afad2b8429ac4a04e 100644 (file)
@@ -79,7 +79,10 @@ def fetch_blocks(domain: str) -> list:
         logger.debug("blocked='%s',reason='%s'", blocked, reason)
 
         if blocked == "":
-            logger.debug("line[]='%s' returned empty blocked domain - SKIPPED!")
+            logger.debug("line[]='%s' returned empty blocked domain - SKIPPED!", type(line))
+            continue
+        elif not utils.is_domain_wanted(blocked):
+            logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
             continue
 
         blocked = utils.deobfuscate_domain(blocked, domain)
index bdda2072c31c5587c4325dbdea7a3c2414813514..6f12fb75a0a554a8992e3db31d5810f676d88eff 100644 (file)
@@ -138,6 +138,10 @@ def process_domain(domain: str, blocker: str, command: str) -> bool:
         logger.warning("Exception '%s' during fetching instances (fetch_oliphant) from domain='%s'", type(exception), domain)
         instances.set_last_error(domain, exception)
 
+    if instances.has_pending(domain):
+        logger.debug("domain='%s' has pending data updates", domain)
+        instances.update_data(domain)
+
     logger.debug("processed='%s' - EXIT!", processed)
     return processed