diff --git a/fba/commands.py b/fba/commands.py
index 5a59379da16571518780777fdd174aab5c476f33..1ecf9e2b30d8cd5e9f73fb3f40248d32a8e19312 100644
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -20,6 +20,8 @@ import json
 import logging
 import time
 
+from urllib.parse import urlparse
+
 import argparse
 import atoma
 import bs4
@@ -35,6 +37,7 @@ from fba.helpers import blacklist
 from fba.helpers import config
 from fba.helpers import cookies
 from fba.helpers import locking
+from fba.helpers import processing
 from fba.helpers import software as software_helper
 from fba.helpers import tidyup
 
@@ -43,6 +46,7 @@ from fba.http import network
 
 from fba.models import blocks
 from fba.models import instances
+from fba.models import sources
 
 from fba.networks import friendica
 from fba.networks import lemmy
@@ -98,12 +102,20 @@ def check_nodeinfo(args: argparse.Namespace) -> int:
 def fetch_pixelfed_api(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
-    # No CSRF by default, you don't have to add network.api_headers by yourself here
+    # No CSRF by default, you don't have to add network.source_headers by yourself here
     headers = tuple()
+    source_domain = "pixelfed.org"
+
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
 
     try:
-        logger.debug("Checking CSRF from pixelfed.org")
-        headers = csrf.determine("pixelfed.org", dict())
+        logger.debug("Checking CSRF from source_domain='%s' ...", source_domain)
+        headers = csrf.determine(source_domain, dict())
     except network.exceptions as exception:
         logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
         return 1
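For context: the new fba.models.sources module rate-limits how often each upstream source is queried. Its implementation is not part of this diff; a minimal sketch of the is_recent()/update() pair, assuming a plain timestamp map instead of the project's real persistence, could look like this:

    # Hypothetical sketch only - the real fba.models.sources is not shown in this diff.
    import time

    _last_access = dict()   # source_domain -> UNIX timestamp of the last fetch
    _MAX_AGE = 3600         # assumed cool-down window in seconds

    def is_recent(source_domain: str) -> bool:
        # True when the source was fetched within the cool-down window
        return source_domain in _last_access and (time.time() - _last_access[source_domain]) < _MAX_AGE

    def update(source_domain: str):
        # Record "now" as the most recent access to this source
        _last_access[source_domain] = time.time()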
@@ -111,7 +123,7 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
     try:
         logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
         fetched = network.get_json_api(
-            "pixelfed.org",
+            source_domain,
             "/api/v1/servers/all.json?scope=All&country=all&language=all",
             headers,
             (config.get("connection_timeout"), config.get("read_timeout"))
@@ -135,18 +147,23 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
             elif row["domain"] == "":
                 logger.debug("row[domain] is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(row["domain"]):
-                logger.warning("row[domain]='%s' is not wanted - SKIPPED!", row["domain"])
+
+            logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
+            domain = row["domain"].encode("idna").decode("utf-8")
+            logger.debug("domain='%s' - AFTER!", domain)
+
+            if not utils.is_domain_wanted(domain):
+                logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
                 continue
-            elif instances.is_registered(row["domain"]):
-                logger.debug("row[domain]='%s' is already registered - SKIPPED!", row["domain"])
+            elif instances.is_registered(domain):
+                logger.debug("domain='%s' is already registered - SKIPPED!", domain)
                 continue
-            elif instances.is_recent(row["domain"]):
-                logger.debug("row[domain]='%s' has been recently crawled - SKIPPED!", row["domain"])
+            elif instances.is_recent(domain):
+                logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
                 continue
 
-            logger.debug("Fetching instances from row[domain]='%s' ...", row["domain"])
-            federation.fetch_instances(row["domain"], None, None, inspect.currentframe().f_code.co_name)
+            logger.debug("Fetching instances from domain='%s' ...", domain)
+            federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
 
     except network.exceptions as exception:
         logger.warning("Cannot fetch graphql,exception[%s]:'%s' - EXIT!", type(exception), str(exception))
@@ -157,15 +174,32 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
 
 def fetch_bkali(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    source_domain = "gql.api.bka.li"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
     domains = list()
     try:
-        fetched = network.post_json_api("gql.api.bka.li", "/v1/graphql", json.dumps({
-            "query": "query domainlist {nodeinfo(order_by: {domain: asc}) {domain}}"
-        }))
+        logger.info("Fetching domainlist from source_domain='%s' ...", source_domain)
+        fetched = network.post_json_api(
+            source_domain,
+            "/v1/graphql",
+            json.dumps({
+                "query": "query domainlist {nodeinfo(order_by: {domain: asc}) {domain}}"
+            })
+        )
 
         logger.debug("fetched[]='%s'", type(fetched))
         if "error_message" in fetched:
-            logger.warning("post_json_api() for 'gql.api.bka.li' returned error message='%s", fetched["error_message"])
+            logger.warning("post_json_api() for 'gql.sources.bka.li' returned error message='%s", fetched["error_message"])
             return 100
         elif isinstance(fetched["json"], dict) and "error" in fetched["json"] and "message" in fetched["json"]["error"]:
             logger.warning("post_json_api() returned error: '%s", fetched["error"]["message"])
@@ -208,10 +242,12 @@ def fetch_bkali(args: argparse.Namespace) -> int:
 
     logger.debug("domains()=%d", len(domains))
     if len(domains) > 0:
-        locking.acquire()
-
         logger.info("Adding %d new instances ...", len(domains))
         for domain in domains:
+            logger.debug("domain='%s' - BEFORE!", domain)
+            domain = domain.encode("idna").decode("utf-8")
+            logger.debug("domain='%s' - AFTER!", domain)
+
             try:
                 logger.info("Fetching instances from domain='%s' ...", domain)
                 federation.fetch_instances(domain, 'tak.teleyal.blog', None, inspect.currentframe().f_code.co_name)
@@ -237,6 +273,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
             logger.warning("args.domain='%s' is not registered, please run ./utils.py fetch_instances '%s' first.", args.domain, args.domain)
             return 102
 
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
     if args.domain is not None and args.domain != "":
@@ -279,7 +316,6 @@ def fetch_blocks(args: argparse.Namespace) -> int:
         instances.set_has_obfuscation(blocker, False)
 
         blocking = list()
-        blockdict = list()
         if software == "pleroma":
             logger.info("blocker='%s',software='%s'", blocker, software)
             blocking = pleroma.fetch_blocks(blocker, nodeinfo_url)
@@ -298,8 +334,10 @@ def fetch_blocks(args: argparse.Namespace) -> int:
         else:
             logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)
 
-        logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
-        instances.set_total_blocks(blocker, blocking)
+        logger.debug("blocker='%s'", blocker)
+        if blocker != "chaos.social":
+            logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
+            instances.set_total_blocks(blocker, blocking)
 
         logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
         blockdict = list()
@@ -362,7 +400,12 @@ def fetch_blocks(args: argparse.Namespace) -> int:
             if block["blocked"] == "":
                 logger.debug("block[blocked] is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(block["blocked"]):
+
+            logger.debug("block[blocked]='%s' - BEFORE!", block["blocked"])
+            block["blocked"] = block["blocked"].lstrip(".").encode("idna").decode("utf-8")
+            logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
+            if not utils.is_domain_wanted(block["blocked"]):
                 logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
                 continue
             elif block["block_level"] in ["accept", "accepted"]:
@@ -372,9 +415,9 @@ def fetch_blocks(args: argparse.Namespace) -> int:
                 logger.debug("Hash wasn't found, adding: blocked='%s',blocker='%s'", block["blocked"], blocker)
                 federation.fetch_instances(block["blocked"], blocker, None, inspect.currentframe().f_code.co_name)
 
-            block["block_level"] = utils.alias_block_level(block["block_level"])
+            block["block_level"] = blocks.alias_block_level(block["block_level"])
 
-            if utils.process_block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
+            if processing.block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
                 logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
                 blockdict.append({
                     "blocked": block["blocked"],
@@ -406,14 +449,22 @@ def fetch_blocks(args: argparse.Namespace) -> int:
 def fetch_observer(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
-    # Acquire lock
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
+    source_domain = "fediverse.observer"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
     types = list()
     if args.software is None:
         logger.info("Fetching software list ...")
         raw = utils.fetch_url(
-            "https://fediverse.observer",
+            f"https://{source_domain}",
             network.web_headers,
             (config.get("connection_timeout"), config.get("read_timeout"))
         ).text
@@ -449,7 +500,7 @@ def fetch_observer(args: argparse.Namespace) -> int:
         try:
             logger.debug("Fetching table data for software='%s' ...", software)
             raw = utils.fetch_url(
-                f"https://fediverse.observer/app/views/tabledata.php?software={software}",
+                f"https://{source_domain}/app/views/tabledata.php?software={software}",
                 network.web_headers,
                 (config.get("connection_timeout"), config.get("read_timeout"))
             ).text
@@ -458,7 +509,7 @@ def fetch_observer(args: argparse.Namespace) -> int:
             doc = bs4.BeautifulSoup(raw, features="html.parser")
             logger.debug("doc[]='%s'", type(doc))
         except network.exceptions as exception:
-            logger.warning("Cannot fetch software='%s' from fediverse.observer: '%s'", software, type(exception))
+            logger.warning("Cannot fetch software='%s' from source_domain='%s': '%s'", software, source_domain, type(exception))
             continue
 
         items = doc.findAll("a", {"class": "url"})
@@ -466,12 +517,17 @@ def fetch_observer(args: argparse.Namespace) -> int:
         for item in items:
             logger.debug("item[]='%s'", type(item))
             domain = item.decode_contents()
-
             logger.debug("domain='%s' - AFTER!", domain)
+
             if domain == "":
                 logger.debug("domain is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(domain):
+
+            logger.debug("domain='%s' - BEFORE!", domain)
+            domain = domain.encode("idna").decode("utf-8")
+            logger.debug("domain='%s' - AFTER!", domain)
+
+            if not utils.is_domain_wanted(domain):
                 logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
                 continue
             elif instances.is_registered(domain):
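The per-software pages are scraped by collecting every anchor with class "url". The same BeautifulSoup pattern on inline HTML:

    # Sketch of the findAll()/decode_contents() scraping used above.
    import bs4

    html = '<a class="url">example.org</a><a class="url">sample.net</a>'
    doc = bs4.BeautifulSoup(html, features="html.parser")
    for item in doc.findAll("a", {"class": "url"}):
        print(item.decode_contents())  # inner markup/text of each matched anchor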
@@ -491,13 +547,23 @@ def fetch_observer(args: argparse.Namespace) -> int:
 def fetch_todon_wiki(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
+
+    source_domain = "wiki.todon.eu"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
     blocklist = {
         "silenced": list(),
         "reject": list(),
     }
 
-    raw = utils.fetch_url("https://wiki.todon.eu/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+    raw = utils.fetch_url(f"https://{source_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
     doc = bs4.BeautifulSoup(raw, "html.parser")
@@ -538,7 +604,7 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int:
                 continue
 
             logger.info("Adding new block: blocked='%s',block_level='%s'", blocked, block_level)
-            if utils.process_block(blocker, blocked, None, block_level) and block_level == "reject" and config.get("bot_enabled"):
+            if processing.block(blocker, blocked, None, block_level) and block_level == "reject" and config.get("bot_enabled"):
                 logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", blocked, block_level, blocker)
                 blockdict.append({
                     "blocked": blocked,
@@ -563,6 +629,10 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int:
 
 def fetch_cs(args: argparse.Namespace):
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
     extensions = [
         "extra",
         "abbr",
@@ -583,12 +653,20 @@ def fetch_cs(args: argparse.Namespace):
         "wikilinks"
     ]
 
-    domains = {
+    blocklist = {
         "silenced": list(),
         "reject"  : list(),
     }
 
-    raw = utils.fetch_url("https://raw.githubusercontent.com/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+    source_domain = "raw.githubusercontent.com"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    raw = utils.fetch_url(f"https://{source_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
     doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features="html.parser")
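fetch_cs() renders the Markdown blocklist to HTML first and then walks it with BeautifulSoup. The same extraction pattern on a made-up inline table (the real federation.md is fetched above):

    # Sketch of the markdown -> bs4 table walk, on inline sample data.
    import bs4
    import markdown

    raw = "## Blocked instances\n\n| Instance | Reason |\n|---|---|\n| bad.example | spam |\n"
    doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=["extra"]), features="html.parser")
    table = doc.find("h2").findNext("table")
    for cell in table.find("tbody").findAll("td"):
        print(cell.get_text(strip=True))  # -> bad.example, spam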
@@ -596,11 +674,11 @@ def fetch_cs(args: argparse.Namespace):
 
     silenced = doc.find("h2", {"id": "silenced-instances"}).findNext("table").find("tbody")
     logger.debug("silenced[%s]()=%d", type(silenced), len(silenced))
-    domains["silenced"] = federation.find_domains(silenced)
+    blocklist["silenced"] = federation.find_domains(silenced)
 
     blocked = doc.find("h2", {"id": "blocked-instances"}).findNext("table").find("tbody")
     logger.debug("blocked[%s]()=%d", type(blocked), len(blocked))
-    domains["reject"] = federation.find_domains(blocked)
+    blocklist["reject"] = federation.find_domains(blocked)
 
     blocking = blocklist["silenced"] + blocklist["reject"]
     blocker = "chaos.social"
@@ -608,17 +686,18 @@ def fetch_cs(args: argparse.Namespace):
     logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
     instances.set_total_blocks(blocker, blocking)
 
-    logger.debug("domains[silenced]()=%d,domains[reject]()=%d", len(domains["silenced"]), len(domains["reject"]))
-    blockdict = list()
-    if len(domains) > 0:
-        locking.acquire()
-
-        for block_level in domains:
-            logger.info("block_level='%s' has %d row(s)", block_level, len(domains[block_level]))
+    logger.debug("blocklist[silenced]()=%d,blocklist[reject]()=%d", len(blocklist["silenced"]), len(blocklist["reject"]))
+    if len(blocking) > 0:
+        blockdict = list()
+        for block_level in blocklist:
+            logger.info("block_level='%s' has %d row(s)", block_level, len(blocklist[block_level]))
 
-            for row in domains[block_level]:
+            for row in blocklist[block_level]:
                 logger.debug("row[%s]='%s'", type(row), row)
-                if instances.is_recent(row["domain"], "last_blocked"):
+                if not "domain" in row:
+                    logger.warning("row[]='%s' has no element 'domain' - SKIPPED!", type(row))
+                    continue
+                elif instances.is_recent(row["domain"], "last_blocked"):
                     logger.debug("row[domain]='%s' has been recently crawled - SKIPPED!", row["domain"])
                     continue
                 elif not instances.is_registered(row["domain"]):
@@ -629,7 +708,7 @@ def fetch_cs(args: argparse.Namespace):
                         logger.warning("Exception '%s' during fetching instances (fetch_cs) from row[domain]='%s'", type(exception), row["domain"])
                         instances.set_last_error(row["domain"], exception)
 
-                if utils.process_block(blocker, row["domain"], row["reason"], block_level) and block_level == "reject" and config.get("bot_enabled"):
+                if processing.block(blocker, row["domain"], row["reason"], block_level) and block_level == "reject" and config.get("bot_enabled"):
                     logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", row["domain"], block_level, blocker)
                     blockdict.append({
                         "blocked": row["domain"],
@@ -654,8 +733,21 @@ def fetch_cs(args: argparse.Namespace):
 
 def fetch_fba_rss(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
     domains = list()
 
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    components = urlparse(args.feed)
+
+    if sources.is_recent(components.netloc):
+        logger.info("API from components.netloc='%s' has recently being accessed - EXIT!", components.netloc)
+        return 0
+    else:
+        logger.debug("components.netloc='%s' has not been recently used, marking ...", components.netloc)
+        sources.update(components.netloc)
+
     logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
     response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
@@ -673,7 +765,12 @@ def fetch_fba_rss(args: argparse.Namespace) -> int:
             if domain == "":
                 logger.debug("domain is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(domain):
+
+            logger.debug("domain='%s' - BEFORE!", domain)
+            domain = domain.encode("idna").decode("utf-8")
+            logger.debug("domain='%s' - AFTER!", domain)
+
+            if not utils.is_domain_wanted(domain):
                 logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
                 continue
             elif domain in domains:
@@ -691,10 +788,9 @@ def fetch_fba_rss(args: argparse.Namespace) -> int:
 
     logger.debug("domains()=%d", len(domains))
     if len(domains) > 0:
-        locking.acquire()
-
         logger.info("Adding %d new instances ...", len(domains))
         for domain in domains:
+            logger.debug("domain='%s'", domain)
             try:
                 logger.info("Fetching instances from domain='%s' ...", domain)
                 federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
@@ -708,7 +804,19 @@ def fetch_fba_rss(args: argparse.Namespace) -> int:
 
 def fetch_fbabot_atom(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
-    feed = "https://ryona.agency/users/fba/feed.atom"
+
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    source_domain = "ryona.agency"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    feed = f"https://{source_domain}/users/fba/feed.atom"
 
     domains = list()
 
@@ -735,7 +843,12 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
                     if domain == "":
                         logger.debug("domain is empty - SKIPPED!")
                         continue
-                    elif not utils.is_domain_wanted(domain):
+
+                    logger.debug("domain='%s' - BEFORE!", domain)
+                    domain = domain.encode("idna").decode("utf-8")
+                    logger.debug("domain='%s' - AFTER!", domain)
+
+                    if not utils.is_domain_wanted(domain):
                         logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
                         continue
                     elif domain in domains:
@@ -753,14 +866,12 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
 
     logger.debug("domains()=%d", len(domains))
     if len(domains) > 0:
-        locking.acquire()
-
         logger.info("Adding %d new instances ...", len(domains))
         for domain in domains:
             logger.debug("domain='%s'", domain)
             try:
                 logger.info("Fetching instances from domain='%s' ...", domain)
-                federation.fetch_instances(domain, "ryona.agency", None, inspect.currentframe().f_code.co_name)
+                federation.fetch_instances(domain, source_domain, None, inspect.currentframe().f_code.co_name)
             except network.exceptions as exception:
                 logger.warning("Exception '%s' during fetching instances (fetch_fbabot_atom) from domain='%s'", type(exception), domain)
                 instances.set_last_error(domain, exception)
@@ -771,6 +882,16 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
 
 def fetch_instances(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("args.domain='%s' - checking ...", args.domain)
+    if not validators.domain(args.domain):
+        logger.warning("args.domain='%s' is not valid.", args.domain)
+        return 100
+    elif blacklist.is_blacklisted(args.domain):
+        logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain)
+        return 101
+
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
     # Initial fetch
@@ -789,7 +910,7 @@ def fetch_instances(args: argparse.Namespace) -> int:
 
     # Loop through some instances
     database.cursor.execute(
-        "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY rowid DESC", [time.time() - config.get("recheck_instance")]
+        "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY rowid DESC", [time.time() - config.get("recheck_instance")]
     )
 
     rows = database.cursor.fetchall()
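Dict-style access like row["domain"] below implies the database helper sets a row factory, which this diff does not show. With plain sqlite3 that is typically done as follows:

    # Sketch: sqlite3.Row makes cursor rows addressable by column name.
    import sqlite3

    connection = sqlite3.connect(":memory:")
    connection.row_factory = sqlite3.Row
    cursor = connection.cursor()
    cursor.execute("CREATE TABLE instances (domain TEXT, software TEXT)")
    cursor.execute("INSERT INTO instances VALUES ('example.org', 'mastodon')")
    cursor.execute("SELECT domain, software FROM instances WHERE software IN ('pleroma', 'mastodon')")
    for row in cursor.fetchall():
        print(row["domain"], row["software"])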
@@ -799,26 +920,41 @@ def fetch_instances(args: argparse.Namespace) -> int:
         if row["domain"] == "":
             logger.debug("row[domain] is empty - SKIPPED!")
             continue
-        elif not utils.is_domain_wanted(row["domain"]):
-            logger.warning("Domain row[domain]='%s' is not wanted - SKIPPED!", row["domain"])
+
+        logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
+        domain = row["domain"].encode("idna").decode("utf-8")
+        logger.debug("domain='%s' - AFTER!", domain)
+
+        if not utils.is_domain_wanted(domain):
+            logger.warning("Domain domain='%s' is not wanted - SKIPPED!", domain)
             continue
 
         try:
-            logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", row["domain"], row["origin"], row["software"], row["nodeinfo_url"])
-            federation.fetch_instances(row["domain"], row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
+            logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", domain, row["origin"], row["software"], row["nodeinfo_url"])
+            federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
         except network.exceptions as exception:
-            logger.warning("Exception '%s' during fetching instances (fetch_instances) from row[domain]='%s'", type(exception), row["domain"])
-            instances.set_last_error(row["domain"], exception)
+            logger.warning("Exception '%s' during fetching instances (fetch_instances) from domain='%s'", type(exception), domain)
+            instances.set_last_error(domain, exception)
 
     logger.debug("Success - EXIT!")
     return 0
 
 def fetch_oliphant(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
+    source_domain = "codeberg.org"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
     # Base URL
-    base_url = "https://codeberg.org/oliphant/blocklists/raw/branch/main/blocklists"
+    base_url = f"https://{source_domain}/oliphant/blocklists/raw/branch/main/blocklists"
 
     # URLs to fetch
     blocklists = (
@@ -849,6 +985,9 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
         },{
             "blocker": "sunny.garden",
             "csv_url": "mastodon/sunny.garden.csv",
+        },{
+            "blocker": "sunny.garden",
+            "csv_url": "mastodon/gardenfence.csv",
         },{
             "blocker": "solarpunk.moe",
             "csv_url": "mastodon/solarpunk.moe.csv",
@@ -858,6 +997,9 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
         },{
             "blocker": "union.place",
             "csv_url": "mastodon/union.place.csv",
+        },{
+            "blocker": "oliphant.social",
+            "csv_url": "mastodon/birdsite.csv",
         }
     )
 
@@ -872,25 +1014,21 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
         elif args.domain in domains:
             logger.debug("args.domain='%s' already handled - SKIPPED!", args.domain)
             continue
-        elif instances.is_recent(block["blocker"]):
-            logger.debug("block[blocker]='%s' has been recently crawled - SKIPPED!", block["blocker"])
-            continue
 
         # Fetch this URL
         logger.info("Fetching csv_url='%s' for blocker='%s' ...", block["csv_url"], block["blocker"])
         response = utils.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
-        logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
-        if not response.ok or response.status_code > 399 or response.content == "":
+        logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
+        if not response.ok or response.status_code >= 300 or response.content == "":
             logger.warning("Could not fetch csv_url='%s' for blocker='%s' - SKIPPED!", block["csv_url"], block["blocker"])
             continue
 
         logger.debug("Fetched %d Bytes, parsing CSV ...", len(response.content))
-        reader = csv.DictReader(response.content.decode('utf-8').splitlines(), dialect="unix")
+        reader = csv.DictReader(response.content.decode("utf-8").splitlines(), dialect="unix")
 
         blockdict = list()
 
-        logger.info("Processing %d rows ...", len(reader))
         cnt = 0
         for row in reader:
             logger.debug("row[%s]='%s'", type(row), row)
@@ -906,9 +1044,9 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
                 continue
 
             if "#severity" in row:
-                severity = row["#severity"]
+                severity = blocks.alias_block_level(row["#severity"])
             elif "severity" in row:
-                severity = row["severity"]
+                severity = blocks.alias_block_level(row["severity"])
             else:
                 logger.debug("row='%s' does not contain severity column", row)
                 continue
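Severity values from the CSVs are now funneled through blocks.alias_block_level() as well. The alias table itself is not part of this diff; a plausible sketch, assuming it folds synonymous level names onto the canonical ones used elsewhere ("reject", "silenced"):

    # Hypothetical sketch only - the real blocks.alias_block_level() is not shown here.
    _ALIASES = {
        "suspend"  : "reject",
        "suspended": "reject",
        "silence"  : "silenced",
        "silenced" : "silenced",
    }

    def alias_block_level(block_level: str) -> str:
        # Unknown levels pass through unchanged
        return _ALIASES.get(block_level.lower(), block_level)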
@@ -928,18 +1066,35 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
             if domain == "":
                 logger.debug("domain is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(domain):
-                logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+            elif domain.endswith(".onion"):
+                logger.debug("domain='%s' is a TOR .onion domain - SKIPPED", domain)
+                continue
+            elif domain.endswith(".arpa"):
+                logger.debug("domain='%s' is a reverse IP address - SKIPPED", domain)
+                continue
+            elif domain.endswith(".tld"):
+                logger.debug("domain='%s' is a fake domain - SKIPPED", domain)
+                continue
+            elif domain.find("*") >= 0 or domain.find("?") >= 0:
+                logger.debug("domain='%s' is obfuscated - Invoking utils.deobfuscate(%s, %s) ...", domain, domain, block["blocker"])
+                domain = utils.deobfuscate(domain, block["blocker"])
+                logger.debug("domain='%s' - AFTER!", domain)
+
+            if not validators.domain(domain):
+                logger.debug("domain='%s' is not a valid domain - SKIPPED!")
+                continue
+            elif blacklist.is_blacklisted(domain):
+                logger.warning("domain='%s' is blacklisted - SKIPPED!", domain)
                 continue
 
             logger.debug("Marking domain='%s' as handled", domain)
             domains.append(domain)
 
             logger.debug("Processing domain='%s' ...", domain)
-            processed = utils.process_domain(domain, block["blocker"], inspect.currentframe().f_code.co_name)
+            processed = processing.domain(domain, block["blocker"], inspect.currentframe().f_code.co_name)
             logger.debug("processed='%s'", processed)
 
-            if utils.process_block(block["blocker"], domain, None, "reject") and config.get("bot_enabled"):
+            if processing.block(block["blocker"], domain, None, severity) and config.get("bot_enabled"):
                 logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", domain, block["block_level"], block["blocker"])
                 blockdict.append({
                     "blocked": domain,
@@ -947,15 +1102,17 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
                 })
 
             if reject_media:
-                utils.process_block(block["blocker"], domain, None, "reject_media")
+                processing.block(block["blocker"], domain, None, "reject_media")
             if reject_reports:
-                utils.process_block(block["blocker"], domain, None, "reject_reports")
+                processing.block(block["blocker"], domain, None, "reject_reports")
 
-        logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", block["blocker"], cnt)
-        instances.set_total_blocks(block["blocker"], cnt)
+        logger.debug("block[blocker]='%s'", block["blocker"])
+        if block["blocker"] != "chaos.social":
+            logger.debug("Invoking instances.set_total_blocks(%s, domains()=%d) ...", block["blocker"], len(domains))
+            instances.set_total_blocks(block["blocker"], domains)
 
         logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
-        if instances.has_pending(blocker):
+        if instances.has_pending(block["blocker"]):
             logger.debug("Flushing updates for block[blocker]='%s' ...", block["blocker"])
             instances.update_data(block["blocker"])
 
@@ -972,6 +1129,8 @@ def fetch_oliphant(args: argparse.Namespace) -> int:
 
 def fetch_txt(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
     # Static URLs
@@ -1007,7 +1166,7 @@ def fetch_txt(args: argparse.Namespace) -> int:
                     continue
 
                 logger.debug("Processing domain='%s',row[blocker]='%s'", domain, row["blocker"])
-                processed = utils.process_domain(domain, row["blocker"], inspect.currentframe().f_code.co_name)
+                processed = processing.domain(domain, row["blocker"], inspect.currentframe().f_code.co_name)
 
                 logger.debug("processed='%s'", processed)
                 if not processed:
@@ -1019,9 +1178,23 @@ def fetch_txt(args: argparse.Namespace) -> int:
 
 def fetch_fedipact(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
-    response = utils.fetch_url("https://fedipact.online", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+    source_domain = "fedipact.online"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    response = utils.fetch_url(
+        f"https://{source_domain}",
+        network.web_headers,
+        (config.get("connection_timeout"), config.get("read_timeout"))
+    )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
     if response.ok and response.status_code < 300 and response.text != "":
@@ -1040,7 +1213,12 @@ def fetch_fedipact(args: argparse.Namespace) -> int:
             if domain == "":
                 logger.debug("domain is empty - SKIPPED!")
                 continue
-            elif not utils.is_domain_wanted(domain):
+
+            logger.debug("domain='%s' - BEFORE!", domain)
+            domain = domain.encode("idna").decode("utf-8")
+            logger.debug("domain='%s' - AFTER!", domain)
+
+            if not utils.is_domain_wanted(domain):
                 logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
                 continue
             elif instances.is_registered(domain):
@@ -1058,9 +1236,23 @@ def fetch_fedipact(args: argparse.Namespace) -> int:
 
 def fetch_joinfediverse(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
-    raw = utils.fetch_url("https://joinfediverse.wiki/FediBlock", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+    source_domain = "joinfediverse.wiki"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    raw = utils.fetch_url(
+        f"https://{source_domain}/FediBlock",
+        network.web_headers,
+        (config.get("connection_timeout"), config.get("read_timeout"))
+    ).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
     doc = bs4.BeautifulSoup(raw, "html.parser")
@@ -1091,14 +1283,14 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
 
                     logger.debug("text[]='%s'", type(text))
                     if not isinstance(text, str):
-                        logger.debug("text[]='%s' is not 'str' - SKIPPED!", type(text))
+                        logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
                         continue
                     elif validators.domain(text.strip()):
                         logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
                         continue
 
                     text = tidyup.domain(text.strip())
-                    logger.debug("text='%s'", text)
+                    logger.debug("text='%s' - AFTER!", text)
                     if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
                         logger.debug("Found header: '%s'=%d", text, cnt)
                         block_headers[cnt] = text
@@ -1149,8 +1341,10 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
         logger.debug("block='%s'", block)
         if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
             origin = block["blocked"]
+            logger.debug("origin='%s'", origin)
             for subdomain in block["subdomain(s)"]:
                 block["blocked"] = subdomain + "." + origin
+                logger.debug("block[blocked]='%s'", block["blocked"])
                 blocking.append(block)
         else:
             blocking.append(block)
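One caveat in the subdomain expansion above: the loop mutates and appends the same dict object, so every appended entry ends up with the last subdomain in block["blocked"]. Appending a copy per subdomain keeps each entry distinct, e.g.:

    # Sketch: one independent dict per subdomain instead of a shared reference.
    for subdomain in block["subdomain(s)"]:
        entry = dict(block)
        entry["blocked"] = subdomain + "." + origin
        blocking.append(entry)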
@@ -1158,21 +1352,24 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
     logger.debug("blocking()=%d", blocking)
     for block in blocking:
         logger.debug("block[]='%s'", type(block))
-        block["blocked"] = tidyup.domain(block["blocked"])
+        if "blocked" not in block:
+            raise KeyError(f"block()={len(block)} does not have element 'blocked'")
 
+        block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
         logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
         if block["blocked"] == "":
             logger.debug("block[blocked] is empty - SKIPPED!")
             continue
         elif not utils.is_domain_wanted(block["blocked"]):
-            logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+            logger.warning("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
             continue
         elif instances.is_recent(block["blocked"]):
-            logger.debug("blocked='%s' has been recently checked - SKIPPED!", block["blocked"])
+            logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
             continue
 
         logger.info("Proccessing blocked='%s' ...", block["blocked"])
-        utils.process_domain(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
+        processing.domain(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
 
     blockdict = list()
     for blocker in domains:
@@ -1180,7 +1377,7 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
         logger.debug("blocker[%s]='%s'", type(blocker), blocker)
 
         for block in blocking:
-            logger.debug("block[blocked]='%s',block[reason]='%s' - BEFORE!", block["blocked"], block["reason"])
+            logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
             block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
 
             logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
@@ -1192,7 +1389,7 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
                 continue
 
             logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
-            if utils.process_block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
+            if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
                 logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
                 blockdict.append({
                     "blocked": block["blocked"],
@@ -1217,6 +1414,7 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int:
 def recheck_obfuscation(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
+    logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
     if isinstance(args.domain, str) and args.domain != "" and utils.is_domain_wanted(args.domain):
@@ -1253,12 +1451,15 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
         else:
             logger.warning("Unknown sofware: domain='%s',software='%s'", row["domain"], row["software"])
 
-        logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
-        instances.set_total_blocks(row["domain"], blocking)
+        logger.debug("row[domain]='%s'", row["domain"])
+        if row["domain"] != "chaos.social":
+            logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
+            instances.set_total_blocks(row["domain"], blocking)
 
-        logger.info("Checking %d block(s) from domain='%s' ...", len(blocking), row["domain"])
         obfuscated = 0
         blockdict = list()
+
+        logger.info("Checking %d block(s) from domain='%s' ...", len(blocking), row["domain"])
         for block in blocking:
             logger.debug("block[blocked]='%s'", block["blocked"])
             blocked = None
@@ -1278,7 +1479,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
             elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
                 logger.debug("block='%s' is obfuscated.", block["blocked"])
                 obfuscated = obfuscated + 1
-                blocked = utils.deobfuscate_domain(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+                blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
             elif not utils.is_domain_wanted(block["blocked"]):
                 logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
                 continue
@@ -1294,10 +1495,10 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
                     logger.debug("blocked='%s' is already blocked by domain='%s' - SKIPPED!", blocked, row["domain"])
                     continue
 
-                block["block_level"] = utils.alias_block_level(block["block_level"])
+                block["block_level"] = blocks.alias_block_level(block["block_level"])
 
                 logger.info("blocked='%s' has been deobfuscated to blocked='%s', adding ...", block["blocked"], blocked)
-                if utils.process_block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
+                if processing.block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
                     logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], row["domain"])
                     blockdict.append({
                         "blocked": blocked,
@@ -1327,14 +1528,23 @@ def recheck_obfuscation(args: argparse.Namespace) -> int:
 def fetch_fedilist(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
-    url = "http://demo.fedilist.com/instance/csv?onion=not"
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    source_domain = "demo.fedilist.com"
+    if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    url = f"http://{source_domain}/instance/csv?onion=not"
     if args.software is not None and args.software != "":
         logger.debug("args.software='%s'", args.software)
-        url = f"http://demo.fedilist.com/instance/csv?software={args.software}&onion=not"
-
-    locking.acquire()
+        url = f"http://{source_domain}/instance/csv?software={args.software}&onion=not"
 
-    logger.info("Fetching url='%s' from fedilist.com ...", url)
+    logger.info("Fetching url='%s' ...", url)
     response = reqto.get(
         url,
         headers=network.web_headers,
@@ -1343,23 +1553,36 @@ def fetch_fedilist(args: argparse.Namespace) -> int:
     )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
-    reader = csv.DictReader(response.content.decode('utf-8').splitlines(), dialect="unix")
+    if not response.ok or response.status_code >= 300 or len(response.content) == 0:
+        logger.warning("Failed fetching url='%s': response.ok='%s',response.status_code=%d,response.content()=%d - EXIT!", url, response.ok, response.status_code, len(response.text))
+        return 1
+
+    reader = csv.DictReader(response.content.decode("utf-8").splitlines(), dialect="unix")
 
     logger.debug("reader[]='%s'", type(reader))
-    blockdict = list()
     for row in reader:
         logger.debug("row[]='%s'", type(row))
+        if "hostname" not in row:
+            logger.warning("row()=%d has no element 'hostname' - SKIPPED!", len(row))
+            continue
+
+        logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
         domain = tidyup.domain(row["hostname"])
         logger.debug("domain='%s' - AFTER!", domain)
 
         if domain == "":
             logger.debug("domain is empty after tidyup: row[hostname]='%s' - SKIPPED!", row["hostname"])
             continue
-        elif not utils.is_domain_wanted(domain):
+
+        logger.debug("domain='%s' - BEFORE!", domain)
+        domain = domain.encode("idna").decode("utf-8")
+        logger.debug("domain='%s' - AFTER!", domain)
+
+        if not utils.is_domain_wanted(domain):
             logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
             continue
         elif (args.all is None or not args.all) and instances.is_registered(domain):
-            logger.debug("domain='%s' is already registered, --all not specified: args.all[]='%s'", type(args.all))
+            logger.debug("domain='%s' is already registered, --all not specified: args.all[]='%s'", domain, type(args.all))
             continue
         elif instances.is_recent(domain):
             logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
@@ -1374,27 +1597,34 @@ def fetch_fedilist(args: argparse.Namespace) -> int:
 def update_nodeinfo(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
     if args.domain is not None and args.domain != "":
         logger.debug("Fetching args.domain='%s'", args.domain)
-        database.cursor.execute("SELECT domain FROM instances WHERE domain = ?", [args.domain])
+        database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ?", [args.domain])
     elif args.software is not None and args.software != "":
         logger.info("Fetching domains for args.software='%s'", args.software)
-        database.cursor.execute("SELECT domain FROM instances WHERE software = ?", [args.software])
+        database.cursor.execute("SELECT domain, software FROM instances WHERE software = ?", [args.software])
     else:
         logger.info("Fetching domains for recently updated ...")
-        database.cursor.execute("SELECT domain FROM instances WHERE last_nodeinfo < ? OR last_nodeinfo IS NULL", [time.time() - config.get("recheck_block")])
+        database.cursor.execute("SELECT domain, software FROM instances WHERE last_nodeinfo < ? OR last_nodeinfo IS NULL", [time.time() - config.get("recheck_nodeinfo")])
 
     domains = database.cursor.fetchall()
 
     logger.info("Checking %d domain(s) ...", len(domains))
+    cnt = 0
     for row in domains:
         logger.debug("row[]='%s'", type(row))
         try:
-            logger.info("Updating nodeinfo for row[domain]='%s' ...", row["domain"])
+            logger.info("Checking nodeinfo for row[domain]='%s',row[software]='%s' (%s%%) ...", row["domain"], row["software"], "{:5.1f}".format(cnt / len(domains) * 100))
             software = federation.determine_software(row["domain"])
 
-            logger.info("Determined software='%s'", software)
-            instances.set_software(row["domain"], software)
+            logger.debug("Determined software='%s'", software)
+            if (software != row["software"] and software is not None) or args.force is True:
+                logger.warning("Software type for row[domain]='%s' has changed from '%s' to '%s'!", row["domain"], row["software"], software)
+                instances.set_software(row["domain"], software)
+
             instances.set_success(row["domain"])
         except network.exceptions as exception:
             logger.warning("Exception '%s' during updating nodeinfo for row[domain]='%s'", type(exception), row["domain"])
@@ -1402,6 +1632,116 @@ def update_nodeinfo(args: argparse.Namespace) -> int:
 
         instances.set_last_nodeinfo(row["domain"])
         instances.update_data(row["domain"])
+        cnt = cnt + 1
+
+    logger.debug("Success! - EXIT!")
+    return 0
+
+def fetch_instances_social(args: argparse.Namespace) -> int:
+    logger.debug("args[]='%s' - CALLED!", type(args))
+
+    logger.debug("Invoking locking.acquire() ...")
+    locking.acquire()
+
+    source_domain = "instances.social"
+
+    if config.get("instances_social_api_key") == "":
+        logger.error("API key not set. Please set in your config.json file.")
+        return 1
+    elif sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+        return 0
+    else:
+        logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+        sources.update(source_domain)
+
+    headers = {
+        "Authorization": f"Bearer {config.get('instances_social_api_key')}",
+    }
+
+    fetched = network.get_json_api(
+        source_domain,
+        "/api/1.0/instances/list?count=0&sort_by=name",
+        headers,
+        (config.get("connection_timeout"), config.get("read_timeout"))
+    )
+    logger.debug("fetched[]='%s'", type(fetched))
+
+    if "error_message" in fetched:
+        logger.warning("Error during fetching API result: '%s' - EXIT!", fetched["error_message"])
+        return 2
+    elif "exception" in fetched:
+        logger.warning("Exception '%s' during fetching API result - EXIT!", type(fetched["exception"]))
+        return 3
+    elif "json" not in fetched:
+        logger.warning("fetched has no element 'json' - EXIT!")
+        return 4
+    elif "instances" not in fetched["json"]:
+        logger.warning("fetched[row] has no element 'instances' - EXIT!")
+        return 5
+
+    domains = list()
+    rows = fetched["json"]["instances"]
+
+    logger.info("Checking %d row(s) ...", len(rows))
+    for row in rows:
+        logger.debug("row[]='%s'", type(row))
+        domain = tidyup.domain(row["name"])
+        logger.debug("domain='%s' - AFTER!", domain)
+
+        if domain == "":
+            logger.debug("domain is empty - SKIPPED!")
+            continue
+
+        logger.debug("domain='%s' - BEFORE!", domain)
+        domain = domain.encode("idna").decode("utf-8")
+        logger.debug("domain='%s' - AFTER!", domain)
+
+        if not utils.is_domain_wanted(domain):
+            logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+            continue
+        elif domain in domains:
+            logger.debug("domain='%s' is already added - SKIPPED!", domain)
+            continue
+        elif instances.is_registered(domain):
+            logger.debug("domain='%s' is already registered - SKIPPED!", domain)
+            continue
+        elif instances.is_recent(domain):
+            logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
+            continue
+
+        logger.info("Fetching instances from domain='%s'", domain)
+        federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
+
+    logger.debug("Success! - EXIT!")
+    return 0
+
+def convert_idna(args: argparse.Namespace) -> int:
+    logger.debug("args[]='%s' - CALLED!", type(args))
+
+    database.cursor.execute("SELECT domain FROM instances WHERE domain NOT LIKE '%xn--%' ORDER BY domain ASC")
+    rows = database.cursor.fetchall()
+
+    logger.debug("rows[]='%s'", type(rows))
+    instances.translate_idnas(rows, "domain")
+
+    database.cursor.execute("SELECT origin FROM instances WHERE origin NOT LIKE '%xn--%' ORDER BY origin ASC")
+    rows = database.cursor.fetchall()
+
+    logger.debug("rows[]='%s'", type(rows))
+    instances.translate_idnas(rows, "origin")
+
+    database.cursor.execute("SELECT blocker FROM blocks WHERE blocker NOT LIKE '%xn--%' ORDER BY blocker ASC")
+    rows = database.cursor.fetchall()
+
+    logger.debug("rows[]='%s'", type(rows))
+    blocks.translate_idnas(rows, "blocker")
+
+    database.cursor.execute("SELECT blocked FROM blocks WHERE blocked NOT LIKE '%xn--%' ORDER BY blocked ASC")
+    rows = database.cursor.fetchall()
+
+    logger.debug("rows[]='%s'", type(rows))
+    blocks.translate_idnas(rows, "blocked")
 
     logger.debug("Success! - EXIT!")
     return 0
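convert_idna() delegates the actual rewriting to instances.translate_idnas() and blocks.translate_idnas(), which are not part of this diff. A minimal sketch of such a helper, assuming the project's database helper module and one UPDATE per changed value (the real helpers also cover the blocks table):

    # Hypothetical sketch only - the real translate_idnas() helpers are not shown
    # in this diff; "database" is assumed to be fba's database helper module.
    def translate_idnas(rows: list, column: str):
        for row in rows:
            translated = row[column].encode("idna").decode("utf-8")
            if translated != row[column]:
                database.cursor.execute(
                    f"UPDATE instances SET {column} = ? WHERE {column} = ?",
                    [translated, row[column]]
                )
        database.connection.commit()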