]> git.mxchange.org Git - fba.git/blobdiff - fba/networks/mastodon.py
Continued:
[fba.git] / fba / networks / mastodon.py
index 3f90a9b7926058c2b87938776e3b92873352f4fd..f0de5de88b33a0217ec5b257f651eb7d1952006f 100644 (file)
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-import inspect
 import logging
-
-import bs4
 import validators
 
-from fba import csrf
-from fba import fba
+import bs4
 
-from fba.helpers import blacklist
 from fba.helpers import config
+from fba.helpers import domain as domain_helper
 from fba.helpers import tidyup
 
+from fba.http import federation
 from fba.http import network
 
 from fba.models import blocks
@@ -63,25 +60,17 @@ language_mapping = {
 }
 
 def fetch_blocks_from_about(domain: str) -> dict:
-    logger.debug("domain(%d)='%s' - CALLED!", len(domain), domain)
-    if not isinstance(domain, str):
-        raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
-    elif domain == "":
-        raise ValueError("Parameter 'domain' is empty")
-    elif domain.lower() != domain:
-        raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
-    elif not validators.domain(domain.split("/")[0]):
-        raise ValueError(f"domain='{domain}' is not a valid domain")
-    elif domain.endswith(".arpa"):
-        raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
-    elif domain.endswith(".tld"):
-        raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
+    logger.debug("domain='%s' - CALLED!", domain)
+    domain_helper.raise_on(domain)
 
-    logger.debug("Fetching mastodon blocks from domain:", domain)
+    if not instances.is_registered(domain):
+        raise RuntimeError(f"domain='{domain}' is not registered but function is invoked.")
+
+    logger.debug("Fetching mastodon blocks from domain='%s'", domain)
     doc = None
     for path in ["/about/more", "/about"]:
         try:
-            logger.debug(f"Fetching path='{path}' from domain='{domain}' ...")
+            logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
             doc = bs4.BeautifulSoup(
                 network.fetch_response(
                     domain,
@@ -93,11 +82,11 @@ def fetch_blocks_from_about(domain: str) -> dict:
             )
 
             if len(doc.find_all("h3")) > 0:
-                logger.debug(f"path='{path}' had some headlines - BREAK!")
+                logger.debug("path='%s' had some headlines - BREAK!", path)
                 break
 
         except network.exceptions as exception:
-            logger.warning(f"Cannot fetch from domain='{domain}',exception='{type(exception)}'")
+            logger.warning("Cannot fetch from domain='%s',exception='%s'", domain, type(exception))
             instances.set_last_error(domain, exception)
             break
 
@@ -108,249 +97,108 @@ def fetch_blocks_from_about(domain: str) -> dict:
         "Silenced servers" : [],
     }
 
-    logger.debug("doc[]='%'", type(doc))
+    logger.debug("doc[]='%s'", type(doc))
     if doc is None:
-        logger.warning(f"Cannot fetch any /about pages for domain='{domain}' - EXIT!")
-        return blocklist
+        logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
+        return list()
 
     for header in doc.find_all("h3"):
         header_text = tidyup.reason(header.text)
 
         logger.debug("header_text='%s'", header_text)
         if header_text in language_mapping:
-            logger.debug("header_text='%s'", header_text)
+            logger.debug("Translating header_text='%s' ...", header_text)
             header_text = language_mapping[header_text]
         else:
-            logger.warning(f"header_text='{header_text}' not found in language mapping table")
+            logger.warning("header_text='%s' not found in language mapping table", header_text)
 
         if header_text in blocklist or header_text.lower() in blocklist:
             # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
             for line in header.find_all_next("table")[0].find_all("tr")[1:]:
+                domain     = line.find("span").text
+                block_hash = line.find("span")["title"][9:]
+                reason     = line.find_all("td")[1].text
+
+                logger.debug("domain='%s',reason='%s' - BEFORE!", domain, reason)
+                domain = tidyup.domain(domain) if domain != "" else None
+                reason = tidyup.reason(reason) if reason != "" else None
+
+                logger.debug("domain='%s',reason='%s' - AFTER!", domain, reason)
+                if domain is None or domain == "":
+                    logger.warning("domain='%s' is empty,line='%s' - SKIPPED!", domain, line)
+                    continue
+
+                logger.debug("Appending domain='%s',hash='%s',reason='%s' to blocklist header_text='%s' ...", domain, block_hash, reason, header_text)
                 blocklist[header_text].append({
-                    "domain": tidyup.domain(line.find("span").text),
-                    "hash"  : tidyup.domain(line.find("span")["title"][9:]),
-                    "reason": tidyup.reason(line.find_all("td")[1].text),
+                    "domain": domain,
+                    "hash"  : block_hash,
+                    "reason": reason,
                 })
         else:
-            logger.warning(f"header_text='{header_text}' not found in blocklist()={len(blocklist)}")
+            logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))
 
-    logger.debug("Returning blocklist for domain:", domain)
+    logger.debug("Returning blocklist for domain='%s' - EXIT!", domain)
     return {
         "reject"        : blocklist["Suspended servers"],
         "media_removal" : blocklist["Filtered media"],
         "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
     }
 
-def fetch_blocks(domain: str, origin: str, nodeinfo_url: str):
-    logger.debug(f"domain='{domain}',origin='{origin}',nodeinfo_url='{nodeinfo_url}' - CALLED!")
-    if not isinstance(domain, str):
-        raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
-    elif domain == "":
-        raise ValueError("Parameter 'domain' is empty")
-    elif domain.lower() != domain:
-        raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
-    elif not validators.domain(domain.split("/")[0]):
-        raise ValueError(f"domain='{domain}' is not a valid domain")
-    elif domain.endswith(".arpa"):
-        raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
-    elif domain.endswith(".tld"):
-        raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
-    elif not isinstance(origin, str) and origin is not None:
-        raise ValueError(f"Parameter origin[]='{type(origin)}' is not 'str'")
-    elif origin == "":
-        raise ValueError("Parameter 'origin' is empty")
-    elif not isinstance(nodeinfo_url, str):
-        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not 'str'")
-    elif nodeinfo_url == "":
-        raise ValueError("Parameter 'nodeinfo_url' is empty")
-
-    # No CSRF by default, you don't have to add network.api_headers by yourself here
-    headers = tuple()
+def fetch_blocks(domain: str) -> list:
+    logger.debug("domain='%s' - CALLED!", domain)
+    domain_helper.raise_on(domain)
 
-    try:
-        logger.debug(f"Checking CSRF for domain='{domain}'")
-        headers = csrf.determine(domain, dict())
-    except network.exceptions as exception:
-        logger.warning(f"Exception '{type(exception)}' during checking CSRF (fetch_blocks,{__name__}) - EXIT!")
-        instances.set_last_error(domain, exception)
-        return
+    if not instances.is_registered(domain):
+        raise RuntimeError(f"domain='{domain}' is not registered but function is invoked.")
 
-    try:
-        # json endpoint for newer mastodongs
-        found_blocks = list()
-        blocklist = list()
+    blocklist = list()
 
-        rows = {
-            "reject"        : [],
-            "media_removal" : [],
-            "followers_only": [],
-            "report_removal": [],
-        }
+    logger.debug("Invoking federation.fetch_blocks(%s) ...", domain)
+    rows = federation.fetch_blocks(domain)
 
-        logger.debug("Querying API domain_blocks:", domain)
-        data = network.get_json_api(
-            domain,
-            "/api/v1/instance/domain_blocks",
-            headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
-        )
-
-        logger.debug("data[]='%s'", type(data))
-        if "error_message" in data:
-            logger.debug(f"Was not able to fetch domain_blocks from domain='{domain}': status_code='{data['status_code']}',error_message='{data['error_message']}'")
-            instances.set_last_error(domain, data)
-            return
-        elif "json" in data and "error" in data["json"]:
-            logger.warning(f"JSON API returned error message: '{data['json']['error']}'")
-            instances.set_last_error(domain, data)
-            return
-        else:
-            # Getting blocklist
-            blocklist = data["json"]
+    logger.debug("rows[%s]()=%d", type(rows), len(rows))
+    if len(rows) == 0:
+        logger.debug("domain='%s' has returned zero rows, trying /about/more page ...", domain)
+        rows = fetch_blocks_from_about(domain)
 
-        if len(blocklist) > 0:
-            logger.info("Checking %d entries from domain='%s' ...", len(blocklist), domain)
-            for block in blocklist:
-                # Check type
-                logger.debug(f"block[]='{type(block)}'")
-                if not isinstance(block, dict):
-                    logger.debug(f"block[]='{type(block)}' is of type 'dict' - SKIPPED!")
-                    continue
-
-                # Map block -> entry
-                logger.debug(f"block[{type(block)}]='{block}'")
-                entry = {
-                    "domain": block["domain"],
-                    "hash"  : block["digest"],
-                    "reason": block["comment"] if "comment" in block else None
-                }
-
-                logger.debug("severity,domain,hash,comment:", block['severity'], block['domain'], block['digest'], block['comment'])
-                if block['severity'] == 'suspend':
-                    logger.debug(f"Adding entry='{entry}' with severity='{block['severity']}' ...")
-                    rows['reject'].append(entry)
-                elif block['severity'] == 'silence':
-                    logger.debug(f"Adding entry='{entry}' with severity='{block['severity']}' ...")
-                    rows['followers_only'].append(entry)
-                elif block['severity'] == 'reject_media':
-                    logger.debug(f"Adding entry='{entry}' with severity='{block['severity']}' ...")
-                    rows['media_removal'].append(entry)
-                elif block['severity'] == 'reject_reports':
-                    logger.debug(f"Adding entry='{entry}' with severity='{block['severity']}' ...")
-                    rows['report_removal'].append(entry)
-                else:
-                    logger.warning(f"Unknown severity='{block['severity']}', domain='{block['domain']}'")
-        else:
-            logger.debug(f"domain='{domain}' has returned zero rows, trying /about/more page ...")
-            rows = fetch_blocks_from_about(domain)
-
-        logger.info("Checking %d entries from domain='%s' ...", len(rows.items()), domain)
-        for block_level, blocklist in rows.items():
-            logger.debug("domain,block_level,blocklist():", domain, block_level, len(blocklist))
-            block_level = tidyup.domain(block_level)
-
-            logger.debug("AFTER-block_level:", block_level)
-            if block_level == "":
-                logger.warning("block_level is empty, domain:", domain)
+    logger.debug("rows[%s]()=%d", type(rows), len(rows))
+    if len(rows) > 0:
+        logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
+        for block in rows:
+            # Check type
+            logger.debug("block[]='%s'", type(block))
+            if not isinstance(block, dict):
+                logger.debug("block[]='%s' is of type 'dict' - SKIPPED!", type(block))
                 continue
-            elif block_level == "accept":
-                logger.debug(f"domain='{domain}' skipping block_level='accept'")
+            elif "domain" not in block:
+                logger.debug("block='%s'", block)
+                logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
+                continue
+            elif not domain_helper.is_wanted(block["domain"]):
+                logger.debug("block[domain]='%s' is not wanted - SKIPPED!", block["domain"])
+                continue
+            elif "severity" not in block:
+                logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
+                continue
+            elif block["severity"] in ["accept", "accepted"]:
+                logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"])
+                continue
+            elif "digest" in block and not validators.hashes.sha256(block["digest"]):
+                logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"])
                 continue
 
-            logger.debug(f"Checking {len(blocklist)} entries from domain='{domain}',block_level='{block_level}' ...")
-            for block in blocklist:
-                logger.debug(f"block[]='{type(block)}'")
-                blocked, blocked_hash, reason = block.values()
-                logger.debug(f"blocked='{blocked}',blocked_hash='{blocked_hash}',reason='{reason}':")
-                blocked = tidyup.domain(blocked)
-                reason  = tidyup.reason(reason) if reason is not None and reason != "" else None
-                logger.debug(f"blocked='{blocked}',reason='{reason}' - AFTER!")
-
-                if blocked == "":
-                    logger.warning("blocked is empty, domain='%s'", domain)
-                    continue
-                elif blacklist.is_blacklisted(blocked):
-                    logger.debug("blocked='%s' is blacklisted - SKIPPED!", blocked)
-                    continue
-                elif blocked.count("*") > 0:
-                    # Doing the hash search for instance names as well to tidy up DB
-                    row = instances.deobscure("*", blocked, blocked_hash)
-
-                    logger.debug("row[]='%s'", type(row))
-                    if row is None:
-                        logger.warning(f"Cannot deobsfucate blocked='{blocked}',blocked_hash='{blocked_hash}' - SKIPPED!")
-                        continue
-
-                    logger.debug("Updating domain: ", row[0])
-                    blocked      = row[0]
-                    origin       = row[1]
-                    nodeinfo_url = row[2]
-                elif blocked.count("?") > 0:
-                    # Doing the hash search for instance names as well to tidy up DB
-                    row = instances.deobscure("?", blocked, blocked_hash)
-
-                    logger.debug("row[]='%s'", type(row))
-                    if row is None:
-                        logger.warning(f"Cannot deobsfucate blocked='{blocked}',blocked_hash='{blocked_hash}' - SKIPPED!")
-                        continue
-
-                    logger.debug("Updating domain: ", row[0])
-                    blocked      = row[0]
-                    origin       = row[1]
-                    nodeinfo_url = row[2]
-
-                logger.debug("Looking up instance by domain:", blocked)
-                if not validators.domain(blocked):
-                    logger.warning(f"blocked='{blocked}' is not a valid domain name - SKIPPED!")
-                    continue
-                elif blocked.endswith(".arpa"):
-                    logger.warning(f"blocked='{blocked}' is a reversed .arpa domain and should not be used generally.")
-                    continue
-                elif blocked.endswith(".tld"):
-                    logger.warning(f"blocked='{blocked}' is a fake domain, please don't crawl them!")
-                    continue
-                elif blacklist.is_blacklisted(blocked):
-                    logger.debug("blocked='%s' is blacklisted - SKIPPED!", blocked)
-                    continue
-                elif not instances.is_registered(blocked):
-                    logger.debug(f"Domain blocked='{blocked}' wasn't found, adding ..., domain='{domain}',origin='{origin}',nodeinfo_url='{nodeinfo_url}'")
-                    instances.add(blocked, domain, inspect.currentframe().f_code.co_name, nodeinfo_url)
-
-                logger.debug("Looking up instance by domain:", blocked)
-                if not validators.domain(blocked):
-                    logger.warning(f"blocked='{blocked}' is not a valid domain name - SKIPPED!")
-                    continue
-                elif blocked.endswith(".arpa"):
-                    logger.warning(f"blocked='{blocked}' is a reversed .arpa domain and should not be used generally.")
-                    continue
-                elif blocked.endswith(".tld"):
-                    logger.warning(f"blocked='{blocked}' is a fake domain, please don't crawl them!")
-                    continue
-                elif blacklist.is_blacklisted(blocked):
-                    logger.debug("blocked='%s' is blacklisted - SKIPPED!", blocked)
-                    continue
-                elif not instances.is_registered(blocked):
-                    logger.debug("Hash wasn't found, adding:", blocked, domain)
-                    instances.add(blocked, domain, inspect.currentframe().f_code.co_name, nodeinfo_url)
-
-                if not blocks.is_instance_blocked(domain, blocked, block_level):
-                    logger.debug("Blocking:", domain, blocked, block_level)
-                    blocks.add_instance(domain, blocked, reason, block_level)
-
-                    if block_level == "reject":
-                        found_blocks.append({
-                            "blocked": blocked,
-                            "reason" : reason
-                        })
-                else:
-                    logger.debug(f"Updating block last seen and reason for domain='{domain}',blocked='{blocked}' ...")
-                    blocks.update_last_seen(domain, blocked, block_level)
-                    blocks.update_reason(reason, domain, blocked, block_level)
-
-        logger.debug("Committing changes ...")
-        fba.connection.commit()
-    except network.exceptions as exception:
-        logger.warning(f"domain='{domain}',exception[{type(exception)}]:'{str(exception)}'")
-        instances.set_last_error(domain, exception)
-
-    logger.debug("EXIT!")
+            reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None
+
+            logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s'", domain, block["domain"], reason, block["severity"])
+            blocklist.append({
+                "blocker"    : domain,
+                "blocked"    : block["domain"],
+                "hash"       : block["digest"] if "digest" in block else None,
+                "reason"     : reason,
+                "block_level": blocks.alias_block_level(block["severity"]),
+            })
+    else:
+        logger.debug("domain='%s' has no block list", domain)
+
+    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
+    return blocklist