X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=fba%2Fhttp%2Ffederation.py;h=136a4ea3158b848c2af41e20b64fa4ca09b597cd;hb=c32f3df7a967ab56c13efde503e74b6fcf611f90;hp=e726ae5941ddf27074f047a99cc97bda7c17d03a;hpb=4d4dac367bd3d068f318f862a2560a0f02b4260a;p=fba.git

diff --git a/fba/http/federation.py b/fba/http/federation.py
index e726ae5..136a4ea 100644
--- a/fba/http/federation.py
+++ b/fba/http/federation.py
@@ -21,8 +21,7 @@ import bs4
 import requests
 import validators
 
-from fba import csrf
-
+from fba.helpers import blacklist
 from fba.helpers import config
 from fba.helpers import cookies
 from fba.helpers import domain as domain_helper
@@ -30,9 +29,11 @@ from fba.helpers import software as software_helper
 from fba.helpers import tidyup
 from fba.helpers import version
 
+from fba.http import csrf
 from fba.http import network
 from fba.http import nodeinfo
 
+from fba.models import blocks
 from fba.models import instances
 
 from fba.networks import lemmy
@@ -42,6 +43,12 @@ from fba.networks import peertube
 
 # Depth counter, being raised and lowered
 _DEPTH = 0
 
+# API paths
+_api_paths = [
+    "/api/v1/instance/peers",
+    "/api/v3/site",
+]
+
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
@@ -50,21 +57,25 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
     logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s',_DEPTH=%d - CALLED!", domain, origin, software, command, path, _DEPTH)
     domain_helper.raise_on(domain)
 
-    if not isinstance(origin, str) and origin is not None:
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function was invoked")
+    elif not isinstance(origin, str) and origin is not None:
         raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'")
     elif not isinstance(command, str):
         raise ValueError(f"Parameter command[]='{type(command)}' is not of type 'str'")
     elif command == "":
         raise ValueError("Parameter 'command' is empty")
-    elif command in ["fetch_blocks", "fetch_cs", "fetch_bkali", "fetch_relays", "fetch_fedipact", "fetch_joinmobilizon", "fetch_joinmisskey", "fetch_joinfediverse"] and origin is None:
+    elif command in ["fetch_blocks", "fetch_cs", "fetch_bkali", "fetch_relays", "fetch_fedipact", "fetch_joinmobilizon", "fetch_joinmisskey", "fetch_joinfediverse", "fetch_relaylist"] and origin is None:
         raise ValueError(f"Parameter command='{command}' but origin is None, please fix invoking this function.")
     elif not isinstance(path, str) and path is not None:
         raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+    elif path is not None and not path.startswith("/"):
+        raise ValueError(f"path='{path}' does not start with a slash")
     elif _DEPTH > 0 and instances.is_recent(domain, "last_instance_fetch"):
         raise ValueError(f"domain='{domain}' has recently been fetched but function was invoked")
     elif software is None and not instances.is_recent(domain, "last_nodeinfo"):
         try:
-            logger.debug("Software for domain='%s' is not set, determining ...", domain)
+            logger.debug("Software for domain='%s',path='%s' is not set, determining ...", domain, path)
             software = determine_software(domain, path)
         except network.exceptions as exception:
             logger.warning("Exception '%s' during determining software type", type(exception))
@@ -84,51 +95,62 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
         logger.debug("Adding new domain='%s',origin='%s',command='%s',path='%s',software='%s'", domain, origin, command, path, software)
         instances.add(domain, origin, command, path, software)
 
+    logger.debug("software='%s'", software)
+    if software_helper.is_relay(software):
+        logger.debug("software='%s' is a relay software - EXIT!", software)
+        _DEPTH = _DEPTH - 1
+        return
+
     logger.debug("Updating last_instance_fetch for domain='%s' ...", domain)
     instances.set_last_instance_fetch(domain)
 
     peerlist = list()
     logger.debug("software='%s'", software)
     if software is not None:
-        try:
-            logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin)
-            peerlist = fetch_peers(domain, software, origin)
-        except network.exceptions as exception:
-            logger.warning("Cannot fetch peers from domain='%s',software='%s': '%s'", domain, software, type(exception))
+        logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin)
+        peerlist = fetch_peers(domain, software, origin)
 
     logger.debug("peerlist[]='%s'", type(peerlist))
     if isinstance(peerlist, list):
         logger.debug("Invoking instances.set_total_peerlist(%s,%d) ...", domain, len(peerlist))
         instances.set_total_peers(domain, peerlist)
 
+    logger.debug("Invoking cookies.clear(%s) ...", domain)
+    cookies.clear(domain)
+
     logger.debug("peerlist[]='%s'", type(peerlist))
-    if peerlist is None or len(peerlist) == 0:
+    if peerlist is None:
         logger.warning("Cannot fetch peers: domain='%s',software='%s'", domain, software)
-
         if instances.has_pending(domain):
             logger.debug("Flushing updates for domain='%s' ...", domain)
-            instances.update_data(domain)
-
-        logger.debug("Invoking cookies.clear(%s) ...", domain)
-        cookies.clear(domain)
+            instances.update(domain)
 
         _DEPTH = _DEPTH - 1
         logger.debug("EXIT!")
         return
+    elif len(peerlist) == 0:
+        logger.info("domain='%s' returned an empty peer list.", domain)
+        if instances.has_pending(domain):
+            logger.debug("Flushing updates for domain='%s' ...", domain)
+            instances.update(domain)
+
+        _DEPTH = _DEPTH - 1
+        logger.debug("domain='%s',software='%s' has an empty peer list returned - EXIT!", domain, software)
+        return
 
     logger.info("Checking %d instance(s) from domain='%s',software='%s',depth=%d ...", len(peerlist), domain, software, _DEPTH)
     for instance in peerlist:
-        logger.debug("instance='%s'", instance)
+        logger.debug("instance[%s]='%s'", type(instance), instance)
         if instance is None or instance == "":
             logger.debug("instance[%s]='%s' is either None or empty - SKIPPED!", type(instance), instance)
             continue
 
         logger.debug("instance='%s' - BEFORE!", instance)
-        instance = tidyup.domain(instance)
+        instance = tidyup.domain(instance) if isinstance(instance, str) and instance != "" else None
         logger.debug("instance='%s' - AFTER!", instance)
 
-        if instance == "":
-            logger.warning("Empty instance after tidyup.domain(), domain='%s'", domain)
+        if instance is None or instance == "":
+            logger.warning("instance='%s' is empty after tidyup.domain(), domain='%s'", instance, domain)
             continue
         elif ".." in instance:
             logger.warning("instance='%s' contains double-dot, removing ...", instance)
@@ -151,7 +173,7 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
         logger.debug("Checking if domain='%s' has pending updates ...", domain)
         if instances.has_pending(domain):
             logger.debug("Flushing updates for domain='%s' ...", domain)
-            instances.update_data(domain)
+            instances.update(domain)
 
         logger.debug("instance='%s',origin='%s',_DEPTH=%d reached!", instance, origin, _DEPTH)
         if _DEPTH <= config.get("max_crawl_depth") and len(peerlist) >= config.get("min_peers_length"):
@@ -161,13 +183,10 @@ def fetch_instances(domain: str, origin: str, software: str, command: str, path:
             logger.debug("Adding instance='%s',domain='%s',command='%s',_DEPTH=%d ...", instance, domain, command, _DEPTH)
             instances.add(instance, domain, command)
 
-    logger.debug("Invoking cookies.clear(%s) ...", domain)
-    cookies.clear(domain)
-
     logger.debug("Checking if domain='%s' has pending updates ...", domain)
     if instances.has_pending(domain):
         logger.debug("Flushing updates for domain='%s' ...", domain)
-        instances.update_data(domain)
+        instances.update(domain)
 
     _DEPTH = _DEPTH - 1
     logger.debug("EXIT!")
@@ -176,8 +195,14 @@ def fetch_peers(domain: str, software: str, origin: str) -> list:
     logger.debug("domain='%s',software='%s',origin='%s' - CALLED!", domain, software, origin)
     domain_helper.raise_on(domain)
 
-    if not isinstance(software, str) and software is not None:
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function was invoked")
+    elif not isinstance(software, str) and software is not None:
         raise ValueError(f"Parameter software[]='{type(software)}' is not of type 'str'")
+    elif isinstance(software, str) and software == "":
+        raise ValueError("Parameter 'software' is empty")
+    elif software_helper.is_relay(software):
+        raise ValueError(f"domain='{domain}' is of software='{software}' and isn't supported here.")
     elif not isinstance(origin, str) and origin is not None:
         raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'")
     elif isinstance(origin, str) and origin == "":
@@ -206,25 +231,20 @@ def fetch_peers(domain: str, software: str, origin: str) -> list:
         logger.debug("Returning empty list ... - EXIT!")
         return list()
 
-    paths = [
-        "/api/v1/instance/peers",
-        "/api/v3/site",
-    ]
-
     # Init peers variable
     peers = list()
 
-    logger.debug("Checking %d paths ...", len(paths))
-    for path in paths:
+    logger.debug("Checking %d API paths ...", len(_api_paths))
+    for path in _api_paths:
         logger.debug("Fetching path='%s' from domain='%s',software='%s' ...", path, domain, software)
         data = network.get_json_api(
             domain,
             path,
-            headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
+            headers=headers,
+            timeout=(config.get("connection_timeout"), config.get("read_timeout"))
         )
 
-        logger.debug("data[]='%s'", type(data))
+        logger.debug("data(%d)[]='%s'", len(data), type(data))
         if "error_message" in data:
             logger.debug("Was not able to fetch peers from path='%s',domain='%s' ...", path, domain)
             instances.set_last_error(domain, data)
@@ -250,25 +270,28 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
     logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
     domain_helper.raise_on(domain)
 
-    if not isinstance(path, str):
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function was invoked")
+    elif not isinstance(path, str):
         raise ValueError(f"path[]='{type(path)}' is not of type 'str'")
     elif path == "":
         raise ValueError("Parameter 'path' is empty")
+    elif not path.startswith("/"):
+        raise ValueError(f"path='{path}' does not start with / but should")
 
-    logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
     software = None
 
     logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
     response = network.fetch_response(
         domain,
         path,
-        network.web_headers,
-        (config.get("connection_timeout"), config.get("read_timeout")),
+        headers=network.web_headers,
+        timeout=(config.get("connection_timeout"), config.get("read_timeout")),
         allow_redirects=True
     )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
-    if ((response.ok and response.status_code < 300) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
+    if ((response.ok and response.status_code == 200) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
         logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
 
         doc = bs4.BeautifulSoup(response.text, "html.parser")
@@ -279,15 +302,15 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
         app_name   = doc.find("meta", {"name" : "application-name"})
 
         logger.debug("generator[]='%s',site_name[]='%s',platform[]='%s',app_name[]='%s'", type(generator), type(site_name), type(platform), type(app_name))
-        if isinstance(platform, bs4.element.Tag) and isinstance(platform.get("content"), str):
+        if isinstance(platform, bs4.element.Tag) and isinstance(platform.get("content"), str) and platform.get("content") != "":
             logger.debug("Found property=og:platform, domain='%s'", domain)
             software = tidyup.domain(platform.get("content"))
+            logger.debug("software[%s]='%s' after tidyup.domain() ...", type(software), software)
 
-            logger.debug("software[%s]='%s'", type(software), software)
             if software is not None and software != "":
                 logger.debug("domain='%s' has og:platform='%s' - Setting detection_mode=PLATFORM ...", domain, software)
                 instances.set_detection_mode(domain, "PLATFORM")
-        elif isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str):
+        elif isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str) and generator.get("content") != "":
             logger.debug("Found generator meta tag: domain='%s'", domain)
             software = tidyup.domain(generator.get("content"))
 
@@ -295,7 +318,7 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
             if software is not None and software != "":
                 logger.info("domain='%s' is generated by software='%s' - Setting detection_mode=GENERATOR ...", domain, software)
                 instances.set_detection_mode(domain, "GENERATOR")
-        elif isinstance(app_name, bs4.element.Tag) and isinstance(app_name.get("content"), str):
+        elif isinstance(app_name, bs4.element.Tag) and isinstance(app_name.get("content"), str) and app_name.get("content") != "":
             logger.debug("Found property=og:app_name, domain='%s'", domain)
             software = tidyup.domain(app_name.get("content"))
 
@@ -303,7 +326,7 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
             if software is not None and software != "":
                 logger.debug("domain='%s' has application-name='%s' - Setting detection_mode=app_name ...", domain, software)
                 instances.set_detection_mode(domain, "APP_NAME")
-        elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str):
+        elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str) and site_name.get("content") != "":
             logger.debug("Found property=og:site_name, domain='%s'", domain)
             software = tidyup.domain(site_name.get("content"))
 
@@ -315,11 +338,15 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
         logger.warning("domain='%s' doesn't match response.url='%s', maybe redirect to other domain?", domain, response.url)
 
         components = urlparse(response.url)
+        domain2 = components.netloc.lower().split(":")[0]
 
-        logger.debug("components[]='%s'", type(components))
-        if not instances.is_registered(components.netloc):
+        logger.debug("domain2='%s'", domain2)
+        if not domain_helper.is_wanted(domain2):
+            logger.debug("domain2='%s' is not wanted - EXIT!", domain2)
+            return None
+        elif not instances.is_registered(domain2):
             logger.info("components.netloc='%s' is not registered, adding ...", components.netloc)
-            fetch_instances(components.netloc, domain, None, "fetch_generator")
+            instances.add(domain2, domain, "redirect_target")
 
         message = f"Redirect from domain='{domain}' to response.url='{response.url}'"
         instances.set_last_error(domain, message)
@@ -337,7 +364,7 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
         logger.debug("software='%s' may contain a version number, domain='%s', removing it ...", software, domain)
         software = version.remove(software)
 
-    logger.debug("software[]='%s'", type(software))
+    logger.debug("software[%s]='%s'", type(software), software)
     if isinstance(software, str) and "powered by " in software:
         logger.debug("software='%s' has 'powered by' in it", software)
         software = version.remove(software_helper.strip_powered_by(software))
@@ -351,22 +378,24 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
         logger.debug("software='%s' has ' see ' in it", software)
         software = software_helper.strip_until(software, " see ")
 
-    logger.debug("software='%s' - EXIT!", software)
+    logger.debug("software[%s]='%s' - EXIT!", type(software), software)
     return software
 
 def determine_software(domain: str, path: str = None) -> str:
     logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
     domain_helper.raise_on(domain)
 
-    if not isinstance(path, str) and path is not None:
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function was invoked")
+    elif not isinstance(path, str) and path is not None:
         raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+    elif path is not None and not path.startswith("/"):
+        raise ValueError(f"path='{path}' does not start with a slash")
 
-    logger.debug("Determining software for domain='%s',path='%s'", domain, path)
+    logger.debug("Fetching nodeinfo from domain='%s',path='%s' ...", domain, path)
+    data = nodeinfo.fetch(domain, path)
     software = None
 
-    logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
-    data = nodeinfo.fetch_nodeinfo(domain, path)
-
     logger.debug("data[%s]='%s'", type(data), data)
     if "exception" in data:
         # Continue raising it
@@ -418,7 +447,7 @@ def determine_software(domain: str, path: str = None) -> str:
         logger.debug("Generator for domain='%s' is: '%s'", domain, software)
 
     logger.debug("software[%s]='%s'", type(software), software)
-    if software is None:
+    if software is None or software == "":
         logger.debug("Returning None - EXIT!")
         return None
 
@@ -440,7 +469,7 @@ def determine_software(domain: str, path: str = None) -> str:
 
     software = software.strip()
 
-    logger.debug("software='%s' - EXIT!", software)
+    logger.debug("software[%s]='%s' - EXIT!", type(software), software)
     return software
 
 def find_domains(tag: bs4.element.Tag) -> list:
@@ -466,7 +495,7 @@ def find_domains(tag: bs4.element.Tag) -> list:
             logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
             continue
         elif domain == "gab.com/.ai, develop.gab.com":
-            logger.debug("Multiple domains detected in one row")
+            logger.debug("Multiple gab.com domains detected in one row")
             domains.append({
                 "domain": "gab.com",
                 "reason": reason,
@@ -500,7 +529,7 @@ def add_peers(rows: dict) -> list:
     peers = list()
 
     for key in ["linked", "allowed", "blocked"]:
-        logger.debug("Checking key='%s'", key)
+        logger.debug("key='%s'", key)
         if key not in rows or rows[key] is None:
             logger.debug("Cannot find key='%s' or it is NoneType - SKIPPED!", key)
             continue
@@ -530,3 +559,101 @@
 
     logger.debug("peers()=%d - EXIT!", len(peers))
     return peers
+
+def fetch_blocks(domain: str) -> list:
+    logger.debug("domain='%s' - CALLED!", domain)
+    domain_helper.raise_on(domain)
+
+    if not instances.is_registered(domain):
+        raise Exception(f"domain='{domain}' is not registered but function is invoked.")
+    elif blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function was invoked")
+
+    # Init block list
+    blocklist = list()
+
+    # No CSRF by default, you don't have to add network.api_headers by yourself here
+    headers = tuple()
+
+    try:
+        logger.debug("Checking CSRF for domain='%s'", domain)
+        headers = csrf.determine(domain, dict())
+    except network.exceptions as exception:
+        logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
+        instances.set_last_error(domain, exception)
+
+        logger.debug("Returning empty list ... - EXIT!")
+        return list()
+
+    try:
+        # json endpoint for newer mastodongs
+        logger.info("Fetching domain_blocks from domain='%s' ...", domain)
+        data = network.get_json_api(
+            domain,
+            "/api/v1/instance/domain_blocks",
+            headers=headers,
+            timeout=(config.get("connection_timeout"), config.get("read_timeout"))
+        )
+        rows = list()
+
+        logger.debug("data(%d)[]='%s'", len(data), type(data))
+        if "error_message" in data:
+            logger.debug("Was not able to fetch domain_blocks from domain='%s': status_code=%d,error_message='%s'", domain, data['status_code'], data['error_message'])
+            instances.set_last_error(domain, data)
+
+            logger.debug("blocklist()=%d - EXIT!", len(blocklist))
+            return blocklist
+        elif "json" in data and "error" in data["json"]:
+            logger.warning("JSON API returned error message: '%s'", data["json"]["error"])
+            instances.set_last_error(domain, data)
+
+            logger.debug("blocklist()=%d - EXIT!", len(blocklist))
+            return blocklist
+        else:
+            # Getting blocklist
+            rows = data["json"]
+
+        logger.debug("Marking domain='%s' as successfully handled ...", domain)
+        instances.set_success(domain)
+
+        logger.debug("rows(%d)[]='%s'", len(rows), type(rows))
+        if len(rows) > 0:
+            logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
+            for block in rows:
+                # Check type
+                logger.debug("block[]='%s'", type(block))
+                if not isinstance(block, dict):
+                    logger.debug("block[]='%s' is of type 'dict' - SKIPPED!", type(block))
+                    continue
+                elif "domain" not in block:
+                    logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
+                    continue
+                elif "severity" not in block:
+                    logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
+                    continue
+                elif block["severity"] in ["accept", "accepted"]:
+                    logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"])
+                    continue
+                elif "digest" in block and not validators.hashes.sha256(block["digest"]):
+                    logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"])
+                    continue
+
+                reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None
+
+                logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s' ...", domain, block["domain"], reason, block["severity"])
+                blocklist.append({
+                    "blocker"    : domain,
+                    "blocked"    : block["domain"],
+                    "digest"     : block["digest"] if "digest" in block else None,
+                    "reason"     : reason,
+                    "block_level": blocks.alias_block_level(block["severity"]),
+                })
        else:
            logger.debug("domain='%s' has no block list", domain)

    except network.exceptions as exception:
        logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
    return blocklist
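
Usage sketch (illustrative, not part of the commit above): the new fetch_blocks() returns one dict per blocked domain, using the keys built in the blocklist.append() call; the import path follows this file's location, while the caller function and its output formatting are assumptions for illustration.

    from fba.http import federation

    def print_blocks(domain: str) -> None:
        # fetch_blocks() expects an already registered, non-blacklisted domain
        # and yields entries with: blocker, blocked, digest, reason, block_level
        for entry in federation.fetch_blocks(domain):
            print(f"{entry['blocker']} blocks {entry['blocked']} "
                  f"[{entry['block_level']}] reason={entry['reason']}")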