X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=fba%2Fhttp%2Ffederation.py;h=f04fd3fe2f42deab6de2135d6b52f7aa4fcd7eec;hb=213d26eb30a0b472ce4b0cfaf95a2e96e6cbc28e;hp=49ee0e16075732f82c00a3d502b1bdd7b6117305;hpb=7f4d598905d28b697eff26c4d231b9b6cc61d787;p=fba.git diff --git a/fba/http/federation.py b/fba/http/federation.py index 49ee0e1..f04fd3f 100644 --- a/fba/http/federation.py +++ b/fba/http/federation.py @@ -18,128 +18,206 @@ import logging from urllib.parse import urlparse import bs4 +import requests import validators -from fba import csrf -from fba import utils - +from fba.helpers import blacklist from fba.helpers import config +from fba.helpers import cookies from fba.helpers import domain as domain_helper +from fba.helpers import software as software_helper from fba.helpers import tidyup from fba.helpers import version +from fba.http import csrf from fba.http import network +from fba.http import nodeinfo +from fba.models import blocks from fba.models import instances from fba.networks import lemmy from fba.networks import misskey from fba.networks import peertube -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) +# Depth counter, being raised and lowered +_DEPTH = 0 -# "rel" identifiers (no real URLs) -nodeinfo_identifier = [ - "https://nodeinfo.diaspora.software/ns/schema/2.1", - "https://nodeinfo.diaspora.software/ns/schema/2.0", - "https://nodeinfo.diaspora.software/ns/schema/1.1", - "https://nodeinfo.diaspora.software/ns/schema/1.0", - "http://nodeinfo.diaspora.software/ns/schema/2.1", - "http://nodeinfo.diaspora.software/ns/schema/2.0", - "http://nodeinfo.diaspora.software/ns/schema/1.1", - "http://nodeinfo.diaspora.software/ns/schema/1.0", +# API paths +_api_paths = [ + "/api/v1/instance/peers", + "/api/v3/site", ] +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + def fetch_instances(domain: str, origin: str, software: str, command: str, path: str = None): - logger.debug(f"domain='{domain}',origin='{origin}',software='{software}',path='{path}' - CALLED!") + global _DEPTH + logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s',_DEPTH=%d - CALLED!", domain, origin, software, command, path, _DEPTH) domain_helper.raise_on(domain) - if not isinstance(origin, str) and origin is not None: - raise ValueError(f"Parameter origin[]='{type(origin)}' is not 'str'") - elif software is None: - logger.debug(f"Updating last_instance_fetch for domain='{domain}' ...") - instances.set_last_instance_fetch(domain) - - logger.debug(f"software for domain='{domain}' is not set, determining ...") - software = None + if blacklist.is_blacklisted(domain): + raise Exception(f"domain='{domain}' is blacklisted but function was invoked") + elif not isinstance(origin, str) and origin is not None: + raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'") + elif not isinstance(command, str): + raise ValueError(f"Parameter command[]='{type(command)}' is not of type 'str'") + elif command == "": + raise ValueError("Parameter 'command' is empty") + elif command in ["fetch_blocks", "fetch_cs", "fetch_bkali", "fetch_relays", "fetch_fedipact", "fetch_joinmobilizon", "fetch_joinmisskey", "fetch_joinfediverse", "fetch_relaylist"] and origin is None: + raise ValueError(f"Parameter command='{command}' but origin is None, please fix invoking this function.") + elif not isinstance(path, str) and path is not None: + raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'") + elif path is not None and 
not path.startswith("/"): + raise ValueError(f"path='{path}' does not start with a slash") + elif _DEPTH > 0 and instances.is_recent(domain, "last_instance_fetch"): + raise ValueError(f"domain='{domain}' has recently been fetched but function was invoked") + elif software is None and not instances.is_recent(domain, "last_nodeinfo"): try: + logger.debug("Software for domain='%s',path='%s' is not set, determining ...", domain, path) software = determine_software(domain, path) except network.exceptions as exception: logger.warning("Exception '%s' during determining software type", type(exception)) instances.set_last_error(domain, exception) - logger.debug(f"Determined software='{software}' for domain='{domain}'") + logger.debug("Determined software='%s' for domain='%s'", software, domain) + elif software is None: + logger.debug("domain='%s' has unknown software or nodeinfo has recently being fetched", domain) elif not isinstance(software, str): - raise ValueError(f"Parameter software[]='{type(software)}' is not 'str'") - elif not isinstance(command, str): - raise ValueError(f"Parameter command[]='{type(command)}' is not 'str'") - elif command == "": - raise ValueError("Parameter 'command' is empty") + raise ValueError(f"Parameter software[]='{type(software)}' is not of type 'str'") + # Increase depth + _DEPTH = _DEPTH + 1 + + logger.debug("Checking if domain='%s' is registered ...", domain) if not instances.is_registered(domain): - logger.debug(f"Adding new domain='{domain}',origin='{origin}',command='{command}',path='{path}',software='{software}'") + logger.debug("Adding new domain='%s',origin='%s',command='%s',path='%s',software='%s'", domain, origin, command, path, software) instances.add(domain, origin, command, path, software) - logger.debug(f"Updating last_instance_fetch for domain='{domain}' ...") + logger.debug("software='%s'", software) + if software_helper.is_relay(software): + logger.debug("software='%s' is a relay software - EXIT!", software) + _DEPTH = _DEPTH - 1 + return + + logger.debug("Updating last_instance_fetch for domain='%s' ...", domain) instances.set_last_instance_fetch(domain) - logger.debug("Fetching instances for domain='%s',software='%s'", domain, software) - peerlist = fetch_peers(domain, software) + peerlist = list() + logger.debug("software='%s'", software) + if software is not None: + logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin) + peerlist = fetch_peers(domain, software, origin) + logger.debug("peerlist[]='%s'", type(peerlist)) + if isinstance(peerlist, list): + logger.debug("Invoking instances.set_total_peerlist(%s,%d) ...", domain, len(peerlist)) + instances.set_total_peers(domain, peerlist) + + logger.debug("Invoking cookies.clear(%s) ...", domain) + cookies.clear(domain) + + logger.debug("peerlist[]='%s'", type(peerlist)) if peerlist is None: - logger.warning("Cannot fetch peers: domain='%s'", domain) + logger.warning("Cannot fetch peers: domain='%s',software='%s'", domain, software) + if instances.has_pending(domain): + logger.debug("Flushing updates for domain='%s' ...", domain) + instances.update(domain) + + _DEPTH = _DEPTH - 1 + logger.debug("EXIT!") + return + elif len(peerlist) == 0: + logger.info("domain='%s' returned an empty peer list.", domain) + if instances.has_pending(domain): + logger.debug("Flushing updates for domain='%s' ...", domain) + instances.update(domain) + + _DEPTH = _DEPTH - 1 + logger.debug("domain='%s',software='%s' has an empty peer list returned - EXIT!", domain, 
software) return - elif instances.has_pending(domain): - logger.debug(f"domain='{domain}' has pending nodeinfo data, flushing ...") - instances.update_data(domain) - logger.info("Checking %d instances from domain='%s' ...", len(peerlist), domain) + logger.info("Checking %d instance(s) from domain='%s',software='%s',depth=%d ...", len(peerlist), domain, software, _DEPTH) for instance in peerlist: - logger.debug(f"instance='{instance}'") - if instance is None: - # Skip "None" types as tidup.domain() cannot parse them + logger.debug("instance[%s]='%s'", type(instance), instance) + if instance in [None, ""]: + logger.debug("instance[%s]='%s' is either None or empty - SKIPPED!", type(instance), instance) continue - logger.debug(f"instance='{instance}' - BEFORE") - instance = tidyup.domain(instance) - logger.debug(f"instance='{instance}' - AFTER") + logger.debug("instance='%s' - BEFORE!", instance) + instance = tidyup.domain(instance) if isinstance(instance, str) and instance != "" else None + logger.debug("instance='%s' - AFTER!", instance) - if instance == "": - logger.warning("Empty instance after tidyup.domain(), domain='%s'", domain) + if instance in [None, ""]: + logger.warning("instance='%s' is empty after tidyup.domain(), domain='%s'", instance, domain) continue - elif not utils.is_domain_wanted(instance): + elif ".." in instance: + logger.warning("instance='%s' contains double-dot, removing ...", instance) + instance = instance.replace("..", ".") + + logger.debug("instance='%s' - BEFORE!", instance) + instance = instance.encode("idna").decode("utf-8") + logger.debug("instance='%s' - AFTER!", instance) + + if not domain_helper.is_wanted(instance): logger.debug("instance='%s' is not wanted - SKIPPED!", instance) continue - elif instance.find("/profile/") > 0 or instance.find("/users/") > 0: + elif instance.find("/profile/") > 0 or instance.find("/users/") > 0 or (instances.is_registered(instance.split("/")[0]) and instance.find("/c/") > 0): logger.debug("instance='%s' is a link to a single user profile - SKIPPED!", instance) continue + elif instance.find("/tag/") > 0: + logger.debug("instance='%s' is a link to a tag - SKIPPED!", instance) + continue elif not instances.is_registered(instance): - logger.debug("Adding new instance='%s',domain='%s',command='%s'", instance, domain, command) - instances.add(instance, domain, command) + logger.debug("Checking if domain='%s' has pending updates ...", domain) + if instances.has_pending(domain): + logger.debug("Flushing updates for domain='%s' ...", domain) + instances.update(domain) + + logger.debug("instance='%s',origin='%s',_DEPTH=%d reached!", instance, origin, _DEPTH) + if _DEPTH <= config.get("max_crawl_depth") and len(peerlist) >= config.get("min_peers_length"): + logger.debug("Fetching instance='%s',origin='%s',command='%s',path='%s',_DEPTH=%d ...", instance, domain, command, path, _DEPTH) + fetch_instances(instance, domain, None, command, path) + else: + logger.debug("Adding instance='%s',domain='%s',command='%s',_DEPTH=%d ...", instance, domain, command, _DEPTH) + instances.add(instance, domain, command) + + logger.debug("Checking if domain='%s' has pending updates ...", domain) + if instances.has_pending(domain): + logger.debug("Flushing updates for domain='%s' ...", domain) + instances.update(domain) + _DEPTH = _DEPTH - 1 logger.debug("EXIT!") -def fetch_peers(domain: str, software: str) -> list: - logger.debug(f"domain({len(domain)})='{domain}',software='{software}' - CALLED!") +def fetch_peers(domain: str, software: str, origin: str) 
-> list: + logger.debug("domain='%s',software='%s',origin='%s' - CALLED!", domain, software, origin) domain_helper.raise_on(domain) - if not isinstance(software, str) and software is not None: - raise ValueError(f"software[]='{type(software)}' is not 'str'") + if blacklist.is_blacklisted(domain): + raise Exception(f"domain='{domain}' is blacklisted but function was invoked") + elif not isinstance(software, str) and software is not None: + raise ValueError(f"Parameter software[]='{type(software)}' is not of type 'str'") + elif isinstance(software, str) and software == "": + raise ValueError("Parameter 'software' is empty") + elif software_helper.is_relay(software): + raise ValueError(f"domain='{domain}' is of software='{software}' and isn't supported here.") + elif not isinstance(origin, str) and origin is not None: + raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'") + elif isinstance(origin, str) and origin == "": + raise ValueError("Parameter 'origin' is empty") if software == "misskey": - logger.debug(f"Invoking misskey.fetch_peers({domain}) ...") + logger.debug("Invoking misskey.fetch_peers(%s) ...", domain) return misskey.fetch_peers(domain) elif software == "lemmy": - logger.debug(f"Invoking lemmy.fetch_peers({domain}) ...") - return lemmy.fetch_peers(domain) + logger.debug("Invoking lemmy.fetch_peers(%s,%s) ...", domain, origin) + return lemmy.fetch_peers(domain, origin) elif software == "peertube": - logger.debug(f"Invoking peertube.fetch_peers({domain}) ...") + logger.debug("Invoking peertube.fetch_peers(%s) ...", domain) return peertube.fetch_peers(domain) - # Init peers variable - peers = list() - # No CSRF by default, you don't have to add network.api_headers by yourself here headers = tuple() @@ -147,340 +225,236 @@ def fetch_peers(domain: str, software: str) -> list: logger.debug("Checking CSRF for domain='%s'", domain) headers = csrf.determine(domain, dict()) except network.exceptions as exception: - logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__) + logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__) instances.set_last_error(domain, exception) - return peers - logger.debug(f"Fetching peers from '{domain}',software='{software}' ...") - data = network.get_json_api( - domain, - "/api/v1/instance/peers", - headers, - (config.get("connection_timeout"), config.get("read_timeout")) - ) + logger.debug("Returning empty list ... 
- EXIT!") + return list() - logger.debug("data[]='%s'", type(data)) - if "error_message" in data: - logger.debug("Was not able to fetch peers, trying alternative ...") + # Init peers variable + peers = list() + + logger.debug("Checking %d API paths ...", len(_api_paths)) + for path in _api_paths: + logger.debug("Fetching path='%s' from domain='%s',software='%s' ...", path, domain, software) data = network.get_json_api( domain, - "/api/v3/site", - headers, - (config.get("connection_timeout"), config.get("read_timeout")) + path, + headers=headers, + timeout=(config.get("connection_timeout"), config.get("read_timeout")) ) - logger.debug("data[]='%s'", type(data)) + logger.debug("data(%d)[]='%s'", len(data), type(data)) if "error_message" in data: - logger.warning("Could not reach any JSON API at domain='%s',status_code='%d',error_message='%s'", domain, data['status_code'], data['error_message']) - elif "federated_instances" in data["json"]: - logger.debug("Found federated_instances for domain='%s'", domain) - peers = peers + add_peers(data["json"]["federated_instances"]) - logger.debug("Added instance(s) to peers") - else: - message = "JSON response does not contain 'federated_instances' or 'error_message'" - logger.warning("message='%s',domain='%s'", message, domain) - instances.set_last_error(domain, message) - elif isinstance(data["json"], list): - logger.debug("Querying API was successful: domain='%s',data[json]()=%d", domain, len(data['json'])) - peers = data["json"] - else: - logger.warning("Cannot parse data[json][]='%s'", type(data['json'])) + logger.debug("Was not able to fetch peers from path='%s',domain='%s' ...", path, domain) + instances.set_last_error(domain, data) + elif "json" in data and len(data["json"]) > 0: + logger.debug("Querying API path='%s' was successful: domain='%s',data[json][%s]()=%d", path, domain, type(data['json']), len(data['json'])) + peers = data["json"] + + logger.debug("Marking domain='%s' as successfully handled ...", domain) + instances.set_success(domain) + break + + if not isinstance(peers, list): + logger.warning("peers[]='%s' is not of type 'list', maybe bad API response?", type(peers)) + peers = list() - logger.debug("Adding %d for domain='%s'", len(peers), domain) + logger.debug("Invoking instances.set_total_peers(%s,%d) ...", domain, len(peers)) instances.set_total_peers(domain, peers) logger.debug("peers()=%d - EXIT!", len(peers)) return peers -def fetch_nodeinfo(domain: str, path: str = None) -> dict: - logger.debug("domain='%s',path='%s' - CALLED!", domain, path) - domain_helper.raise_on(domain) - - if not isinstance(path, str) and path is not None: - raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'") - - logger.debug("Fetching nodeinfo from domain='%s' ...", domain) - nodeinfo = fetch_wellknown_nodeinfo(domain) - - logger.debug("nodeinfo[%s]({len(nodeinfo)}='%s'", type(nodeinfo), nodeinfo) - if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0: - logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json'])) - return nodeinfo["json"] - - # No CSRF by default, you don't have to add network.api_headers by yourself here - headers = tuple() - data = dict() - - try: - logger.debug("Checking CSRF for domain='%s'", domain) - headers = csrf.determine(domain, dict()) - except network.exceptions as exception: - logger.warning("Exception '%s' during checking CSRF (nodeinfo,%s) - EXIT!", type(exception), __name__) - instances.set_last_error(domain, exception) - return { - "status_code" : 500, - 
"error_message": f"exception[{type(exception)}]='{str(exception)}'", - "exception" : exception, - } - - request_paths = [ - "/nodeinfo/2.1.json", - "/nodeinfo/2.1", - "/nodeinfo/2.0.json", - "/nodeinfo/2.0", - "/nodeinfo/1.0", - "/api/v1/instance" - ] - - for request in request_paths: - logger.debug("path[%s]='%s',request='%s'", type(path), path, request) - if path is None or path == request or path == f"http://{domain}{path}" or path == f"https://{domain}{path}": - logger.debug("Fetching request='%s' from domain='%s' ...", request, domain) - if path in [f"http://{domain}{path}", f"https://{domain}{path}"]: - logger.debug("domain='%s',path='%s' has protocol in path, splitting ...", domain, path) - components = urlparse(path) - path = components.path - - data = network.get_json_api( - domain, - request, - headers, - (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")) - ) - - logger.debug("data[]='%s'", type(data)) - if "error_message" not in data: - logger.debug("Success: request='%s'", request) - instances.set_detection_mode(domain, "STATIC_CHECK") - instances.set_nodeinfo_url(domain, request) - break - - logger.warning("Failed fetching nodeinfo from domain='%s',status_code='%s',error_message='%s'", domain, data['status_code'], data['error_message']) - - logger.debug("data()=%d - EXIT!", len(data)) - return data - -def fetch_wellknown_nodeinfo(domain: str) -> dict: - logger.debug("domain(%d)='%s' - CALLED!", len(domain), domain) - domain_helper.raise_on(domain) - - # No CSRF by default, you don't have to add network.api_headers by yourself here - headers = tuple() - - try: - logger.debug("Checking CSRF for domain='%s'", domain) - headers = csrf.determine(domain, dict()) - except network.exceptions as exception: - logger.warning("Exception '%s' during checking CSRF (fetch_wellknown_nodeinfo,%s) - EXIT!", type(exception), __name__) - instances.set_last_error(domain, exception) - return { - "status_code" : 500, - "error_message": type(exception), - "exception" : exception, - } - - logger.debug("Fetching .well-known info for domain='%s'", domain) - data = network.get_json_api( - domain, - "/.well-known/nodeinfo", - headers, - (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")) - ) - - if "error_message" not in data: - nodeinfo = data["json"] - logger.debug("Found entries: nodeinfo()=%d,domain='%s'", len(nodeinfo), domain) - if "links" in nodeinfo: - logger.debug("Found nodeinfo[links]()=%d record(s)", len(nodeinfo["links"])) - for link in nodeinfo["links"]: - logger.debug("link[%s]='%s'", type(link), link) - if not isinstance(link, dict) or not "rel" in link: - logger.warning("link[]='%s' is not 'dict' or no element 'rel' found", type(link)) - elif link["rel"] in nodeinfo_identifier: - # Default is that 'href' has a complete URL, but some hosts don't send that - url = link["href"] - components = urlparse(link["href"]) - - logger.debug("components[%s]='%s'", type(components), components) - if components.scheme == "" and components.netloc == "": - logger.debug("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain) - url = f"https://{domain}{url}" - components = urlparse(url) - - if not utils.is_domain_wanted(components.netloc): - logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc) - continue - - logger.debug("Fetching nodeinfo from url='%s' ...", url) - data = network.fetch_api_url( - url, - (config.get("connection_timeout"), config.get("read_timeout")) - ) - 
- logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data)) - if "error_message" not in data and "json" in data: - logger.debug("Found JSON nodeinfo()=%d", len(data)) - instances.set_detection_mode(domain, "AUTO_DISCOVERY") - instances.set_nodeinfo_url(domain, link["href"]) - break - else: - instances.set_last_error(domain, data) - else: - logger.warning("Unknown 'rel' value: domain='%s',link[rel]='%s'", domain, link["rel"]) - else: - logger.warning("nodeinfo does not contain 'links': domain='%s'", domain) - - logger.debug("Returning data[]='%s' - EXIT!", type(data)) - return data - def fetch_generator_from_path(domain: str, path: str = "/") -> str: - logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path) + logger.debug("domain='%s',path='%s' - CALLED!", domain, path) domain_helper.raise_on(domain) - if not isinstance(path, str): - raise ValueError(f"path[]='{type(path)}' is not 'str'") + if blacklist.is_blacklisted(domain): + raise Exception(f"domain='{domain}' is blacklisted but function was invoked") + elif not isinstance(path, str): + raise ValueError(f"path[]='{type(path)}' is not of type 'str'") elif path == "": raise ValueError("Parameter 'path' is empty") + elif not path.startswith("/"): + raise ValueError(f"path='{path}' does not start with / but should") - logger.debug("domain='%s',path='%s' - CALLED!", domain, path) software = None logger.debug("Fetching path='%s' from domain='%s' ...", path, domain) - response = network.fetch_response(domain, path, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) + response = network.fetch_response( + domain, + path, + headers=network.web_headers, + timeout=(config.get("connection_timeout"), config.get("read_timeout")), + allow_redirects=True + ) logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text)) - if response.ok and response.status_code < 300 and response.text.find(" 0: + if ((response.ok and response.status_code == 200) or response.status_code == 410) and response.text.find(" 0 and domain_helper.is_in_url(domain, response.url): logger.debug("Parsing response.text()=%d Bytes ...", len(response.text)) doc = bs4.BeautifulSoup(response.text, "html.parser") logger.debug("doc[]='%s'", type(doc)) + platform = doc.find("meta", {"property": "og:platform"}) generator = doc.find("meta", {"name" : "generator"}) site_name = doc.find("meta", {"property": "og:site_name"}) + app_name = doc.find("meta", {"name" : "application-name"}) + + logger.debug("generator[]='%s',site_name[]='%s',platform[]='%s',app_name[]='%s'", type(generator), type(site_name), type(platform), type(app_name)) + if isinstance(platform, bs4.element.Tag) and isinstance(platform.get("content"), str) and platform.get("content") != "": + logger.debug("Found property=og:platform, domain='%s'", domain) + software = tidyup.domain(platform.get("content")) + logger.debug("software[%s]='%s' after tidyup.domain() ...", type(software), software) - logger.debug("generator[]='%s',site_name[]='%s'", type(generator), type(site_name)) - if isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str): + if software is not None and software != "": + logger.debug("domain='%s' has og:platform='%s' - Setting detection_mode=PLATFORM ...", domain, software) + instances.set_detection_mode(domain, "PLATFORM") + elif isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str) and generator.get("content") != "": 
logger.debug("Found generator meta tag: domain='%s'", domain) software = tidyup.domain(generator.get("content")) logger.debug("software[%s]='%s'", type(software), software) if software is not None and software != "": - logger.info("domain='%s' is generated by '%s'", domain, software) + logger.info("domain='%s' is generated by software='%s' - Setting detection_mode=GENERATOR ...", domain, software) instances.set_detection_mode(domain, "GENERATOR") - elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str): + elif isinstance(app_name, bs4.element.Tag) and isinstance(app_name.get("content"), str) and app_name.get("content") != "": + logger.debug("Found property=og:app_name, domain='%s'", domain) + software = tidyup.domain(app_name.get("content")) + + logger.debug("software[%s]='%s'", type(software), software) + if software is not None and software != "": + logger.debug("domain='%s' has application-name='%s' - Setting detection_mode=app_name ...", domain, software) + instances.set_detection_mode(domain, "APP_NAME") + elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str) and site_name.get("content") != "": logger.debug("Found property=og:site_name, domain='%s'", domain) software = tidyup.domain(site_name.get("content")) logger.debug("software[%s]='%s'", type(software), software) if software is not None and software != "": - logger.info("domain='%s' has og:site_name='%s'", domain, software) + logger.debug("domain='%s' has og:site_name='%s' - Setting detection_mode=SITE_NAME ...", domain, software) instances.set_detection_mode(domain, "SITE_NAME") + elif not domain_helper.is_in_url(domain, response.url): + logger.warning("domain='%s' doesn't match response.url='%s', maybe redirect to other domain?", domain, response.url) + + components = urlparse(response.url) + domain2 = components.netloc.lower().split(":")[0] + + logger.debug("domain2='%s'", domain2) + if not domain_helper.is_wanted(domain2): + logger.debug("domain2='%s' is not wanted - EXIT!", domain2) + return None + elif not instances.is_registered(domain2): + logger.info("components.netloc='%s' is not registered, adding ...", components.netloc) + instances.add(domain2, domain, "redirect_target") + + message = f"Redirect from domain='{domain}' to response.url='{response.url}'" + instances.set_last_error(domain, message) + instances.set_software(domain, None) + instances.set_detection_mode(domain, None) + instances.set_nodeinfo_url(domain, None) + + raise requests.exceptions.TooManyRedirects(message) logger.debug("software[]='%s'", type(software)) if isinstance(software, str) and software == "": logger.debug("Corrected empty string to None for software of domain='%s'", domain) software = None elif isinstance(software, str) and ("." 
in software or " " in software): - logger.debug("software='%s' may contain a version number, domain='{domain}', removing it ...", software) + logger.debug("software='%s' may contain a version number, domain='%s', removing it ...", software, domain) software = version.remove(software) - logger.debug("software[]='%s'", type(software)) + logger.debug("software[%s]='%s'", type(software), software) if isinstance(software, str) and "powered by " in software: logger.debug("software='%s' has 'powered by' in it", software) - software = version.remove(version.strip_powered_by(software)) + software = version.remove(software_helper.strip_powered_by(software)) elif isinstance(software, str) and " hosted on " in software: logger.debug("software='%s' has 'hosted on' in it", software) - software = version.remove(version.strip_hosted_on(software)) + software = version.remove(software_helper.strip_hosted_on(software)) elif isinstance(software, str) and " by " in software: logger.debug("software='%s' has ' by ' in it", software) - software = version.strip_until(software, " by ") + software = software_helper.strip_until(software, " by ") elif isinstance(software, str) and " see " in software: logger.debug("software='%s' has ' see ' in it", software) - software = version.strip_until(software, " see ") + software = software_helper.strip_until(software, " see ") - logger.debug("software='%s' - EXIT!", software) + logger.debug("software[%s]='%s' - EXIT!", type(software), software) return software def determine_software(domain: str, path: str = None) -> str: - logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path) + logger.debug("domain='%s',path='%s' - CALLED!", domain, path) domain_helper.raise_on(domain) - if not isinstance(path, str) and path is not None: - raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'") + if blacklist.is_blacklisted(domain): + raise Exception(f"domain='{domain}' is blacklisted but function was invoked") + elif not isinstance(path, str) and path is not None: + raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'") + elif path is not None and not path.startswith("/"): + raise ValueError(f"path='{path}' does not start with a slash") - logger.debug("Determining software for domain='%s',path='%s'", domain, path) + logger.debug("Fetching nodeinfo from domain='%s',path='%s' ...", domain, path) + data = nodeinfo.fetch(domain, path) software = None - logger.debug("Fetching nodeinfo from domain='%s' ...", domain) - data = fetch_nodeinfo(domain, path) - - logger.debug("data[]='%s'", type(data)) + logger.debug("data[%s]='%s'", type(data), data) if "exception" in data: # Continue raising it + logger.debug("data()=%d contains exception='%s' - raising ...", len(data), type(data["exception"])) raise data["exception"] elif "error_message" in data: - logger.debug("Returned error_message during fetching nodeinfo: '%s',status_code='%d'", data['error_message'], data['status_code']) - return fetch_generator_from_path(domain) - elif "status" in data and data["status"] == "error" and "message" in data: - logger.warning("JSON response is an error: '%s'", data["message"]) + logger.debug("Returned error_message during fetching nodeinfo: '%s',status_code=%d", data['error_message'], data['status_code']) + software = fetch_generator_from_path(domain) + logger.debug("Generator for domain='%s' is: '%s'", domain, software) + elif "json" in data: + logger.debug("domain='%s',path='%s',data[json] found ...", domain, path) + data = data["json"] + else: + 
logger.debug("Auto-detection for domain='%s' was failing, fetching / ...", domain) + software = fetch_generator_from_path(domain) + logger.debug("Generator for domain='%s' is: '%s'", domain, software) + + if "status" in data and data["status"] == "error" and "message" in data: + logger.warning("JSON response is an error: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"]) instances.set_last_error(domain, data["message"]) - return fetch_generator_from_path(domain) + instances.set_detection_mode(domain, None) + instances.set_nodeinfo_url(domain, None) + software = fetch_generator_from_path(domain) + logger.debug("Generator for domain='%s' is: '%s'", domain, software) + elif "software" in data and "name" in data["software"]: + logger.debug("Found data[json][software][name] in JSON response") + software = data["software"]["name"] + logger.debug("software[%s]='%s' - FOUND!", type(software), software) elif "message" in data: - logger.warning("JSON response contains only a message: '%s'", data["message"]) + logger.warning("JSON response contains only a message: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"]) instances.set_last_error(domain, data["message"]) - return fetch_generator_from_path(domain) + instances.set_detection_mode(domain, None) + instances.set_nodeinfo_url(domain, None) + + logger.debug("Invoking fetch_generator_from_path(%s) ...", domain) + software = fetch_generator_from_path(domain) + logger.debug("Generator for domain='%s' is: '%s'", domain, software) + elif "server" in data and "software" in data["server"]: + logger.debug("Found data[server][software]='%s' for domain='%s'", data["server"]["software"].lower(), domain) + software = data["server"]["software"].lower() + logger.debug("Detected software for domain='%s' is: '%s'", domain, software) elif "software" not in data or "name" not in data["software"]: - logger.debug("JSON response from domain='%s' does not include [software][name], fetching / ...", domain) + logger.debug("JSON response from domain='%s' does not include [software][name] - Resetting detection_mode,nodeinfo_url ...", domain) + instances.set_detection_mode(domain, None) + instances.set_nodeinfo_url(domain, None) + + logger.debug("Invoking fetch_generator_from_path(%s) ...", domain) software = fetch_generator_from_path(domain) logger.debug("Generator for domain='%s' is: '%s'", domain, software) - elif "software" in data and "name" in data["software"]: - logger.debug("Found data[software][name] in JSON response") - software = data["software"]["name"] - if software is None: + logger.debug("software[%s]='%s'", type(software), software) + if software in [None, ""]: logger.debug("Returning None - EXIT!") return None logger.debug("software='%s'- BEFORE!", software) - software = tidyup.domain(software) - logger.debug("software='%s'- AFTER!", software) - - if software in ["akkoma", "rebased", "akkounfucked", "ched"]: - logger.debug("Setting pleroma: domain='%s',software='%s'", domain, software) - software = "pleroma" - elif software in ["hometown", "ecko"]: - logger.debug("Setting mastodon: domain='%s',software='%s'", domain, software) - software = "mastodon" - elif software in ["slipfox calckey", "calckey", "groundpolis", "foundkey", "cherrypick", "meisskey", "magnetar", "keybump"]: - logger.debug("Setting misskey: domain='%s',software='%s'", domain, software) - software = "misskey" - elif software == "runtube.re": - logger.debug("Setting peertube: domain='%s',software='%s'", domain, software) - software = "peertube" - elif software 
== "nextcloud social": - logger.debug("Setting nextcloud: domain='%s',software='%s'", domain, software) - software = "nextcloud" - elif software.find("/") > 0: - logger.warning("Spliting of slash: domain='%s',software='%s'", domain, software) - software = tidyup.domain(software.split("/")[-1]) - elif software.find("|") > 0: - logger.warning("Spliting of pipe: domain='%s',software='%s'", domain, software) - software = tidyup.domain(software.split("|")[0]) - elif "powered by" in software: - logger.debug("software='%s' has 'powered by' in it", software) - software = version.strip_powered_by(software) - elif isinstance(software, str) and " by " in software: - logger.debug("software='%s' has ' by ' in it", software) - software = version.strip_until(software, " by ") - elif isinstance(software, str) and " see " in software: - logger.debug("software='%s' has ' see ' in it", software) - software = version.strip_until(software, " see ") - - logger.debug("software['%s']='%s'", type(software), software) - if software == "": - logger.warning("tidyup.domain() left no software name behind: domain='%s'", domain) - software = None + software = software_helper.alias(software) + logger.debug("software['%s']='%s' - AFTER!", type(software), software) - logger.debug("software[]='%s'", type(software)) if str(software) == "": logger.debug("software for domain='%s' was not detected, trying generator ...", domain) software = fetch_generator_from_path(domain) @@ -491,9 +465,11 @@ def determine_software(domain: str, path: str = None) -> str: logger.debug("software[]='%s'", type(software)) if isinstance(software, str) and "powered by" in software: logger.debug("software='%s' has 'powered by' in it", software) - software = version.remove(version.strip_powered_by(software)) + software = version.remove(software_helper.strip_powered_by(software)) + + software = software.strip() - logger.debug("software='%s' - EXIT!", domain, software) + logger.debug("software[%s]='%s' - EXIT!", type(software), software) return software def find_domains(tag: bs4.element.Tag) -> list: @@ -515,11 +491,11 @@ def find_domains(tag: bs4.element.Tag) -> list: logger.debug("domain='%s',reason='%s'", domain, reason) - if not utils.is_domain_wanted(domain): + if not domain_helper.is_wanted(domain): logger.debug("domain='%s' is blacklisted - SKIPPED!", domain) continue elif domain == "gab.com/.ai, develop.gab.com": - logger.debug("Multiple domains detected in one row") + logger.debug("Multiple gab.com domains detected in one row") domains.append({ "domain": "gab.com", "reason": reason, @@ -549,11 +525,11 @@ def find_domains(tag: bs4.element.Tag) -> list: def add_peers(rows: dict) -> list: logger.debug("rows[]='%s' - CALLED!", type(rows)) if not isinstance(rows, dict): - raise ValueError(f"Parameter rows[]='{type(rows)}' is not 'dict'") + raise ValueError(f"Parameter rows[]='{type(rows)}' is not of type 'dict'") peers = list() for key in ["linked", "allowed", "blocked"]: - logger.debug("Checking key='%s'", key) + logger.debug("key='%s'", key) if key not in rows or rows[key] is None: logger.debug("Cannot find key='%s' or it is NoneType - SKIPPED!", key) continue @@ -561,11 +537,11 @@ def add_peers(rows: dict) -> list: logger.debug("Adding %d peer(s) to peers list ...", len(rows[key])) for peer in rows[key]: logger.debug("peer[%s]='%s' - BEFORE!", type(peer), peer) - if peer is None or peer == "": + if peer in [None, ""]: logger.debug("peer is empty - SKIPPED") continue elif isinstance(peer, dict) and "domain" in peer: - 
logger.debug("peer[domain]='%s'", peer['domain']) + logger.debug("peer[domain]='%s'", peer["domain"]) peer = tidyup.domain(peer["domain"]) elif isinstance(peer, str): logger.debug("peer='%s'", peer) @@ -574,12 +550,110 @@ def add_peers(rows: dict) -> list: raise ValueError(f"peer[]='{type(peer)}' is not supported,key='{key}'") logger.debug("peer[%s]='%s' - AFTER!", type(peer), peer) - if not utils.is_domain_wanted(peer): + if not domain_helper.is_wanted(peer): logger.debug("peer='%s' is not wanted - SKIPPED!", peer) continue - logger.debug("Adding peer='%s' ...", peer) + logger.debug("Appending peer='%s' ...", peer) peers.append(peer) logger.debug("peers()=%d - EXIT!", len(peers)) return peers + +def fetch_blocks(domain: str) -> list: + logger.debug("domain='%s' - CALLED!", domain) + domain_helper.raise_on(domain) + + if not instances.is_registered(domain): + raise Exception(f"domain='{domain}' is not registered but function is invoked.") + elif blacklist.is_blacklisted(domain): + raise Exception(f"domain='{domain}' is blacklisted but function was invoked") + + # Init block list + blocklist = list() + + # No CSRF by default, you don't have to add network.api_headers by yourself here + headers = tuple() + + try: + logger.debug("Checking CSRF for domain='%s'", domain) + headers = csrf.determine(domain, dict()) + except network.exceptions as exception: + logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__) + instances.set_last_error(domain, exception) + + logger.debug("Returning empty list ... - EXIT!") + return list() + + try: + # json endpoint for newer mastodongs + logger.info("Fetching domain_blocks from domain='%s' ...", domain) + data = network.get_json_api( + domain, + "/api/v1/instance/domain_blocks", + headers=headers, + timeout=(config.get("connection_timeout"), config.get("read_timeout")) + ) + rows = list() + + logger.debug("data(%d)[]='%s'", len(data), type(data)) + if "error_message" in data: + logger.debug("Was not able to fetch domain_blocks from domain='%s': status_code=%d,error_message='%s'", domain, data['status_code'], data['error_message']) + instances.set_last_error(domain, data) + + logger.debug("blocklist()=%d - EXIT!", len(blocklist)) + return blocklist + elif "json" in data and "error" in data["json"]: + logger.warning("JSON API returned error message: '%s'", data["json"]["error"]) + instances.set_last_error(domain, data) + + logger.debug("blocklist()=%d - EXIT!", len(blocklist)) + return blocklist + else: + # Getting blocklist + rows = data["json"] + + logger.debug("Marking domain='%s' as successfully handled ...", domain) + instances.set_success(domain) + + logger.debug("rows(%d)[]='%s'", len(rows), type(rows)) + if len(rows) > 0: + logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain) + for block in rows: + # Check type + logger.debug("block[]='%s'", type(block)) + if not isinstance(block, dict): + logger.debug("block[]='%s' is of type 'dict' - SKIPPED!", type(block)) + continue + elif "domain" not in block: + logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block)) + continue + elif "severity" not in block: + logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block)) + continue + elif block["severity"] in ["accept", "accepted"]: + logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"]) + continue + elif "digest" in block and not validators.hashes.sha256(block["digest"]): + 
logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"]) + continue + + reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None + + logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s' ...", domain, block["domain"], reason, block["severity"]) + blocklist.append({ + "blocker" : domain, + "blocked" : block["domain"], + "digest" : block["digest"] if "digest" in block else None, + "reason" : reason, + "block_level": blocks.alias_block_level(block["severity"]), + }) + else: + logger.debug("domain='%s' has no block list", domain) + + except network.exceptions as exception: + logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception)) + instances.set_last_error(domain, exception) + + logger.debug("blocklist()=%d - EXIT!", len(blocklist)) + return blocklist