X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=fba%2Fnetworks%2Flemmy.py;h=7b427fc91ca05db6a85238be3015bb2b76b41294;hb=bd36d58fadcf377a7982103ca4d7e5c4376ef463;hp=e8bba35004d4b0bbb5dd711620778bc7aa3f6c63;hpb=349d937806201188473d6f8c26d256136401d517;p=fba.git

diff --git a/fba/networks/lemmy.py b/fba/networks/lemmy.py
index e8bba35..7b427fc 100644
--- a/fba/networks/lemmy.py
+++ b/fba/networks/lemmy.py
@@ -14,12 +14,12 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
+import json
 import logging
 
 import bs4
 
 from fba import csrf
-from fba import utils
 
 from fba.helpers import config
 from fba.helpers import domain as domain_helper
@@ -32,9 +32,10 @@ from fba.models import instances
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
+#logger.setLevel(logging.DEBUG)
 
-def fetch_peers(domain: str) -> list:
-    logger.debug("domain='%s' - CALLED!", domain)
+def fetch_peers(domain: str, origin: str) -> list:
+    logger.debug("domain='%s',origin='%s' - CALLED!", domain, origin)
     domain_helper.raise_on(domain)
 
     peers = list()
@@ -46,8 +47,10 @@ def fetch_peers(domain: str) -> list:
         logger.debug("Checking CSRF for domain='%s'", domain)
         headers = csrf.determine(domain, dict())
     except network.exceptions as exception:
-        logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+        logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
         instances.set_last_error(domain, exception)
+
+        logger.debug("Returning empty list ... - EXIT!")
         return list()
 
     try:
@@ -66,10 +69,13 @@ def fetch_peers(domain: str) -> list:
         elif "federated_instances" in data["json"] and isinstance(data["json"]["federated_instances"], dict):
             logger.debug("Found federated_instances for domain='%s'", domain)
             peers = peers + federation.add_peers(data["json"]["federated_instances"])
+
+            logger.debug("Marking domain='%s' as successfully handled ...", domain)
             instances.set_success(domain)
-        else:
-            logger.warning("JSON response does not contain 'federated_instances', domain='%s'", domain)
-            instances.set_last_error(domain, data)
+
+        if len(peers) == 0:
+            logger.warning("Fetching instances for domain='%s' from /instances ...", domain)
+            peers = fetch_instances(domain, origin)
 
     except network.exceptions as exception:
         logger.warning("Exception during fetching JSON: domain='%s',exception[%s]:'%s'", domain, type(exception), str(exception))
@@ -83,7 +89,7 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
     domain_helper.raise_on(domain)
 
     if not isinstance(nodeinfo_url, str):
-        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not 'str'")
+        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not of type 'str'")
     elif nodeinfo_url == "":
         raise ValueError("Parameter 'nodeinfo_url' is empty")
 
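
The fetch_peers() hunks above drop the old "no federated_instances, record an
error" branch and instead fall back to HTML scraping when the JSON API yields
no peers. A minimal sketch of the new call path, assuming an installed fba
checkout; "lemmy.example" and the None origin are placeholder arguments, not
real inputs:

    # Hedged sketch: "lemmy.example" is a stand-in domain, not a real instance.
    from fba.networks import lemmy

    # Tries the instance's JSON API first; when that yields no peers, the new
    # code path falls back to fetch_instances(), which scrapes /instances.
    peers = lemmy.fetch_peers("lemmy.example", None)
    print(len(peers))
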
BREAK!") - found = header + for criteria in [{"class": "home-instances container-lg"}, {"class": "container"}]: + logger.debug("criteria='%s'", criteria) + containers = doc.findAll("div", criteria) + + logger.debug("Checking %d containers ...", len(containers)) + for container in containers: + logger.debug("container[]='%s'", type(container)) + for header in container.find_all(["h2", "h3", "h4", "h5"]): + content = header + logger.debug("header[%s]='%s' - BEFORE!", type(header), header) + if header is not None: + content = str(header.contents[0]) + logger.debug("content[%s]='%s' - AFTER!", type(content), content) + + if content is None: + logger.debug("domain='%s' has returned empty header='%s' - SKIPPED!", domain, header) + continue + elif not isinstance(content, str): + logger.debug("content[]='%s' is not supported/wanted type 'str' - SKIPPED!", type(content)) + continue + elif content.lower() in translations: + logger.debug("Found header='%s' with blocked instances - BREAK(3) !", header) + found = header + break + + logger.debug("found[]='%s'", type(found)) + if found is not None: + logger.debug("Found header with blocked instances - BREAK(2) !") + break + + logger.debug("found[]='%s'", type(found)) + if found is not None: + logger.debug("Found header with blocked instances - BREAK(1) !") break logger.debug("found[]='%s'", type(found)) if found is None: - logger.debug("domain='%s' is not blocking any instances - EXIT!", domain) + logger.info("domain='%s' has no HTML blocklist, checking scripts ...", domain) + peers = parse_script(doc, "blocked") + + logger.debug("domain='%s' has %d peer(s).", domain, len(peers)) + for blocked in peers: + logger.debug("Appending blocker='%s',blocked='%s',block_level='reject' ...", domain, blocked) + blocklist.append({ + "blocker" : domain, + "blocked" : blocked, + "reason" : None, + "block_level": "reject", + }) + + logger.debug("blocklist()=%d - EXIT!", len(blocklist)) return blocklist - blocking = found.find_next("ul").findAll("a") + blocking = found.find_next(["ul", "table"]).findAll("a") logger.debug("Found %d blocked instance(s) ...", len(blocking)) for tag in blocking: logger.debug("tag[]='%s'", type(tag)) blocked = tidyup.domain(tag.contents[0]) logger.debug("blocked='%s'", blocked) - if not utils.is_domain_wanted(blocked): + if blocked == "": + logger.warning("blocked='%s' is empty after tidyup.domain() - SKIPPED!", tag.contents[0]) + continue + elif not domain_helper.is_wanted(blocked): logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked) continue - logger.debug("Appending blocker='%s',blocked='%s',block_level='reject'", domain, blocked) + logger.debug("Appending blocker='%s',blocked='%s',block_level='reject' ...", domain, blocked) blocklist.append({ "blocker" : domain, "blocked" : blocked, @@ -185,3 +227,154 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list: logger.debug("blocklist()=%d - EXIT!", len(blocklist)) return blocklist + +def fetch_instances(domain: str, origin: str) -> list: + logger.debug("domain='%s',origin='%s' - CALLED!", domain, origin) + domain_helper.raise_on(domain) + + peers = list() + + try: + # json endpoint for newer mastodongs + logger.debug("Fetching /instances from domain='%s'", domain) + response = network.fetch_response( + domain, + "/instances", + network.web_headers, + (config.get("connection_timeout"), config.get("read_timeout")) + ) + + logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text)) + if response.ok and 
@@ -185,3 +227,154 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
 
     logger.debug("blocklist()=%d - EXIT!", len(blocklist))
     return blocklist
+
+def fetch_instances(domain: str, origin: str) -> list:
+    logger.debug("domain='%s',origin='%s' - CALLED!", domain, origin)
+    domain_helper.raise_on(domain)
+
+    peers = list()
+
+    try:
+        # HTML endpoint that lists all federated instances
+        logger.debug("Fetching /instances from domain='%s'", domain)
+        response = network.fetch_response(
+            domain,
+            "/instances",
+            network.web_headers,
+            (config.get("connection_timeout"), config.get("read_timeout"))
+        )
+
+        logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
+        if response.ok and response.status_code < 300 and response.text != "":
+            logger.debug("Parsing %s Bytes ...", len(response.text))
+
+            doc = bs4.BeautifulSoup(response.text, "html.parser")
+            logger.debug("doc[]='%s'", type(doc))
+
+            for criteria in [{"class": "home-instances container-lg"}, {"class": "container"}]:
+                logger.debug("criteria='%s'", criteria)
+                containers = doc.findAll("div", criteria)
+
+                logger.debug("Checking %d containers ...", len(containers))
+                for container in containers:
+                    logger.debug("container[%s]='%s'", type(container), container)
+
+                    rows = container.find_next(["ul", "table"]).findAll("a")
+                    logger.debug("Found %d instance(s) ...", len(rows))
+                    for tag in rows:
+                        logger.debug("tag[]='%s'", type(tag))
+                        text = tag.contents[0] if isinstance(tag.contents[0], str) else tag.contents[0].text
+                        peer = tidyup.domain(text)
+                        logger.debug("peer='%s'", peer)
+
+                        if peer == "":
+                            logger.debug("peer is empty - SKIPPED!")
+                            continue
+                        elif not domain_helper.is_wanted(peer):
+                            logger.debug("peer='%s' is not wanted - SKIPPED!", peer)
+                            continue
+                        elif peer in peers:
+                            logger.debug("peer='%s' already added - SKIPPED!", peer)
+                            continue
+
+                        logger.debug("Appending peer='%s' ...", peer)
+                        peers.append(peer)
+
+            logger.debug("peers()=%d", len(peers))
+            if len(peers) == 0:
+                logger.debug("Found no peers for domain='%s', trying script tag ...", domain)
+                peers = parse_script(doc)
+
+        logger.debug("Marking domain='%s' as successfully handled, peers()=%d ...", domain, len(peers))
+        instances.set_success(domain)
+
+    except network.exceptions as exception:
+        logger.warning("domain='%s',exception[%s]:'%s'", domain, type(exception), str(exception))
+        instances.set_last_error(domain, exception)
+
+    logger.debug("peers()=%d - EXIT!", len(peers))
+    return peers
+
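
fetch_instances() above reads the anchor text two ways because /instances
pages may wrap the domain name in a child tag instead of placing it directly
inside the <a>. A small sketch of that branch on invented anchors:

    import bs4

    # Both anchors are invented examples of markup seen on /instances pages.
    doc = bs4.BeautifulSoup(
        '<a href="#">plain.example</a><a href="#"><span>wrapped.example</span></a>',
        "html.parser"
    )
    for tag in doc.findAll("a"):
        text = tag.contents[0] if isinstance(tag.contents[0], str) else tag.contents[0].text
        print(text)  # plain.example, then wrapped.example
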
+def parse_script(doc: bs4.BeautifulSoup, only: str = None) -> list:
+    logger.debug("doc[]='%s',only='%s' - CALLED!", type(doc), only)
+    if not isinstance(doc, bs4.BeautifulSoup):
+        raise ValueError(f"Parameter doc[]='{type(doc)}' is not of type 'bs4.BeautifulSoup'")
+    elif not isinstance(only, str) and only is not None:
+        raise ValueError(f"Parameter only[]='{type(only)}' is not of type 'str'")
+    elif isinstance(only, str) and only == "":
+        raise ValueError("Parameter 'only' is empty")
+
+    scripts = doc.find_all("script")
+    peers = list()
+
+    logger.debug("scripts()=%d", len(scripts))
+    for script in scripts:
+        logger.debug("script[%s].contents()=%d", type(script), len(script.contents))
+        if len(script.contents) == 0:
+            logger.debug("script has no contents - SKIPPED!")
+            continue
+        elif not script.contents[0].startswith("window.isoData"):
+            logger.debug("script.contents[0]='%s' does not start with window.isoData - SKIPPED!", script.contents[0])
+            continue
+
+        logger.debug("script.contents[0][]='%s'", type(script.contents[0]))
+
+        iso_data = script.contents[0].split("=")[1].strip().replace(":undefined", ":\"undefined\"")
+        logger.debug("iso_data[%s]='%s'", type(iso_data), iso_data)
+
+        parsed = None
+        try:
+            parsed = json.loads(iso_data)
+        except json.decoder.JSONDecodeError as exception:
+            logger.warning("Exception '%s' during parsing %d Bytes: '%s' - EXIT!", type(exception), len(iso_data), str(exception))
+            return list()
+
+        logger.debug("parsed[%s]()=%d", type(parsed), len(parsed))
+
+        if "routeData" not in parsed:
+            logger.warning("parsed[%s]()=%d does not contain element 'routeData'", type(parsed), len(parsed))
+            continue
+        elif "federatedInstancesResponse" not in parsed["routeData"]:
+            logger.warning("parsed[routeData][%s]()=%d does not contain element 'federatedInstancesResponse'", type(parsed["routeData"]), len(parsed["routeData"]))
+            continue
+        elif "data" not in parsed["routeData"]["federatedInstancesResponse"]:
+            logger.warning("parsed[routeData][federatedInstancesResponse][%s]()=%d does not contain element 'data'", type(parsed["routeData"]["federatedInstancesResponse"]), len(parsed["routeData"]["federatedInstancesResponse"]))
+            continue
+        elif "federated_instances" not in parsed["routeData"]["federatedInstancesResponse"]["data"]:
+            logger.warning("parsed[routeData][federatedInstancesResponse][data][%s]()=%d does not contain element 'federated_instances'", type(parsed["routeData"]["federatedInstancesResponse"]["data"]), len(parsed["routeData"]["federatedInstancesResponse"]["data"]))
+            continue
+
+        data = parsed["routeData"]["federatedInstancesResponse"]["data"]["federated_instances"]
+        logger.debug("Checking %d data elements ...", len(data))
+        for element in data:
+            logger.debug("element='%s'", element)
+            if isinstance(only, str) and only != element:
+                logger.debug("Skipping unwanted element='%s',only='%s'", element, only)
+                continue
+
+            logger.debug("Checking data[%s]()=%d row(s) ...", element, len(data[element]))
+            for row in data[element]:
+                logger.debug("row[]='%s'", type(row))
+                if "domain" not in row:
+                    logger.warning("row()=%d has no element 'domain' - SKIPPED!", len(row))
+                    continue
+
+                logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
+                peer = tidyup.domain(row["domain"])
+                logger.debug("peer='%s' - AFTER!", peer)
+
+                if peer == "":
+                    logger.debug("peer is empty - SKIPPED!")
+                    continue
+                elif not domain_helper.is_wanted(peer):
+                    logger.debug("peer='%s' is not wanted - SKIPPED!", peer)
+                    continue
+                elif peer in peers:
+                    logger.debug("peer='%s' already added - SKIPPED!", peer)
+                    continue
+
+                logger.debug("Appending peer='%s' ...", peer)
+                peers.append(peer)
+
+    logger.debug("peers()=%d - EXIT!", len(peers))
+    return peers
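
For reference, a standalone sketch of the window.isoData extraction that
parse_script() performs; the inline JSON below is an invented, heavily trimmed
stand-in for what Lemmy embeds in its pages, not captured output:

    import json

    import bs4

    html = ('<script>window.isoData = {"routeData": {"federatedInstancesResponse":'
            ' {"data": {"federated_instances": {"blocked": [{"domain": "bad.example"}]}}}}}'
            '</script>')
    doc = bs4.BeautifulSoup(html, "html.parser")

    for script in doc.find_all("script"):
        if len(script.contents) > 0 and script.contents[0].startswith("window.isoData"):
            # Same steps as parse_script(): cut off the assignment, patch
            # ":undefined" into valid JSON, then descend to the instance lists.
            iso_data = script.contents[0].split("=")[1].strip().replace(":undefined", ":\"undefined\"")
            parsed = json.loads(iso_data)
            data = parsed["routeData"]["federatedInstancesResponse"]["data"]["federated_instances"]
            print([row["domain"] for row in data["blocked"]])  # ['bad.example']
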