X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=fba%2Fnetworks%2Fmastodon.py;h=be22e94f9eba52f0d413ad2503733fc3e3623310;hb=7ce34f60c519ff20cc76b2030a91a0cefde3062b;hp=4d0edf31fe08967855d043bb14b2fba497f12411;hpb=6a214cdf6d9a4eb165209b8e2fe8aaab98dba2b1;p=fba.git

diff --git a/fba/networks/mastodon.py b/fba/networks/mastodon.py
index 4d0edf3..be22e94 100644
--- a/fba/networks/mastodon.py
+++ b/fba/networks/mastodon.py
@@ -14,15 +14,11 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-import inspect
 import logging
 
+import validators
 import bs4
 
-from fba import csrf
-from fba import database
-from fba import utils
-
 from fba.helpers import blacklist
 from fba.helpers import config
 from fba.helpers import domain as domain_helper
@@ -64,14 +60,19 @@ language_mapping = {
 }
 
 def fetch_blocks_from_about(domain: str) -> dict:
-    logger.debug("domain(%d)='%s' - CALLED!", len(domain), domain)
+    logger.debug("domain='%s' - CALLED!", domain)
     domain_helper.raise_on(domain)
 
-    logger.debug("Fetching mastodon blocks from domain='%s'", domain)
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function is invoked.")
+    elif not instances.is_registered(domain):
+        raise Exception(f"domain='{domain}' is not registered but function is invoked.")
+
+    logger.info("Fetching mastodon blocks from domain='%s'", domain)
     doc = None
     for path in ["/about/more", "/about"]:
         try:
-            logger.debug(f"Fetching path='{path}' from domain='{domain}' ...")
+            logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
             doc = bs4.BeautifulSoup(
                 network.fetch_response(
                     domain,
@@ -83,7 +84,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
             )
 
             if len(doc.find_all("h3")) > 0:
-                logger.debug(f"path='{path}' had some headlines - BREAK!")
+                logger.debug("path='%s' had some headlines - BREAK!", path)
                 break
 
         except network.exceptions as exception:
@@ -101,7 +102,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
     logger.debug("doc[]='%s'", type(doc))
    if doc is None:
         logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
-        return blocklist
+        return list()
 
     for header in doc.find_all("h3"):
         header_text = tidyup.reason(header.text)
@@ -116,10 +117,24 @@
         if header_text in blocklist or header_text.lower() in blocklist:
             # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
             for line in header.find_all_next("table")[0].find_all("tr")[1:]:
+                domain = line.find("span").text
+                digest = line.find("span")["title"][9:]
+                reason = line.find_all("td")[1].text
+
+                logger.debug("domain='%s',reason='%s' - BEFORE!", domain, reason)
+                domain = tidyup.domain(domain) if domain != "" else None
+                reason = tidyup.reason(reason) if reason != "" else None
+
+                logger.debug("domain='%s',reason='%s' - AFTER!", domain, reason)
+                if domain is None or domain == "":
+                    logger.warning("domain='%s' is empty,line='%s' - SKIPPED!", domain, line)
+                    continue
+
+                logger.debug("Appending domain='%s',digest='%s',reason='%s' to blocklist header_text='%s' ...", domain, digest, reason, blocklist)
                 blocklist[header_text].append({
-                    "domain": tidyup.domain(line.find("span").text),
-                    "hash"  : tidyup.domain(line.find("span")["title"][9:]),
-                    "reason": tidyup.reason(line.find_all("td")[1].text),
+                    "domain": domain,
+                    "digest": digest,
+                    "reason": reason,
                 })
         else:
             logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))
@@ -131,191 +146,58 @@ def fetch_blocks_from_about(domain: str) -> dict:
         "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
     }
 
-def fetch_blocks(domain: str, origin: str, nodeinfo_url: str):
-    logger.debug(f"domain='{domain}',origin='{origin}',nodeinfo_url='{nodeinfo_url}' - CALLED!")
+def fetch_blocks(domain: str) -> list:
+    logger.debug("domain='%s' - CALLED!", domain)
     domain_helper.raise_on(domain)
 
-    if not isinstance(origin, str) and origin is not None:
-        raise ValueError(f"Parameter origin[]='{type(origin)}' is not 'str'")
-    elif origin == "":
-        raise ValueError("Parameter 'origin' is empty")
-    elif not isinstance(nodeinfo_url, str):
-        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not 'str'")
-    elif nodeinfo_url == "":
-        raise ValueError("Parameter 'nodeinfo_url' is empty")
-
-    # No CSRF by default, you don't have to add network.api_headers by yourself here
-    headers = tuple()
-
-    try:
-        logger.debug("Checking CSRF for domain='%s'", domain)
-        headers = csrf.determine(domain, dict())
-    except network.exceptions as exception:
-        logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s) - EXIT!", type(exception), __name__)
-        instances.set_last_error(domain, exception)
-        return
-    try:
-        # json endpoint for newer mastodongs
-        found_blocks = list()
-        blocklist = list()
+    if blacklist.is_blacklisted(domain):
+        raise Exception(f"domain='{domain}' is blacklisted but function is invoked.")
+    elif not instances.is_registered(domain):
+        raise Exception(f"domain='{domain}' is not registered but function is invoked.")
 
-        rows = {
-            "reject"        : [],
-            "media_removal" : [],
-            "followers_only": [],
-            "report_removal": [],
-        }
+    blocklist = list()
 
-        logger.debug("Querying API domain_blocks: domain='%s'", domain)
-        data = network.get_json_api(
-            domain,
-            "/api/v1/instance/domain_blocks",
-            headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
-        )
-
-        logger.debug("data[]='%s'", type(data))
-        if "error_message" in data:
-            logger.debug(f"Was not able to fetch domain_blocks from domain='{domain}': status_code='{data['status_code']}',error_message='{data['error_message']}'")
-            instances.set_last_error(domain, data)
-            return
-        elif "json" in data and "error" in data["json"]:
-            logger.warning("JSON API returned error message: '%s'", data['json']['error'])
-            instances.set_last_error(domain, data)
-            return
-        else:
-            # Getting blocklist
-            blocklist = data["json"]
logger.debug("Invoking fetch_blocks_from_about(%s) ...", domain) + rows = fetch_blocks_from_about(domain) - if len(blocklist) > 0: - logger.info("Checking %d entries from domain='%s' ...", len(blocklist), domain) - for block in blocklist: - # Check type - logger.debug("block[]='%s'", type(block)) - if not isinstance(block, dict): - logger.debug(f"block[]='{type(block)}' is of type 'dict' - SKIPPED!") - continue - - # Map block -> entry - logger.debug(f"block[{type(block)}]='{block}'") - entry = { - "domain": block["domain"], - "hash" : block["digest"], - "reason": block["comment"] if "comment" in block else None - } - - logger.debug("severity='%s',domain='%s',hash='%s',comment='%s'", block['severity'], block['domain'], block['digest'], block['comment']) - if block['severity'] == 'suspend': - logger.debug("Adding entry='%s' with severity='%s' ...", entry, block['severity']) - rows['reject'].append(entry) - elif block['severity'] == 'silence': - logger.debug("Adding entry='%s' with severity='%s' ...", entry, block['severity']) - rows['followers_only'].append(entry) - elif block['severity'] == 'reject_media': - logger.debug("Adding entry='%s' with severity='%s' ...", entry, block['severity']) - rows['media_removal'].append(entry) - elif block['severity'] == 'reject_reports': - logger.debug("Adding entry='%s' with severity='%s' ...", entry, block['severity']) - rows['report_removal'].append(entry) - else: - logger.warning("Unknown severity='%s', domain='%s'", block['severity'], block['domain']) - else: - logger.debug("domain='%s' has returned zero rows, trying /about/more page ...", domain) - rows = fetch_blocks_from_about(domain) - - logger.info("Checking %d entries from domain='%s' ...", len(rows.items()), domain) - for block_level, blocklist in rows.items(): - logger.debug("domain='%s',block_level='%s',blocklist()=%d", domain, block_level, len(blocklist)) - block_level = tidyup.domain(block_level) - - logger.debug("block_level='%s' - AFTER!", block_level) - if block_level == "": - logger.warning("block_level is empty, domain='%s'", domain) + logger.debug("rows[%s]()=%d", type(rows), len(rows)) + if len(rows) > 0: + logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain) + for block in rows: + # Check type + logger.debug("block[]='%s'", type(block)) + if not isinstance(block, dict): + logger.debug("block[]='%s' is of type 'dict' - SKIPPED!", type(block)) continue - elif block_level == "accept": - logger.debug("domain='%s' skipping block_level='accept'", domain) + elif "domain" not in block: + logger.debug("block='%s'", block) + logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block)) + continue + elif not domain_helper.is_wanted(block["domain"]): + logger.debug("block[domain]='%s' is not wanted - SKIPPED!", block["domain"]) + continue + elif "severity" not in block: + logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block)) + continue + elif block["severity"] in ["accept", "accepted"]: + logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"]) + continue + elif "digest" in block and not validators.hashes.sha256(block["digest"]): + logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"]) continue - logger.debug("Checking %s entries from domain='{domain}',block_level='{block_level}' ...", len(blocklist)) - for block in blocklist: - logger.debug("block[]='%s'", type(block)) - blocked, blocked_hash, 
-
-                logger.debug("blocked='%s',blocked_hash='%s',reason='%s'", blocked, blocked_hash, reason)
-                blocked = tidyup.domain(blocked)
-                reason = tidyup.reason(reason) if reason is not None and reason != "" else None
-                logger.debug("blocked='%s',reason='%s' - AFTER!", blocked, reason)
-
-                if blocked == "":
-                    logger.warning("blocked is empty, domain='%s'", domain)
-                    continue
-                elif blocked.count("*") > 0:
-                    logger.debug("domain='%s' uses obfucated domains, marking ...", domain)
-                    instances.set_has_obfucation(domain, True)
-
-                    # Doing the hash search for instance names as well to tidy up DB
-                    row = instances.deobfucate("*", blocked, blocked_hash)
-
-                    logger.debug("row[]='%s'", type(row))
-                    if row is None:
-                        logger.warning("Cannot deobfucate blocked='%s',blocked_hash='%s' - SKIPPED!", blocked, blocked_hash)
-                        continue
-
-                    logger.debug("Updating domain: row[0]='%s'", row[0])
-                    blocked      = row[0]
-                    origin       = row[1]
-                    nodeinfo_url = row[2]
-                elif blocked.count("?") > 0:
-                    logger.debug("domain='%s' uses obfucated domains, marking ...", domain)
-                    instances.set_has_obfucation(domain, True)
-
-                    # Doing the hash search for instance names as well to tidy up DB
-                    row = instances.deobfucate("?", blocked, blocked_hash)
-
-                    logger.debug("row[]='%s'", type(row))
-                    if row is None:
-                        logger.warning("Cannot deobfucate blocked='%s',blocked_hash='%s' - SKIPPED!", blocked, blocked_hash)
-                        continue
-
-                    logger.debug("Updating domain: row[0]='%s'", row[0])
-                    blocked      = row[0]
-                    origin       = row[1]
-                    nodeinfo_url = row[2]
-
-                logger.debug("Looking up instance by domain: blocked='%s'", blocked)
-                if not utils.is_domain_wanted(blocked):
-                    logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
-                    continue
-                elif not instances.is_registered(blocked):
-                    logger.debug(f"Domain blocked='%s' wasn't found, adding ..., domain='%s',origin='%s',nodeinfo_url='%s'", blocked, domain, origin, nodeinfo_url)
-                    instances.add(blocked, domain, inspect.currentframe().f_code.co_name, nodeinfo_url)
-
-                logger.debug("Looking up instance by domain: blocked='%s'", blocked)
-                if not utils.is_domain_wanted(blocked):
-                    logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
-                    continue
-                elif not instances.is_registered(blocked):
-                    logger.debug("Hash wasn't found, adding: blocked='%s',domain='%s'", blocked, domain)
-                    instances.add(blocked, domain, inspect.currentframe().f_code.co_name, nodeinfo_url)
-
-                if not blocks.is_instance_blocked(domain, blocked, block_level):
-                    logger.debug("Blocking domain='%s',blocked='%s',block_level='%s' ...", domain, blocked, block_level)
-                    blocks.add_instance(domain, blocked, reason, block_level)
-
-                    if block_level == "reject":
-                        found_blocks.append({
-                            "blocked": blocked,
-                            "reason" : reason
-                        })
-                else:
-                    logger.debug("Updating block last seen and reason for domain='%s',blocked='%s' ...", domain, blocked)
-                    blocks.update_last_seen(domain, blocked, block_level)
-                    blocks.update_reason(reason, domain, blocked, block_level)
-
-        logger.debug("Invoking commit() ...")
-        database.connection.commit()
-    except network.exceptions as exception:
-        logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
-        instances.set_last_error(domain, exception)
-
-    logger.debug("EXIT!")
+            reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None
+
+            logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s'", domain, block["domain"], reason, block["severity"])
+            blocklist.append({
"blocker" : domain, + "blocked" : block["domain"], + "digest" : block["digest"] if "digest" in block else None, + "reason" : reason, + "block_level": blocks.alias_block_level(block["severity"]), + }) + else: + logger.debug("domain='%s' has no block list", domain) + + logger.debug("blocklist()=%d - EXIT!", len(blocklist)) + return blocklist