import requests
import validators
-from fba import csrf
-
from fba.helpers import config
from fba.helpers import cookies
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
from fba.helpers import version
+from fba.http import csrf
from fba.http import network
from fba.http import nodeinfo
+from fba.models import blocks
from fba.models import instances
from fba.networks import lemmy
if instances.has_pending(domain):
logger.debug("Flushing updates for domain='%s' ...", domain)
- instances.update_data(domain)
+ instances.update(domain)
logger.debug("Invoking cookies.clear(%s) ...", domain)
cookies.clear(domain)
logger.debug("Checking if domain='%s' has pending updates ...", domain)
if instances.has_pending(domain):
logger.debug("Flushing updates for domain='%s' ...", domain)
- instances.update_data(domain)
+ instances.update(domain)
logger.debug("instance='%s',origin='%s',_DEPTH=%d reached!", instance, origin, _DEPTH)
if _DEPTH <= config.get("max_crawl_depth") and len(peerlist) >= config.get("min_peers_length"):
logger.debug("Checking if domain='%s' has pending updates ...", domain)
if instances.has_pending(domain):
logger.debug("Flushing updates for domain='%s' ...", domain)
- instances.update_data(domain)
+ instances.update(domain)
_DEPTH = _DEPTH - 1
logger.debug("EXIT!")
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if ((response.ok and response.status_code < 300) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
+ if ((response.ok and response.status_code == 200) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
logger.debug("software='%s' - EXIT!", software)
return software
-def determine_software(domain: str, path: str = None) -> str:
- logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
+def determine_software(domain: str, path: str = None, nodeinfo_url: str = None) -> str:
+ logger.debug("domain='%s',path='%s',nodeinfo_url='%s' - CALLED!", domain, path, nodeinfo_url)
domain_helper.raise_on(domain)
if not isinstance(path, str) and path is not None:
raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+ elif not isinstance(nodeinfo_url, str) and nodeinfo_url is not None:
+ raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not of type 'str'")
- logger.debug("Determining software for domain='%s',path='%s'", domain, path)
+ logger.debug("Fetching nodeinfo from domain='%s',path='%s',nodeinfo_url='%s' ...", domain, path, nodeinfo_url)
+ data = nodeinfo.fetch(domain, path, nodeinfo_url)
software = None
- logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
- data = nodeinfo.fetch_nodeinfo(domain, path)
-
logger.debug("data[%s]='%s'", type(data), data)
if "exception" in data:
# Continue raising it
logger.debug("software='%s' has 'powered by' in it", software)
software = version.remove(software_helper.strip_powered_by(software))
+ software = software.strip()
+
logger.debug("software='%s' - EXIT!", software)
return software
logger.debug("peers()=%d - EXIT!", len(peers))
return peers
+
def fetch_blocks(domain: str) -> list:
    """Fetch the public domain block list from a Mastodon-compatible instance.

    Queries the `/api/v1/instance/domain_blocks` JSON endpoint of `domain`,
    validates each returned entry and normalizes it into this project's
    block-record dict shape.

    Parameters:
        domain: The instance's domain name (validated via domain_helper.raise_on()).

    Returns:
        A list of dicts with keys "blocker", "blocked", "hash", "reason" and
        "block_level". Empty list when the instance exposes no block list, the
        API call fails, or CSRF detection raises a network exception.
    """
    logger.debug("domain='%s' - CALLED!", domain)
    domain_helper.raise_on(domain)

    # Init block list
    blocklist = list()

    # No CSRF by default, you don't have to add network.api_headers by yourself here
    headers = tuple()

    try:
        logger.debug("Checking CSRF for domain='%s'", domain)
        headers = csrf.determine(domain, dict())
    except network.exceptions as exception:
        logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
        instances.set_last_error(domain, exception)

        # Without working headers the API call below cannot succeed - give up early.
        logger.debug("Returning empty list ... - EXIT!")
        return list()

    try:
        # json endpoint for newer mastodongs
        logger.debug("Querying API domain_blocks: domain='%s'", domain)
        data = network.get_json_api(
            domain,
            "/api/v1/instance/domain_blocks",
            headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )

        logger.debug("data[]='%s'", type(data))
        if "error_message" in data:
            logger.debug("Was not able to fetch domain_blocks from domain='%s': status_code=%d,error_message='%s'", domain, data['status_code'], data['error_message'])
            instances.set_last_error(domain, data)
            return blocklist
        elif "json" in data and "error" in data["json"]:
            logger.warning("JSON API returned error message: '%s'", data["json"]["error"])
            instances.set_last_error(domain, data)
            return blocklist
        else:
            # Getting blocklist
            rows = data["json"]

            logger.debug("Marking domain='%s' as successfully handled ...", domain)
            instances.set_success(domain)

        logger.debug("rows[%s]()=%d", type(rows), len(rows))
        if len(rows) > 0:
            logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
            for block in rows:
                # Check type: each entry must be a dict with at least "domain" and "severity"
                logger.debug("block[]='%s'", type(block))
                if not isinstance(block, dict):
                    # BUGFIX: message previously claimed the entry *is* of type 'dict'
                    logger.debug("block[]='%s' is not of type 'dict' - SKIPPED!", type(block))
                    continue
                elif "domain" not in block:
                    logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
                    continue
                elif "severity" not in block:
                    logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
                    continue
                elif block["severity"] in ["accept", "accepted"]:
                    # "accept"/"accepted" entries are not blocks - skip them
                    logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"])
                    continue
                elif "digest" in block and not validators.hashes.sha256(block["digest"]):
                    # A present digest must be a valid SHA-256 hex string
                    logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"])
                    continue

                # Normalize an optional, non-empty comment into a tidied reason (else None)
                reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None

                logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s'", domain, block["domain"], reason, block["severity"])
                blocklist.append({
                    "blocker"    : domain,
                    "blocked"    : block["domain"],
                    "hash"       : block["digest"] if "digest" in block else None,
                    "reason"     : reason,
                    "block_level": blocks.alias_block_level(block["severity"]),
                })
        else:
            logger.debug("domain='%s' has no block list", domain)

    except network.exceptions as exception:
        logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
    return blocklist