-from bs4 import BeautifulSoup
-from hashlib import sha256
-
-import reqto
+# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
+# Copyright (C) 2023 Free Software Foundation
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import bs4
+import hashlib
import re
-import sqlite3
+import reqto
import json
+import sqlite3
import sys
+import time
+import validators
with open("config.json") as f:
    config = json.loads(f.read())
+# Don't check these, known trolls/flooders/testing/developing
blacklist = [
+    # Floods network with fake nodes as "research" project
    "activitypub-troll.cf",
+    # Similar troll
    "gab.best",
+    # Similar troll
    "4chan.icu",
+    # Flooder (?)
    "social.shrimpcam.pw",
+    # Flooder (?)
    "mastotroll.netz.org",
+    # Testing/developing installations
    "ngrok.io",
+    "ngrok-free.app",
+    "misskeytest.chn.moe",
+]
+
+# Pending errors (keyed by domain) that still need to be written to the database
+pending_errors = {
+}
+
+# "rel" identifiers (no real URLs)
+nodeinfo_identifier = [
+    "https://nodeinfo.diaspora.software/ns/schema/2.1",
+    "https://nodeinfo.diaspora.software/ns/schema/2.0",
+    "https://nodeinfo.diaspora.software/ns/schema/1.1",
+    "https://nodeinfo.diaspora.software/ns/schema/1.0",
+    "http://nodeinfo.diaspora.software/ns/schema/2.1",
+    "http://nodeinfo.diaspora.software/ns/schema/2.0",
+    "http://nodeinfo.diaspora.software/ns/schema/1.1",
+    "http://nodeinfo.diaspora.software/ns/schema/1.0",
]
+# HTTP headers for non-API requests
headers = {
-    "user-agent": config["useragent"]
+    "User-Agent": config["useragent"],
+}
+# HTTP headers for API requests
+api_headers = {
+    "User-Agent": config["useragent"],
+    "Content-Type": "application/json",
}
-conn = sqlite3.connect("blocks.db")
-c = conn.cursor()
+# Found info from node, such as nodeinfo URL, detection mode that needs to be
+# written to database. Both sub-dictionaries must be filled at the same time or
+# else update_nodeinfos() will fail
+nodeinfos = {
+    # Detection mode: 'AUTO_DISCOVERY', 'STATIC_CHECKS' or 'GENERATOR'
+    # NULL means all detection methods have failed (maybe still reachable instance)
+    # NOTE(review): fetch_nodeinfo() actually stores 'STATIC_CHECK' (no trailing
+    # 'S') — confirm which spelling the database schema expects
+    "detection_mode": {},
+    # Found nodeinfo URL
+    "nodeinfo_url": {},
+}
-def get_hash(domain: str) -> str:
-    # NOISY-DEBUG: print("DEBUG: Calculating hash for domain:", domain)
-    return sha256(domain.encode("utf-8")).hexdigest()
+language_mapping = {
+    # English -> English
+    "Silenced instances"            : "Silenced servers",
+    "Suspended instances"           : "Suspended servers",
+    "Limited instances"             : "Limited servers",
+    # Mapping German -> English
+    "Gesperrte Server"              : "Suspended servers",
+    "Gefilterte Medien"             : "Filtered media",
+    "Stummgeschaltete Server"       : "Silenced servers",
+    # Japanese -> English
+    "停止済みのサーバー"            : "Suspended servers",
+    "制限中のサーバー"              : "Limited servers",
+    "メディアを拒否しているサーバー": "Filtered media",
+    "サイレンス済みのサーバー"      : "Silenced servers",
+    # Hebrew -> English
+    "שרתים מושעים"                  : "Suspended servers",
+    "מדיה מסוננת"                   : "Filtered media",
+    "שרתים מוגבלים"                 : "Silenced servers",
+    # French -> English
+    "Serveurs suspendus"            : "Suspended servers",
+    "Médias filtrés"                : "Filtered media",
+    "Serveurs limités"              : "Limited servers",
+    "Serveurs modérés"              : "Limited servers",
+}
+
+# URL for fetching peers
+get_peers_url = "/api/v1/instance/peers"
+
+# Cache for redundant SQL queries
+cache = {}
+
+# Connect to database
+connection = sqlite3.connect("blocks.db")
+cursor = connection.cursor()
+
+# Pattern instance for version numbers
+patterns = [
+    # semantic version number (with optional v|V prefix)
+    re.compile("^(?P<version>v|V{0,1})(\.{0,1})(?P<major>0|[1-9]\d*)\.(?P<minor>0+|[1-9]\d*)(\.(?P<patch>0+|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?)?$"),
+    # non-semantic, e.g. 1.2.3.4
+    re.compile("^(?P<version>v|V{0,1})(\.{0,1})(?P<major>0|[1-9]\d*)\.(?P<minor>0+|[1-9]\d*)(\.(?P<patch>0+|[1-9]\d*)(\.(?P<subpatch>0|[1-9]\d*))?)$"),
+    # non-semantic, e.g. 2023-05[-dev]
+    re.compile("^(?P<year>[1-9]{1}[0-9]{3})\.(?P<month>[0-9]{2})(-dev){0,1}$"),
+    # non-semantic, e.g. abcdef0 (short git commit hash)
+    re.compile("^[a-f0-9]{7}$"),
+]
+
+##### Cache #####
+
+def is_cache_initialized(key: str) -> bool:
+    # True when a sub-dictionary for this cache key has already been created.
+    return key in cache
+
+def set_all_cache_key(key: str, rows: list, value: any):
+    # Store the same value for every row under the given cache key,
+    # initializing the sub-dictionary on first use. Only tuple rows are
+    # supported; the tuple's first element is used as the sub-key.
+    # DEBUG: print(f"DEBUG: key='{key}',rows()={len(rows)},value[]={type(value)} - CALLED!")
+    if not is_cache_initialized(key):
+        # DEBUG: print(f"DEBUG: Cache for key='{key}' not initialized.")
+        cache[key] = {}
+
+    for sub in rows:
+        # DEBUG: print(f"DEBUG: Setting key='{key}',sub[{type(sub)}]='{sub}'")
+
+        if isinstance(sub, tuple):
+            cache[key][sub[0]] = value
+        else:
+            # BUGFIX: previously referenced undefined name 'row' which raised
+            # NameError whenever a non-tuple row was encountered
+            print(f"WARNING: Unsupported type sub[]='{type(sub)}'")
+
+    # DEBUG: print("DEBUG: EXIT!")
+
+def set_cache_key(key: str, sub: str, value: any):
+    # Store a single value under an already-initialized cache key; raises when
+    # set_all_cache_key()/initialization has not happened first.
+    if not is_cache_initialized(key):
+        print(f"WARNING: Bad method call, key='{key}' is not initialized yet.")
+        raise Exception(f"Cache for key='{key}' is not initialized, but function called")
+
+    cache[key][sub] = value
+
+def is_cache_key_set(key: str, sub: str) -> bool:
+    # True when a value exists under key/sub; the cache key itself must have
+    # been initialized beforehand or an exception is raised.
+    if not is_cache_initialized(key):
+        print(f"WARNING: Bad method call, key='{key}' is not initialized yet.")
+        raise Exception(f"Cache for key='{key}' is not initialized, but function called")
+
+    return sub in cache[key]
+
+def add_peers(rows: dict) -> list:
+    # Collect peer domains from the 'linked', 'allowed' and 'blocked' lists of
+    # a federation response, tidying each name and skipping blacklisted ones.
+    # DEBUG: print(f"DEBUG: rows()={len(rows)} - CALLED!")
+    peers = list()
+    for element in ["linked", "allowed", "blocked"]:
+        # DEBUG: print(f"DEBUG: Checking element='{element}'")
+        if element in rows and rows[element] != None:
+            # DEBUG: print(f"DEBUG: Adding {len(rows[element])} peer(s) to peers list ...")
+            for peer in rows[element]:
+                # DEBUG: print(f"DEBUG: peer='{peer}' - BEFORE!")
+                peer = tidyup(peer)
+
+                # DEBUG: print(f"DEBUG: peer='{peer}' - AFTER!")
+                if is_blacklisted(peer):
+                    # DEBUG: print(f"DEBUG: peer='{peer}' is blacklisted, skipped!")
+                    continue
+
+                # DEBUG: print(f"DEBUG: Adding peer='{peer}' ...")
+                peers.append(peer)
+
+    # DEBUG: print(f"DEBUG: peers()={len(peers)} - EXIT!")
+    return peers
+
+def remove_version(software: str) -> str:
+    # Strip a trailing version number from a software name, e.g.
+    # "mastodon 4.1.2" -> "mastodon". Returns the input unchanged when no
+    # recognizable version pattern (see 'patterns') is found.
+    # DEBUG: print(f"DEBUG: software='{software}' - CALLED!")
+    if not "." in software and " " not in software:
+        print(f"WARNING: software='{software}' does not contain a version number.")
+        return software
+
+    # Cut off trailing descriptions first (";", "," or " - " separators)
+    temp = software
+    if ";" in software:
+        temp = software.split(";")[0]
+    elif "," in software:
+        temp = software.split(",")[0]
+    elif " - " in software:
+        temp = software.split(" - ")[0]
+
+    # DEBUG: print(f"DEBUG: software='{software}'")
+    # The version candidate is the last token after a common separator
+    version = None
+    if " " in software:
+        version = temp.split(" ")[-1]
+    elif "/" in software:
+        version = temp.split("/")[-1]
+    elif "-" in software:
+        version = temp.split("-")[-1]
+    else:
+        # DEBUG: print(f"DEBUG: Was not able to find common seperator, returning untouched software='{software}'")
+        return software
+
+    matches = None
+    match = None
+    # DEBUG: print(f"DEBUG: Checking {len(patterns)} patterns ...")
+    for pattern in patterns:
+        # Run match()
+        match = pattern.match(version)
+
+        # DEBUG: print(f"DEBUG: match[]={type(match)}")
+        if type(match) is re.Match:
+            break
+
+    # DEBUG: print(f"DEBUG: version[{type(version)}]='{version}',match='{match}'")
+    if type(match) is not re.Match:
+        print(f"WARNING: version='{version}' does not match regex, leaving software='{software}' untouched.")
+        return software
+
+    # DEBUG: print(f"DEBUG: Found valid version number: '{version}', removing it ...")
+    # Drop the version plus its separator character from the end
+    end = len(temp) - len(version) - 1
+
+    # DEBUG: print(f"DEBUG: end[{type(end)}]={end}")
+    software = temp[0:end].strip()
+    if " version" in software:
+        # DEBUG: print(f"DEBUG: software='{software}' contains word ' version'")
+        software = strip_until(software, " version")
+
+    # DEBUG: print(f"DEBUG: software='{software}' - EXIT!")
+    return software
-def get_peers(domain: str) -> str:
-    # NOISY-DEBUG: print("DEBUG: Getting peers for domain:", domain)
-    peers = None
+def strip_powered_by(software: str) -> str:
+    # Extract the real software name from strings like
+    # "site powered by Mastodon - extra": returns the part after "powered by "
+    # and before any " - " suffix. Input unchanged when no marker is present.
+    # DEBUG: print(f"DEBUG: software='{software}' - CALLED!")
+    if software == "":
+        print(f"ERROR: Bad method call, 'software' is empty")
+        raise Exception("Parameter 'software' is empty")
+    elif not "powered by" in software:
+        print(f"WARNING: Cannot find 'powered by' in '{software}'!")
+        return software
+
+    start = software.find("powered by ")
+    # DEBUG: print(f"DEBUG: start[{type(start)}]='{start}'")
+
+    # 11 == len("powered by ")
+    software = software[start + 11:].strip()
+    # DEBUG: print(f"DEBUG: software='{software}'")
+
+    software = strip_until(software, " - ")
+
+    # DEBUG: print(f"DEBUG: software='{software}' - EXIT!")
+    return software
+
+def strip_until(software: str, until: str) -> str:
+    # Cut 'software' off just before the first occurrence of 'until'.
+    # Returns the input unchanged when 'until' is absent or found at index 0.
+    # Raises on empty arguments.
+    # DEBUG: print(f"DEBUG: software='{software}',until='{until}' - CALLED!")
+    if software == "":
+        print(f"ERROR: Bad method call, 'software' is empty")
+        raise Exception("Parameter 'software' is empty")
+    elif until == "":
+        print(f"ERROR: Bad method call, 'until' is empty")
+        raise Exception("Parameter 'until' is empty")
+    elif not until in software:
+        # BUGFIX: message was copy-pasted from strip_powered_by() and always
+        # claimed 'powered by' could not be found; report the actual needle
+        print(f"WARNING: Cannot find '{until}' in '{software}'!")
+        return software
+
+    # Next, strip until part
+    end = software.find(until)
+
+    # DEBUG: print(f"DEBUG: end[{type(end)}]='{end}'")
+    if end > 0:
+        software = software[0:end].strip()
+
+    # DEBUG: print(f"DEBUG: software='{software}' - EXIT!")
+    return software
+
+def is_blacklisted(domain: str) -> bool:
+    # A domain counts as blacklisted when any known-bad entry occurs anywhere
+    # within it (substring match, so sub-domains are covered too).
+    return any(entry in domain for entry in blacklist)
+
+def remove_pending_error(domain: str):
+    # Forget a queued error for this domain (no-op when none is pending).
    try:
-        res = reqto.get(f"https://{domain}/api/v1/instance/peers", headers=headers, timeout=5)
-        peers = res.json()
+        # Prevent updating any pending errors, nodeinfo was found
+        del pending_errors[domain]
+
    except:
-        print("WARNING: Cannot fetch peers:", domain)
+        pass
+        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
+        # 'except KeyError' (or pending_errors.pop(domain, None)) would suffice
-    # NOISY-DEBUG: print("DEBUG: Returning peers[]:", type(peers))
-    return peers
+def get_hash(domain: str) -> str:
+    # Return the SHA-256 hex digest of the UTF-8 encoded domain string.
+    return hashlib.sha256(domain.encode("utf-8")).hexdigest()
+
+def update_last_blocked(domain: str):
+    # Stamp last_blocked/last_updated for a domain; exits the process on any
+    # SQL failure.
+    # NOTE(review): unlike update_last_instance_fetch(), this does not commit —
+    # confirm a later connection.commit() is guaranteed by all callers
+    # DEBUG: print("DEBUG: Updating last_blocked for domain", domain)
+    try:
+        cursor.execute("UPDATE instances SET last_blocked = ?, last_updated = ? WHERE domain = ? LIMIT 1", [
+            time.time(),
+            time.time(),
+            domain
+        ])
-def post_json_api(domain: str, path: str, data: str) -> list:
-    # NOISY-DEBUG: print("DEBUG: Sending POST to domain,path,data:", domain, path, data)
-    res = reqto.post(f"https://{domain}{path}", data=data, headers=headers, timeout=5)
+        if cursor.rowcount == 0:
+            print("WARNING: Did not update any rows:", domain)
-    if not res.ok:
-        print("WARNING: Cannot query JSON API:", domain, path, data, res.status_code)
-        raise
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
-    doc = res.json()
-    # NOISY-DEBUG: print("DEBUG: Returning doc():", len(doc))
-    return doc
+    # DEBUG: print("DEBUG: EXIT!")
+
+def has_pending_nodeinfos(domain: str) -> bool:
+    # DEBUG: print(f"DEBUG: domain='{domain}' - CALLED!")
+    # Pending as soon as any nodeinfo sub-dictionary holds data for the domain.
+    has_pending = any(domain in nodeinfos[key] for key in nodeinfos)
+
+    # DEBUG: print(f"DEBUG: has_pending='{has_pending}' - EXIT!")
+    return has_pending
+
+def update_nodeinfos(domain: str):
+    # Flush pending nodeinfo fields (detection_mode, nodeinfo_url) for this
+    # domain into the instances table, clearing last error columns, then drop
+    # the in-memory entries. Exits the process on SQL failure.
+    # DEBUG: print("DEBUG: Updating nodeinfo for domain:", domain)
+    sql_string = ''
+    fields = list()
+    for key in nodeinfos:
+        # DEBUG: print("DEBUG: key:", key)
+        if domain in nodeinfos[key]:
+            # DEBUG: print(f"DEBUG: Adding '{nodeinfos[key][domain]}' for key='{key}' ...")
+            fields.append(nodeinfos[key][domain])
+            # 'key' comes from the fixed nodeinfos dict, not user input
+            sql_string += f" {key} = ?,"
+
+    fields.append(domain)
+    # DEBUG: print(f"DEBUG: sql_string='{sql_string}',fields()={len(fields)}")
+
+    sql = "UPDATE instances SET" + sql_string + " last_status_code = NULL, last_error_details = NULL WHERE domain = ? LIMIT 1"
+    # DEBUG: print("DEBUG: sql:", sql)
-def fetch_nodeinfo(domain: str) -> list:
-    print("DEBUG: Fetching nodeinfo from domain:", domain)
-    json = None
    try:
-        print("DEBUG: Fetching 2.1.json from domain:", domain)
-        res = reqto.get(f"https://{domain}/nodeinfo/2.1.json", headers=headers, timeout=5)
+        # DEBUG: print("DEBUG: Executing SQL:", sql)
+        cursor.execute(sql, fields)
+        # DEBUG: print(f"DEBUG: Success! (rowcount={cursor.rowcount })")
-    if res.status_code == 404 or "text/html" in res.headers["content-type"]:
-        print("DEBUG: Fetching 2.0 from domain:", domain)
-        res = reqto.get(f"https://{domain}/nodeinfo/2.0", headers=headers, timeout=5)
+        if cursor.rowcount == 0:
+            print("WARNING: Did not update any rows:", domain)
-    if res.status_code == 404 or "text/html" in res.headers["content-type"]:
-        print("DEBUG: Fetching 2.0.json from domain:", domain)
-        res = reqto.get(f"https://{domain}/nodeinfo/2.0.json", headers=headers, timeout=5)
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',sql='{sql}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
-    if res.ok and "text/html" in res.headers["content-type"]:
-        print("DEBUG: Fetching 2.1 from domain:", domain)
-        res = reqto.get(f"https://{domain}/nodeinfo/2.1", headers=headers, timeout=5)
+    # DEBUG: print("DEBUG: Deleting nodeinfos for domain:", domain)
+    for key in nodeinfos:
+        try:
+            # DEBUG: print("DEBUG: Deleting key:", key)
+            del nodeinfos[key][domain]
+        except:
+            pass
-    if res.status_code == 404 or "text/html" in res.headers["content-type"]:
-        print("DEBUG: Fetching /api/v1/instance from domain:", domain)
-        res = reqto.get(f"https://{domain}/api/v1/instance", headers=headers, timeout=5)
+    # DEBUG: print("DEBUG: EXIT!")
-    print("DEBUG: domain,res.ok,res.status_code:", domain, res.ok, res.status_code)
-    if res.ok:
-        json = res.json()
+def log_error(domain: str, res: any):
+    # Append an entry to error_log: exceptions/strings get pseudo-code 999,
+    # HTTP responses store status_code/reason. Also purges entries older than
+    # config['error_log_cleanup'] seconds. Exits the process on SQL failure.
+    # DEBUG: print("DEBUG: domain,res[]:", domain, type(res))
+    try:
+        # DEBUG: print("DEBUG: BEFORE res[]:", type(res))
+        if isinstance(res, BaseException) or isinstance(res, json.JSONDecodeError):
+            res = str(res)
+
+        # DEBUG: print("DEBUG: AFTER res[]:", type(res))
+        if type(res) is str:
+            cursor.execute("INSERT INTO error_log (domain, error_code, error_message, created) VALUES (?, 999, ?, ?)",[
+                domain,
+                res,
+                time.time()
+            ])
+        else:
+            cursor.execute("INSERT INTO error_log (domain, error_code, error_message, created) VALUES (?, ?, ?, ?)",[
+                domain,
+                res.status_code,
+                res.reason,
+                time.time()
+            ])
+
+        # Cleanup old entries
+        # DEBUG: print(f"DEBUG: Purging old records (distance: {config['error_log_cleanup']})")
+        cursor.execute("DELETE FROM error_log WHERE created < ?", [time.time() - config["error_log_cleanup"]])
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
-    except:
-        print("WARNING: Failed fetching nodeinfo from domain:", domain)
+    # DEBUG: print("DEBUG: EXIT!")
+
+def update_last_error(domain: str, res: any):
+    # Record the latest error for a domain on its instances row (status 999
+    # for exceptions/strings, otherwise the HTTP status/reason). When the row
+    # does not exist yet, the error is parked in pending_errors instead.
+    # Always mirrors the error into error_log via log_error().
+    # DEBUG: print("DEBUG: domain,res[]:", domain, type(res))
+    try:
+        # DEBUG: print("DEBUG: BEFORE res[]:", type(res))
+        if isinstance(res, BaseException) or isinstance(res, json.JSONDecodeError):
+            res = str(res)
+
+        # DEBUG: print("DEBUG: AFTER res[]:", type(res))
+        if type(res) is str:
+            # DEBUG: print(f"DEBUG: Setting last_error_details='{res}'");
+            cursor.execute("UPDATE instances SET last_status_code = 999, last_error_details = ?, last_updated = ? WHERE domain = ? LIMIT 1", [
+                res,
+                time.time(),
+                domain
+            ])
+        else:
+            # DEBUG: print(f"DEBUG: Setting last_error_details='{res.reason}'");
+            cursor.execute("UPDATE instances SET last_status_code = ?, last_error_details = ?, last_updated = ? WHERE domain = ? LIMIT 1", [
+                res.status_code,
+                res.reason,
+                time.time(),
+                domain
+            ])
+
+        if cursor.rowcount == 0:
+            # DEBUG: print("DEBUG: Did not update any rows:", domain)
+            pending_errors[domain] = res
+
+        log_error(domain, res)
+
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
+
+    # DEBUG: print("DEBUG: EXIT!")
+
+def update_last_instance_fetch(domain: str):
+    # Stamp last_instance_fetch/last_updated for a domain and commit
+    # immediately. Exits the process on SQL failure.
+    #print("DEBUG: Updating last_instance_fetch for domain:", domain)
+    try:
+        cursor.execute("UPDATE instances SET last_instance_fetch = ?, last_updated = ? WHERE domain = ? LIMIT 1", [
+            time.time(),
+            time.time(),
+            domain
+        ])
+
+        if cursor.rowcount == 0:
+            print("WARNING: Did not update any rows:", domain)
+
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
+
+    connection.commit()
+    #print("DEBUG: EXIT!")
+
+def update_last_nodeinfo(domain: str):
+    # Stamp last_nodeinfo/last_updated for a domain and commit immediately.
+    # Exits the process on SQL failure.
+    # DEBUG: print("DEBUG: Updating last_nodeinfo for domain:", domain)
+    try:
+        cursor.execute("UPDATE instances SET last_nodeinfo = ?, last_updated = ? WHERE domain = ? LIMIT 1", [
+            time.time(),
+            time.time(),
+            domain
+        ])
+
+        if cursor.rowcount == 0:
+            print("WARNING: Did not update any rows:", domain)
+
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+        sys.exit(255)
+
+    connection.commit()
+    # DEBUG: print("DEBUG: EXIT!")
+
+def get_peers(domain: str, software: str) -> list:
+    # Fetch the list of peer domains for an instance, using a software-specific
+    # strategy: Misskey paginates via POST /api/federation/instances, Lemmy and
+    # the generic fallback use /api/v3/site, PeerTube walks followers/following,
+    # everything else tries the Mastodon-style /api/v1/instance/peers first.
+    # Always stamps last_instance_fetch before returning.
+    # DEBUG: print(f"DEBUG: domain='{domain}',software='{software}' - CALLED!")
+    peers = list()
+
+    if software == "misskey":
+        # DEBUG: print(f"DEBUG: domain='{domain}' is misskey, sending API POST request ...")
+        offset = 0
+        step = config["misskey_offset"]
+
+        # Iterate through all "suspended" (follow-only in its terminology)
+        # instances page-by-page, since Misskey's API does not support
+        # fetching them all at once
+        while True:
+            # DEBUG: print(f"DEBUG: Fetching offset='{offset}' from '{domain}' ...")
+            if offset == 0:
+                fetched = post_json_api(domain, "/api/federation/instances", json.dumps({
+                    "sort" : "+pubAt",
+                    "host" : None,
+                    "limit": step
+                }), {"Origin": domain})
+            else:
+                # NOTE(review): 'offset - 1' makes consecutive pages overlap by
+                # one record — confirm this is intentional deduplication
+                fetched = post_json_api(domain, "/api/federation/instances", json.dumps({
+                    "sort" : "+pubAt",
+                    "host" : None,
+                    "limit" : step,
+                    "offset": offset - 1
+                }), {"Origin": domain})
+
+            # DEBUG: print("DEBUG: fetched():", len(fetched))
+            if len(fetched) == 0:
+                # DEBUG: print("DEBUG: Returned zero bytes, exiting loop:", domain)
+                break
+            elif len(fetched) != config["misskey_offset"]:
+                # DEBUG: print(f"DEBUG: Fetched '{len(fetched)}' row(s) but expected: '{config['misskey_offset']}'")
+                offset = offset + (config["misskey_offset"] - len(fetched))
+            else:
+                # DEBUG: print("DEBUG: Raising offset by step:", step)
+                offset = offset + step
+
+            # Check records
+            # DEBUG: print(f"DEBUG: fetched({len(fetched)})[]={type(fetched)}")
+            if isinstance(fetched, dict) and "error" in fetched and "message" in fetched["error"]:
+                print(f"WARNING: post_json_api() returned error: {fetched['error']['message']}")
+                update_last_error(domain, fetched["error"]["message"])
+                break
+
+            for row in fetched:
+                # DEBUG: print(f"DEBUG: row():{len(row)}")
+                if not "host" in row:
+                    print(f"WARNING: row()={len(row)} does not contain element 'host': {row},domain='{domain}'")
+                    continue
+                elif is_blacklisted(row["host"]):
+                    # DEBUG: print(f"DEBUG: row[host]='{row['host']}' is blacklisted. domain='{domain}'")
+                    continue
+
+                # DEBUG: print(f"DEBUG: Adding peer: '{row['host']}'")
+                peers.append(row["host"])
+
+        #print(f"DEBUG: Updating last_instance_fetch for domain='{domain}' ...")
+        update_last_instance_fetch(domain)
+
+        # DEBUG: print("DEBUG: Returning peers[]:", type(peers))
+        return peers
+    elif software == "lemmy":
+        # DEBUG: print(f"DEBUG: domain='{domain}' is Lemmy, fetching JSON ...")
+        try:
+            res = reqto.get(f"https://{domain}/api/v3/site", headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+            data = res.json()
+            # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code='{res.status_code}',data[]='{type(data)}'")
+            if not res.ok or res.status_code >= 400:
+                print("WARNING: Could not reach any JSON API:", domain)
+                update_last_error(domain, res)
+            elif res.ok and isinstance(data, list):
+                # DEBUG: print(f"DEBUG: domain='{domain}' returned a list: '{data}'")
+                sys.exit(255)
+            elif "federated_instances" in data:
+                # DEBUG: print(f"DEBUG: Found federated_instances for domain='{domain}'")
+                peers = peers + add_peers(data["federated_instances"])
+                # DEBUG: print("DEBUG: Added instance(s) to peers")
+            else:
+                print("WARNING: JSON response does not contain 'federated_instances':", domain)
+                update_last_error(domain, res)
+
+        except BaseException as e:
+            print(f"WARNING: Exception during fetching JSON: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+
+        #print(f"DEBUG: Updating last_instance_fetch for domain='{domain}' ...")
+        update_last_instance_fetch(domain)
+
+        # DEBUG: print("DEBUG: Returning peers[]:", type(peers))
+        return peers
+    elif software == "peertube":
+        # DEBUG: print(f"DEBUG: domain='{domain}' is a PeerTube, fetching JSON ...")
+
+        start = 0
+        # NOTE(review): start is not reset when switching from 'followers' to
+        # 'following' — confirm whether each mode should restart at offset 0
+        for mode in ["followers", "following"]:
+            # DEBUG: print(f"DEBUG: domain='{domain}',mode='{mode}'")
+            while True:
+                try:
+                    res = reqto.get(f"https://{domain}/api/v1/server/{mode}?start={start}&count=100", headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+                    data = res.json()
+                    # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code='{res.status_code}',data[]='{type(data)}'")
+                    if res.ok and isinstance(data, dict):
+                        # DEBUG: print("DEBUG: Success, data:", len(data))
+                        if "data" in data:
+                            # DEBUG: print(f"DEBUG: Found {len(data['data'])} record(s).")
+                            for record in data["data"]:
+                                # DEBUG: print(f"DEBUG: record()={len(record)}")
+                                if mode in record and "host" in record[mode]:
+                                    # DEBUG: print(f"DEBUG: Found host={record[mode]['host']}, adding ...")
+                                    peers.append(record[mode]["host"])
+                                else:
+                                    print(f"WARNING: record from '{domain}' has no '{mode}' or 'host' record: {record}")
+
+                            if len(data["data"]) < 100:
+                                # DEBUG: print("DEBUG: Reached end of JSON response:", domain)
+                                break
+
+                    # Continue with next row
+                    start = start + 100
+
+                except BaseException as e:
+                    print(f"WARNING: Exception during fetching JSON: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+
+        #print(f"DEBUG: Updating last_instance_fetch for domain='{domain}' ...")
+        update_last_instance_fetch(domain)
+
+        # DEBUG: print("DEBUG: Returning peers[]:", type(peers))
+        return peers
+
+    # Generic fallback: Mastodon-style peers endpoint, then /api/v3/site
+    # DEBUG: print(f"DEBUG: Fetching get_peers_url='{get_peers_url}' from '{domain}' ...")
+    try:
+        res = reqto.get(f"https://{domain}{get_peers_url}", headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+        data = res.json()
+        # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code={res.status_code},data[]='{type(data)}'")
+        if not res.ok or res.status_code >= 400:
+            # DEBUG: print(f"DEBUG: Was not able to fetch '{get_peers_url}', trying alternative ...")
+            res = reqto.get(f"https://{domain}/api/v3/site", headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+            data = res.json()
+            # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code={res.status_code},data[]='{type(data)}'")
+            if not res.ok or res.status_code >= 400:
+                print("WARNING: Could not reach any JSON API:", domain)
+                update_last_error(domain, res)
+            elif res.ok and isinstance(data, list):
+                print(f"DEBUG: domain='{domain}' returned a list: '{data}'")
+                sys.exit(255)
+            elif "federated_instances" in data:
+                # DEBUG: print(f"DEBUG: Found federated_instances for domain='{domain}'")
+                peers = peers + add_peers(data["federated_instances"])
+                # DEBUG: print("DEBUG: Added instance(s) to peers")
+            else:
+                print("WARNING: JSON response does not contain 'federated_instances':", domain)
+                update_last_error(domain, res)
+        else:
+            # DEBUG: print("DEBUG: Querying API was successful:", domain, len(data))
+            peers = data
+
+    except BaseException as e:
+        print("WARNING: Some error during get():", domain, e)
+        update_last_error(domain, e)
+
+    #print(f"DEBUG: Updating last_instance_fetch for domain='{domain}' ...")
+    update_last_instance_fetch(domain)
+
+    # DEBUG: print("DEBUG: Returning peers[]:", type(peers))
+    return peers
-    print("DEBUG: Returning json():", len(json))
-    return json
+def post_json_api(domain: str, path: str, parameter: str, extra_headers: dict = None) -> dict:
+    # Send a JSON POST to https://{domain}{path} and return the decoded JSON
+    # response; returns an empty dict and records the error on failure.
+    # BUGFIX: default was a mutable dict literal ({}), the classic shared
+    # default-argument pitfall; normalize None to a fresh dict per call.
+    # DEBUG: print("DEBUG: Sending POST to domain,path,parameter:", domain, path, parameter, extra_headers)
+    if extra_headers is None:
+        extra_headers = {}
+
+    data = {}
+    try:
+        res = reqto.post(f"https://{domain}{path}", data=parameter, headers={**api_headers, **extra_headers}, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+        data = res.json()
+        # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code={res.status_code},data[]='{type(data)}'")
+        if not res.ok or res.status_code >= 400:
+            print(f"WARNING: Cannot query JSON API: domain='{domain}',path='{path}',parameter()={len(parameter)},res.status_code='{res.status_code}',data[]='{type(data)}'")
+            update_last_error(domain, res)
+
+    except BaseException as e:
+        print(f"WARNING: Some error during post(): domain='{domain}',path='{path}',parameter()={len(parameter)},exception[{type(e)}]:'{str(e)}'")
+
+    # DEBUG: print(f"DEBUG: Returning data({len(data)})=[]:{type(data)}")
+    return data
+
+def fetch_nodeinfo(domain: str, path: str = None) -> list:
+    # Obtain a domain's nodeinfo document: try .well-known auto-discovery
+    # first, then fall back to a fixed list of candidate URLs. When 'path' is
+    # given, only the matching candidate URL is tried. On success the
+    # detection mode and URL are parked in the global 'nodeinfos' dicts.
+    # DEBUG: print("DEBUG: Fetching nodeinfo from domain,path:", domain, path)
+
+    nodeinfo = fetch_wellknown_nodeinfo(domain)
+    # DEBUG: print("DEBUG: nodeinfo:", len(nodeinfo))
+
+    if len(nodeinfo) > 0:
+        # DEBUG: print("DEBUG: Returning auto-discovered nodeinfo:", len(nodeinfo))
+        return nodeinfo
+
+    requests = [
+        f"https://{domain}/nodeinfo/2.1.json",
+        f"https://{domain}/nodeinfo/2.1",
+        f"https://{domain}/nodeinfo/2.0.json",
+        f"https://{domain}/nodeinfo/2.0",
+        f"https://{domain}/nodeinfo/1.0",
+        f"https://{domain}/api/v1/instance"
+    ]
+
+    data = {}
+    for request in requests:
+        if path != None and path != "" and request != path:
+            # NOTE(review): this DEBUG print is not commented out like the
+            # others — confirm it should really run on every skipped candidate
+            print(f"DEBUG: path='{path}' does not match request='{request}' - SKIPPED!")
+            continue
+
+        try:
+            # DEBUG: print("DEBUG: Fetching request:", request)
+            res = reqto.get(request, headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+            data = res.json()
+            # DEBUG: print(f"DEBUG: res.ok={res.ok},res.status_code={res.status_code},data[]='{type(data)}'")
+            if res.ok and isinstance(data, dict):
+                # DEBUG: print("DEBUG: Success:", request)
+                # NOTE(review): 'STATIC_CHECK' here vs. 'STATIC_CHECKS' in the
+                # nodeinfos comment — confirm the expected database value
+                nodeinfos["detection_mode"][domain] = "STATIC_CHECK"
+                nodeinfos["nodeinfo_url"][domain] = request
+                break
+            elif res.ok and isinstance(data, list):
+                # DEBUG: print(f"DEBUG: domain='{domain}' returned a list: '{data}'")
+                sys.exit(255)
+            elif not res.ok or res.status_code >= 400:
+                print("WARNING: Failed fetching nodeinfo from domain:", domain)
+                update_last_error(domain, res)
+                continue
+
+        except BaseException as e:
+            # DEBUG: print("DEBUG: Cannot fetch API request:", request)
+            update_last_error(domain, e)
+            pass
+
+    # DEBUG: print("DEBUG: Returning data[]:", type(data))
+    return data
+
+def fetch_wellknown_nodeinfo(domain: str) -> list:
+    # Resolve /.well-known/nodeinfo, follow the first link whose 'rel' matches
+    # a known nodeinfo schema identifier, and return that document (empty dict
+    # on any failure). Records detection mode AUTO_DISCOVERY on success.
+    # DEBUG: print("DEBUG: Fetching .well-known info for domain:", domain)
+    data = {}
+
+    try:
+        res = reqto.get(f"https://{domain}/.well-known/nodeinfo", headers=api_headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+        data = res.json()
+        # DEBUG: print("DEBUG: domain,res.ok,data[]:", domain, res.ok, type(data))
+        if res.ok and isinstance(data, dict):
+            nodeinfo = data
+            # DEBUG: print("DEBUG: Found entries:", len(nodeinfo), domain)
+            if "links" in nodeinfo:
+                # DEBUG: print("DEBUG: Found links in nodeinfo():", len(nodeinfo["links"]))
+                for link in nodeinfo["links"]:
+                    # DEBUG: print("DEBUG: rel,href:", link["rel"], link["href"])
+                    if link["rel"] in nodeinfo_identifier:
+                        # DEBUG: print("DEBUG: Fetching nodeinfo from:", link["href"])
+                        # NOTE(review): unlike every other request this GET has
+                        # no headers/timeout — confirm this is intentional
+                        res = reqto.get(link["href"])
+
+                        data = res.json()
+                        # DEBUG: print("DEBUG: href,res.ok,res.status_code:", link["href"], res.ok, res.status_code)
+                        if res.ok and isinstance(data, dict):
+                            # DEBUG: print("DEBUG: Found JSON nodeinfo():", len(data))
+                            nodeinfos["detection_mode"][domain] = "AUTO_DISCOVERY"
+                            nodeinfos["nodeinfo_url"][domain] = link["href"]
+                            break
+                    else:
+                        print("WARNING: Unknown 'rel' value:", domain, link["rel"])
+            else:
+                print("WARNING: nodeinfo does not contain 'links':", domain)
+
+    except BaseException as e:
+        print("WARNING: Failed fetching .well-known info:", domain)
+        update_last_error(domain, e)
+        pass
+
+    # DEBUG: print("DEBUG: Returning data[]:", type(data))
+    return data
+
+def fetch_generator_from_path(domain: str, path: str = "/") -> str:
+    # Last-resort software detection: fetch an HTML page and read the
+    # <meta name="generator"> tag, then clean version numbers and marketing
+    # suffixes from its content. Returns None when nothing usable is found.
+    # DEBUG: print(f"DEBUG: domain='{domain}',path='{path}' - CALLED!")
+    software = None
+
+    try:
+        # DEBUG: print(f"DEBUG: Fetching path='{path}' from '{domain}' ...")
+        res = reqto.get(f"https://{domain}{path}", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"]))
+
+        # DEBUG: print("DEBUG: domain,res.ok,res.status_code,res.text[]:", domain, res.ok, res.status_code, type(res.text))
+        if res.ok and res.status_code < 300 and len(res.text) > 0:
+            # DEBUG: print("DEBUG: Search for <meta name='generator'>:", domain)
+            doc = bs4.BeautifulSoup(res.text, "html.parser")
+
+            # DEBUG: print("DEBUG: doc[]:", type(doc))
+            tag = doc.find("meta", {"name": "generator"})
+
+            # DEBUG: print(f"DEBUG: tag[{type(tag)}: {tag}")
+            if isinstance(tag, bs4.element.Tag):
+                # DEBUG: print("DEBUG: Found generator meta tag: ", domain)
+                software = tidyup(tag.get("content"))
+                print(f"INFO: domain='{domain}' is generated by '{software}'")
+                nodeinfos["detection_mode"][domain] = "GENERATOR"
+                remove_pending_error(domain)
+
+    except BaseException as e:
+        # DEBUG: print(f"DEBUG: Cannot fetch / from '{domain}':", e)
+        update_last_error(domain, e)
+        pass
+
+    # Normalize the extracted name: empty -> None, strip version numbers
+    # DEBUG: print(f"DEBUG: software[]={type(software)}")
+    if type(software) is str and software == "":
+        # DEBUG: print(f"DEBUG: Corrected empty string to None for software of domain='{domain}'")
+        software = None
+    elif type(software) is str and ("." in software or " " in software):
+        # DEBUG: print(f"DEBUG: software='{software}' may contain a version number, domain='{domain}', removing it ...")
+        software = remove_version(software)
+
+    # Strip common marketing phrases ("powered by X", "X by Y", "X see Y")
+    # DEBUG: print(f"DEBUG: software[]={type(software)}")
+    if type(software) is str and "powered by" in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has 'powered by' in it")
+        software = remove_version(strip_powered_by(software))
+    elif type(software) is str and " by " in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has ' by ' in it")
+        software = strip_until(software, " by ")
+    elif type(software) is str and " see " in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has ' see ' in it")
+        software = strip_until(software, " see ")
+
+    # DEBUG: print(f"DEBUG: software='{software}' - EXIT!")
+    return software
-def determine_software(domain: str) -> str:
-    # NOISY-DEBUG: print("DEBUG: Determining software for domain:", domain)
+def determine_software(domain: str, path: str = None) -> str:
+    # Determine which fediverse software a domain runs: nodeinfo first,
+    # falling back to the HTML generator meta tag. Known forks are folded into
+    # their upstream name (akkoma->pleroma, hometown->mastodon, ...). Returns
+    # None when nothing could be detected.
+    # DEBUG: print("DEBUG: Determining software for domain,path:", domain, path)
    software = None
-    json = fetch_nodeinfo(domain)
-    # NOISY-DEBUG: print("DEBUG: json():", len(json))
-    if json["software"]["name"] in ["akkoma", "rebased"]:
+    # DEBUG: print(f"DEBUG: Fetching nodeinfo from '{domain}' ...")
+    data = fetch_nodeinfo(domain, path)
+
+    # DEBUG: print("DEBUG: data[]:", type(data))
+    if not isinstance(data, dict) or len(data) == 0:
+        # DEBUG: print("DEBUG: Could not determine software type:", domain)
+        return fetch_generator_from_path(domain)
+
+    # DEBUG: print("DEBUG: data():", len(data), data)
+    if "status" in data and data["status"] == "error" and "message" in data:
+        print("WARNING: JSON response is an error:", data["message"])
+        update_last_error(domain, data["message"])
+        return fetch_generator_from_path(domain)
+    elif "software" not in data or "name" not in data["software"]:
+        # DEBUG: print(f"DEBUG: JSON response from {domain} does not include [software][name], fetching / ...")
+        software = fetch_generator_from_path(domain)
+
+        # DEBUG: print(f"DEBUG: Generator for domain='{domain}' is: {software}, EXIT!")
+        return software
+
+    software = tidyup(data["software"]["name"])
+
+    # Fold known forks into their upstream project name
+    # DEBUG: print("DEBUG: sofware after tidyup():", software)
+    if software in ["akkoma", "rebased"]:
+        # DEBUG: print("DEBUG: Setting pleroma:", domain, software)
        software = "pleroma"
-    elif json["software"]["name"] in ["hometown", "ecko"]:
+    elif software in ["hometown", "ecko"]:
+        # DEBUG: print("DEBUG: Setting mastodon:", domain, software)
        software = "mastodon"
-    elif json["software"]["name"] in ["calckey", "groundpolis", "foundkey", "cherrypick"]:
+    elif software in ["calckey", "groundpolis", "foundkey", "cherrypick", "meisskey"]:
+        # DEBUG: print("DEBUG: Setting misskey:", domain, software)
        software = "misskey"
-    else:
-        software = json["software"]["name"]
-
-    # NOISY-DEBUG: print("DEBUG: Returning domain,software:", domain, software)
+    elif software.find("/") > 0:
+        print("WARNING: Spliting of slash:", software)
+        software = software.split("/")[-1];
+    elif software.find("|") > 0:
+        print("WARNING: Spliting of pipe:", software)
+        software = tidyup(software.split("|")[0]);
+    elif "powered by" in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has 'powered by' in it")
+        software = strip_powered_by(software)
+    elif type(software) is str and " by " in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has ' by ' in it")
+        software = strip_until(software, " by ")
+    elif type(software) is str and " see " in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has ' see ' in it")
+        software = strip_until(software, " see ")
+
+    # DEBUG: print(f"DEBUG: software[]={type(software)}")
+    if software == "":
+        print("WARNING: tidyup() left no software name behind:", domain)
+        software = None
+
+    # DEBUG: print(f"DEBUG: software[]={type(software)}")
+    if str(software) == "":
+        # DEBUG: print(f"DEBUG: software for '{domain}' was not detected, trying generator ...")
+        software = fetch_generator_from_path(domain)
+    elif len(str(software)) > 0 and ("." in software or " " in software):
+        # DEBUG: print(f"DEBUG: software='{software}' may contain a version number, domain='{domain}', removing it ...")
+        software = remove_version(software)
+
+    # DEBUG: print(f"DEBUG: software[]={type(software)}")
+    if type(software) is str and "powered by" in software:
+        # DEBUG: print(f"DEBUG: software='{software}' has 'powered by' in it")
+        software = remove_version(strip_powered_by(software))
+
+    # DEBUG: print("DEBUG: Returning domain,software:", domain, software)
    return software
def update_block_reason(reason: str, blocker: str, blocked: str, block_level: str):
-    # NOISY: print("--- Updating block reason:", reason, blocker, blocked, block_level)
+    # Sets the reason on an existing block row (only when none was recorded
+    # yet, reason = '') and bumps its last_seen timestamp.
+    # DEBUG: print("DEBUG: Updating block reason:", reason, blocker, blocked, block_level)
    try:
-        c.execute(
-            "UPDATE blocks SET reason = ? WHERE blocker = ? AND blocked = ? AND block_level = ? AND reason = ''",
+        cursor.execute(
+            "UPDATE blocks SET reason = ?, last_seen = ? WHERE blocker = ? AND blocked = ? AND block_level = ? AND reason = ''",
            (
                reason,
+                time.time(),
                blocker,
                blocked,
                block_level
            ),
        )
-    except:
-        print("ERROR: failed SQL query")
+        # DEBUG: print(f"DEBUG: cursor.rowcount={cursor.rowcount}")
+        if cursor.rowcount == 0:
+            # "domain" is not defined in this function, log the row key instead
+            print("WARNING: Did not update any rows:", blocker, blocked, block_level)
+
+    except BaseException as e:
+        # no "sql" variable exists in this scope, so it must not appear in the
+        # message (it would raise a NameError inside the error handler itself)
+        print(f"ERROR: failed SQL query: reason='{reason}',blocker='{blocker}',blocked='{blocked}',block_level='{block_level}',exception[{type(e)}]:'{str(e)}'")
        sys.exit(255)
-def update_last_seen(last_seen: int, blocker: str, blocked: str, block_level: str):
-    # NOISY: print("--- Updating last_seen:", last_seen, blocker, blocked, block_level)
+    # DEBUG: print("DEBUG: EXIT!")
+
+def update_last_seen(blocker: str, blocked: str, block_level: str):
+    # Refreshes last_seen to "now" for an existing block row.
+    # DEBUG: print("DEBUG: Updating last_seen for:", blocker, blocked, block_level)
    try:
-        c.execute(
+        cursor.execute(
            "UPDATE blocks SET last_seen = ? WHERE blocker = ? AND blocked = ? AND block_level = ?",
            (
-                last_seen,
+                time.time(),
                blocker,
                blocked,
                block_level
            )
        )
-    except:
-        print("ERROR: failed SQL query")
+        if cursor.rowcount == 0:
+            # "domain" is not defined in this function, log the row key instead
+            print("WARNING: Did not update any rows:", blocker, blocked, block_level)
+
+    except BaseException as e:
+        # "last_seen" is no longer a parameter of this function, so it must
+        # not be referenced in the error message (NameError otherwise)
+        print(f"ERROR: failed SQL query: blocker='{blocker}',blocked='{blocked}',block_level='{block_level}',exception[{type(e)}]:'{str(e)}'")
        sys.exit(255)
-def block_instance(blocker: str, blocked: str, reason: str, block_level: str, first_added: int, last_seen: int):
-    if blocker.find("@") > 0:
+    # DEBUG: print("DEBUG: EXIT!")
+
+def block_instance(blocker: str, blocked: str, reason: str, block_level: str):
+    # Inserts a brand-new block row; first_seen/last_seen are both set to "now".
+    # DEBUG: print("DEBUG: blocker,blocked,reason,block_level:", blocker, blocked, reason, block_level)
+    if not validators.domain(blocker.split("/")[0]):
        print("WARNING: Bad blocker:", blocker)
-        raise
+        # a bare "raise" outside an except block only produces an unhelpful
+        # RuntimeError - raise a descriptive exception instead
+        raise ValueError(f"blocker='{blocker}' is not a valid domain")
-    elif blocked.find("@") > 0:
+    elif not validators.domain(blocked.split("/")[0]):
        print("WARNING: Bad blocked:", blocked)
-        raise
+        raise ValueError(f"blocked='{blocked}' is not a valid domain")
-    print("--- New block:", blocker, blocked, reason, block_level, first_added, last_seen)
+    # first_seen/last_seen are no longer parameters and must not be printed
+    # here (they would raise a NameError on every call)
+    print("INFO: New block:", blocker, blocked, reason, block_level)
    try:
-        c.execute(
-            "INSERT INTO blocks SELECT ?, ?, ?, ?, ?, ?",
+        cursor.execute(
+            "INSERT INTO blocks (blocker, blocked, reason, block_level, first_seen, last_seen) VALUES(?, ?, ?, ?, ?, ?)",
            (
                blocker,
                blocked,
                reason,
                block_level,
-                first_added,
-                last_seen
+                time.time(),
+                time.time()
            ),
        )
-    except:
-        print("ERROR: failed SQL query")
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: blocker='{blocker}',blocked='{blocked}',reason='{reason}',block_level='{block_level}',exception[{type(e)}]:'{str(e)}'")
        sys.exit(255)
+    # DEBUG: print("DEBUG: EXIT!")
+
+
+def is_instance_registered(domain: str) -> bool:
+    # Returns whether "domain" already has a row in the "instances" table.
+    # On first use the full domain list is loaded into the "is_registered"
+    # cache so later calls avoid one SELECT per lookup.
+    # DEBUG: print(f"DEBUG: domain='{domain}' - CALLED!")
+    if not is_cache_initialized("is_registered"):
+        # DEBUG: print(f"DEBUG: Cache for 'is_registered' not initialized, fetching all rows ...")
+        try:
+            cursor.execute("SELECT domain FROM instances")
+
+            # Seed the cache with every known domain at once
+            set_all_cache_key("is_registered", cursor.fetchall(), True)
+        except BaseException as e:
+            print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
+            sys.exit(255)
+
+    # Is cache found?
+    registered = is_cache_key_set("is_registered", domain)
+
+    # DEBUG: print(f"DEBUG: registered='{registered}' - EXIT!")
+    return registered
+
+def add_instance(domain: str, origin: str, originator: str, path: str = None):
+    # Registers a new instance row (domain, origin, originator, hash,
+    # detected software, first_seen) and flushes data that was queued while
+    # the row did not exist yet.
+    # DEBUG: print("DEBUG: domain,origin,originator,path:", domain, origin, originator, path)
+    if not validators.domain(domain.split("/")[0]):
        print("WARNING: Bad domain name:", domain)
-        raise
+        # a bare "raise" outside an except block only produces an unhelpful
+        # RuntimeError - raise a descriptive exception instead
+        raise ValueError(f"domain='{domain}' is not a valid domain")
+    elif origin is not None and not validators.domain(origin.split("/")[0]):
+        print("WARNING: Bad origin name:", origin)
+        raise ValueError(f"origin='{origin}' is not a valid domain")
+
+    software = determine_software(domain, path)
+    # DEBUG: print("DEBUG: Determined software:", software)
-    print("--- Adding new instance:", domain)
+    print(f"INFO: Adding instance {domain} (origin: {origin})")
    try:
-        c.execute(
-            "INSERT INTO instances SELECT ?, ?, ?",
+        cursor.execute(
+            "INSERT INTO instances (domain, origin, originator, hash, software, first_seen) VALUES (?, ?, ?, ?, ?, ?)",
            (
                domain,
+                origin,
+                originator,
                get_hash(domain),
-                determine_software(domain)
+                software,
+                time.time()
            ),
        )
-    except:
-        print("ERROR: failed SQL query")
+        set_cache_key("is_registered", domain, True)
+
+        if has_pending_nodeinfos(domain):
+            # DEBUG: print(f"DEBUG: domain='{domain}' has pending nodeinfo being updated ...")
+            update_nodeinfos(domain)
+            # NOTE(review): clearing pending *errors* in the nodeinfo branch
+            # looks like a copy/paste slip - confirm a "pending nodeinfo"
+            # cleanup was intended here instead
+            remove_pending_error(domain)
+
+        if domain in pending_errors:
+            # DEBUG: print("DEBUG: domain has pending error being updated:", domain)
+            update_last_error(domain, pending_errors[domain])
+            remove_pending_error(domain)
+
+    except BaseException as e:
+        print(f"ERROR: failed SQL query: domain='{domain}',exception[{type(e)}]:'{str(e)}'")
        sys.exit(255)
+    else:
+        # only reached when the INSERT and the follow-up updates succeeded
+        # DEBUG: print("DEBUG: Updating nodeinfo for domain:", domain)
+        update_last_nodeinfo(domain)
+
+    # DEBUG: print("DEBUG: EXIT!")
def send_bot_post(instance: str, blocks: dict):
+    # Announces the given instance's block list via the configured bot
+    # account on config["bot_instance"].
+    # NOTE(review): "truncated" and the code filling the block list into
+    # "message" are not visible in this hunk - presumably they live in
+    # unchanged lines omitted from the diff; verify before editing further.
    message = instance + " has blocked the following instances:\n\n"
    if truncated:
        message = message + "(the list has been truncated to the first 20 entries)"
-    botheaders = {**headers, **{"Authorization": "Bearer " + config["bot_token"]}}
+    botheaders = {**api_headers, **{"Authorization": "Bearer " + config["bot_token"]}}
-    req = reqto.post(f"{config['bot_instance']}/api/v1/statuses",
-        data={"status":message, "visibility":config['bot_visibility'], "content_type":"text/plain"},
-        headers=botheaders, timeout=10).json()
+    req = reqto.post(
+        f"{config['bot_instance']}/api/v1/statuses",
+        data={
+            "status"      : message,
+            "visibility"  : config['bot_visibility'],
+            "content_type": "text/plain"
+        },
+        headers=botheaders,
+        timeout=10
+    ).json()
    return True
def get_mastodon_blocks(domain: str) -> dict:
-    # NOISY-DEBUG: print("DEBUG: Fetching mastodon blocks from domain:", domain)
+    # Scrapes a Mastodon instance's /about/more page and returns its blocks
+    # grouped into "reject", "media_removal" and "followers_only".
+    # DEBUG: print("DEBUG: Fetching mastodon blocks from domain:", domain)
    blocks = {
        "Suspended servers": [],
-        "Filtered media": [],
-        "Limited servers": [],
-        "Silenced servers": [],
-    }
-
-    translations = {
-        "Silenced instances": "Silenced servers",
-        "Suspended instances": "Suspended servers",
-        "Gesperrte Server": "Suspended servers",
-        "Gefilterte Medien": "Filtered media",
-        "Stummgeschaltete Server": "Silenced servers",
-        "停止済みのサーバー": "Suspended servers",
-        "メディアを拒否しているサーバー": "Filtered media",
-        "サイレンス済みのサーバー": "Silenced servers",
-        "שרתים מושעים": "Suspended servers",
-        "מדיה מסוננת": "Filtered media",
-        "שרתים מוגבלים": "Silenced servers",
-        "Serveurs suspendus": "Suspended servers",
-        "Médias filtrés": "Filtered media",
-        "Serveurs limités": "Silenced servers",
+        "Filtered media"   : [],
+        "Limited servers"  : [],
+        "Silenced servers" : [],
    }
    try:
-        doc = BeautifulSoup(
-            reqto.get(f"https://{domain}/about/more", headers=headers, timeout=5).text,
+        doc = bs4.BeautifulSoup(
+            reqto.get(f"https://{domain}/about/more", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"])).text,
            "html.parser",
        )
-    except:
-        print("ERROR: Cannot fetch from domain:", domain)
+    except BaseException as e:
+        print("ERROR: Cannot fetch from domain:", domain, e)
+        update_last_error(domain, e)
        return {}
    for header in doc.find_all("h3"):
-        header_text = header.text
+        header_text = tidyup(header.text)
-        if header_text in translations:
-            header_text = translations[header_text]
+        if header_text in language_mapping:
+            # DEBUG: print(f"DEBUG: header_text='{header_text}'")
+            header_text = language_mapping[header_text]
-        if header_text in blocks:
+        # NOTE(review): if only header_text.lower() matches, the append below
+        # still indexes blocks[header_text] and would raise KeyError - confirm
+        # whether the lowered key was meant to be used for the lookup too
+        if header_text in blocks or header_text.lower() in blocks:
            # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
            for line in header.find_all_next("table")[0].find_all("tr")[1:]:
                blocks[header_text].append(
                    {
-                        "domain": line.find("span").text,
-                        "hash": line.find("span")["title"][9:],
-                        "reason": line.find_all("td")[1].text.strip(),
+                        "domain": tidyup(line.find("span").text),
+                        "hash"  : tidyup(line.find("span")["title"][9:]),
+                        "reason": tidyup(line.find_all("td")[1].text),
                    }
                )
-    # NOISY-DEBUG: print("DEBUG: Returning blocks for domain:", domain)
+    # DEBUG: print("DEBUG: Returning blocks for domain:", domain)
    return {
-        "reject": blocks["Suspended servers"],
-        "media_removal": blocks["Filtered media"],
+        "reject"        : blocks["Suspended servers"],
+        "media_removal" : blocks["Filtered media"],
        "followers_only": blocks["Limited servers"] + blocks["Silenced servers"],
    }
def get_friendica_blocks(domain: str) -> dict:
-    # NOISY-DEBUG: print("DEBUG: Fetching friendica blocks from domain:", domain)
+    # Scrapes a Friendica instance's /friendica page and returns the entries
+    # of its "about_blocklist" table as "reject" blocks.
+    # DEBUG: print("DEBUG: Fetching friendica blocks from domain:", domain)
    blocks = []
    try:
-        doc = BeautifulSoup(
-            reqto.get(f"https://{domain}/friendica", headers=headers, timeout=5).text,
+        doc = bs4.BeautifulSoup(
+            reqto.get(f"https://{domain}/friendica", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"])).text,
            "html.parser",
        )
-    except:
-        print("WARNING: Failed to fetch /friendica from domain:", domain)
+    except BaseException as e:
+        print("WARNING: Failed to fetch /friendica from domain:", domain, e)
+        update_last_error(domain, e)
        return {}
    blocklist = doc.find(id="about_blocklist")
    # Prevents exceptions:
    if blocklist is None:
-        # NOISY-DEBUG: print("DEBUG: Instance has no block list:", domain)
+        # DEBUG: print("DEBUG: Instance has no block list:", domain)
        return {}
    for line in blocklist.find("table").find_all("tr")[1:]:
+        # DEBUG: print(f"DEBUG: line='{line}'")
        blocks.append({
-            "domain": line.find_all("td")[0].text.strip(),
-            "reason": line.find_all("td")[1].text.strip()
+            "domain": tidyup(line.find_all("td")[0].text),
+            "reason": tidyup(line.find_all("td")[1].text)
        })
-    # NOISY-DEBUG: print("DEBUG: Returning blocks() for domain:", domain, len(blocks))
+    # DEBUG: print("DEBUG: Returning blocks() for domain:", domain, len(blocks))
    return {
        "reject": blocks
    }
def get_misskey_blocks(domain: str) -> dict:
-    # NOISY-DEBUG: print("DEBUG: Fetching misskey blocks from domain:", domain)
+    # Fetches "suspended" (followers-only) and "blocked" (reject) lists from
+    # a Misskey-style /api/federation/instances endpoint, page by page.
+    # DEBUG: print("DEBUG: Fetching misskey blocks from domain:", domain)
    blocks = {
        "suspended": [],
-        "blocked": []
+        "blocked"  : []
    }
-    try:
-        counter = 0
-        step = 99
-        while True:
-            # iterating through all "suspended" (follow-only in its terminology)
-            # instances page-by-page, since that troonware doesn't support
-            # sending them all at once
-            try:
-                if counter == 0:
-                    # NOISY-DEBUG: print("DEBUG: Sending JSON API request to domain,step,counter:", domain, step, counter)
-                    doc = post_json_api(domain, "/api/federation/instances/", json.dumps({
-                        "sort": "+caughtAt",
-                        "host": None,
-                        "suspended": True,
-                        "limit": step
-                    }))
-                else:
-                    # NOISY-DEBUG: print("DEBUG: Sending JSON API request to domain,step,counter:", domain, step, counter)
-                    doc = post_json_api(domain, "/api/federation/instances/", json.dumps({
-                        "sort": "+caughtAt",
-                        "host": None,
-                        "suspended": True,
-                        "limit": step,
-                        "offset": counter-1
-                    }))
-
-                # NOISY-DEBUG: print("DEBUG: doc():", len(doc))
-                if len(doc) == 0:
-                    # NOISY-DEBUG: print("DEBUG: Returned zero bytes, exiting loop:", domain)
-                    break
-
-                for instance in doc:
-                    # just in case
-                    if instance["isSuspended"]:
-                        blocks["suspended"].append(
-                            {
-                                "domain": instance["host"],
-                                # no reason field, nothing
-                                "reason": ""
-                            }
-                        )
-
-                if len(doc) < step:
-                    # NOISY-DEBUG: print("DEBUG: End of request:", len(doc), step)
-                    break
-
-                # NOISY-DEBUG: print("DEBUG: Raising counter by step:", step)
-                counter = counter + step
-
-            except:
-                print("WARNING: Caught error, exiting loop:", domain)
-                counter = 0
+    offset = 0
+    step = config["misskey_offset"]
+    while True:
+        # Iterate over all "suspended" (followers-only) instances page by
+        # page, since the API does not return them all at once
+        try:
+            # DEBUG: print(f"DEBUG: Fetching offset='{offset}' from '{domain}' ...")
+            if offset == 0:
+                # DEBUG: print("DEBUG: Sending JSON API request to domain,step,offset:", domain, step, offset)
+                fetched = post_json_api(domain, "/api/federation/instances", json.dumps({
+                    "sort"     : "+pubAt",
+                    "host"     : None,
+                    "suspended": True,
+                    "limit"    : step
+                }), {"Origin": domain})
+            else:
+                # DEBUG: print("DEBUG: Sending JSON API request to domain,step,offset:", domain, step, offset)
+                fetched = post_json_api(domain, "/api/federation/instances", json.dumps({
+                    "sort"     : "+pubAt",
+                    "host"     : None,
+                    "suspended": True,
+                    "limit"    : step,
+                    "offset"   : offset - 1
+                }), {"Origin": domain})
+
+            # DEBUG: print("DEBUG: fetched():", len(fetched))
+            if len(fetched) == 0:
+                # DEBUG: print("DEBUG: Returned zero bytes, exiting loop:", domain)
                break
-
-        while True:
-            # same shit, different asshole ("blocked" aka full suspend)
-            try:
-                if counter == 0:
-                    # NOISY-DEBUG: print("DEBUG: Sending JSON API request to domain,step,counter:", domain, step, counter)
-                    doc = post_json_api(domain,"/api/federation/instances", json.dumps({
-                        "sort": "+caughtAt",
-                        "host": None,
-                        "blocked": True,
-                        "limit": step
-                    }))
-                else:
-                    # NOISY-DEBUG: print("DEBUG: Sending JSON API request to domain,step,counter:", domain, step, counter)
-                    doc = post_json_api(domain,"/api/federation/instances", json.dumps({
-                        "sort": "+caughtAt",
-                        "host": None,
-                        "blocked": True,
-                        "limit": step,
-                        "offset": counter-1
-                    }))
-
-                # NOISY-DEBUG: print("DEBUG: doc():", len(doc))
-                if len(doc) == 0:
-                    # NOISY-DEBUG: print("DEBUG: Returned zero bytes, exiting loop:", domain)
-                    break
-
-                for instance in doc:
-                    if instance["isBlocked"]:
-                        blocks["blocked"].append({
-                            "domain": instance["host"],
-                            "reason": ""
-                        })
-
-                if len(doc) < step:
-                    # NOISY-DEBUG: print("DEBUG: End of request:", len(doc), step)
-                    break
-
-                # NOISY-DEBUG: print("DEBUG: Raising counter by step:", step)
-                counter = counter + step
-
-            except:
-                counter = 0
+            elif len(fetched) != config["misskey_offset"]:
+                # DEBUG: print(f"DEBUG: Fetched '{len(fetched)}' row(s) but expected: '{config['misskey_offset']}'")
+                offset = offset + (config["misskey_offset"] - len(fetched))
+            else:
+                # DEBUG: print("DEBUG: Raising offset by step:", step)
+                offset = offset + step
+
+            for instance in fetched:
+                # just in case
+                if instance["isSuspended"]:
+                    blocks["suspended"].append(
+                        {
+                            "domain": tidyup(instance["host"]),
+                            # no reason field, nothing
+                            "reason": None
+                        }
+                    )
+
+        except BaseException as e:
+            print("WARNING: Caught error, exiting loop:", domain, e)
+            update_last_error(domain, e)
+            offset = 0
+            break
+
+    # BUGFIX: the first loop may exit with a non-zero offset (zero-length
+    # page), which would make the second loop start mid-list and skip
+    # entries - always restart pagination from the beginning
+    offset = 0
+    while True:
+        # Same pagination again for the fully "blocked" (full suspend) list
+        try:
+            if offset == 0:
+                # DEBUG: print("DEBUG: Sending JSON API request to domain,step,offset:", domain, step, offset)
+                fetched = post_json_api(domain,"/api/federation/instances", json.dumps({
+                    "sort"   : "+pubAt",
+                    "host"   : None,
+                    "blocked": True,
+                    "limit"  : step
+                }), {"Origin": domain})
+            else:
+                # DEBUG: print("DEBUG: Sending JSON API request to domain,step,offset:", domain, step, offset)
+                fetched = post_json_api(domain,"/api/federation/instances", json.dumps({
+                    "sort"   : "+pubAt",
+                    "host"   : None,
+                    "blocked": True,
+                    "limit"  : step,
+                    "offset" : offset-1
+                }), {"Origin": domain})
+
+            # DEBUG: print("DEBUG: fetched():", len(fetched))
+            if len(fetched) == 0:
+                # DEBUG: print("DEBUG: Returned zero bytes, exiting loop:", domain)
                break
+            elif len(fetched) != config["misskey_offset"]:
+                # DEBUG: print(f"DEBUG: Fetched '{len(fetched)}' row(s) but expected: '{config['misskey_offset']}'")
+                offset = offset + (config["misskey_offset"] - len(fetched))
+            else:
+                # DEBUG: print("DEBUG: Raising offset by step:", step)
+                offset = offset + step
+
+            for instance in fetched:
+                if instance["isBlocked"]:
+                    blocks["blocked"].append({
+                        "domain": tidyup(instance["host"]),
+                        "reason": None
+                    })
+
+        except BaseException as e:
+            print("ERROR: Exception during POST:", domain, e)
+            update_last_error(domain, e)
+            offset = 0
+            break
+
+    # DEBUG: print(f"DEBUG: Updating last_instance_fetch for domain='{domain}' ...")
+    update_last_instance_fetch(domain)
+
+    # DEBUG: print("DEBUG: Returning for domain,blocked(),suspended():", domain, len(blocks["blocked"]), len(blocks["suspended"]))
+    return {
+        "reject"        : blocks["blocked"],
+        "followers_only": blocks["suspended"]
+    }
-    # NOISY-DEBUG: print("DEBUG: Returning for domain,blocked(),suspended():", domain, len(blocks["blocked"]), len(blocks["suspended"]))
-    return {
-        "reject": blocks["blocked"],
-        "followers_only": blocks["suspended"]
-    }
-
-    except:
-        print("WARNING: API request failed for domain:", domain)
-        return {}
-
-def tidyup(domain: str) -> str:
+def tidyup(string: str) -> str:
+    # Normalizes a string (usually a domain) coming from a blocklist:
+    # lower-cases and trims it, then strips ports, schemes, slashes and user
+    # parts. Raw strings keep the regex patterns free of invalid escape
+    # sequences ("\:", "\/", "\@" trigger SyntaxWarning on modern Python).
    # some retards put their blocks in variable case
-    domain = domain.lower()
+    string = string.lower().strip()
    # other retards put the port
-    domain = re.sub("\:\d+$", "", domain)
+    string = re.sub(r":\d+$", "", string)
    # bigger retards put the schema in their blocklist, sometimes even without slashes
-    domain = re.sub("^https?\:(\/*)", "", domain)
+    string = re.sub(r"^https?:(/*)", "", string)
    # and trailing slash
-    domain = re.sub("\/$", "", domain)
+    string = re.sub(r"/$", "", string)
    # and the @
-    domain = re.sub("^\@", "", domain)
+    string = re.sub(r"^@", "", string)
    # the biggest retards of them all try to block individual users
-    domain = re.sub("(.+)\@", "", domain)
+    string = re.sub(r"(.+)@", "", string)
-    return domain
+    return string