# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
# Copyright (C) 2023 Free Software Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
import logging

import bs4

from fba import database
from fba import utils

from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup

from fba.http import federation
from fba.http import network

from fba.models import instances
33 logging.basicConfig(level=logging.INFO)
34 logger = logging.getLogger(__name__)
36 # Language mapping X -> English
39 "Reject": "Suspended servers",
42 def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
43 logger.debug("domain='%s',nodeinfo_url='%s' - CALLED!", domain, nodeinfo_url)
44 domain_helper.raise_on(domain)
46 if not isinstance(nodeinfo_url, str):
47 raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not 'str'")
48 elif nodeinfo_url == "":
49 raise ValueError("Parameter 'nodeinfo_url' is empty")
54 logger.debug("Fetching nodeinfo: domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
55 rows = federation.fetch_nodeinfo(domain, nodeinfo_url)
56 except network.exceptions as exception:
57 logger.warning("Exception '%s' during fetching nodeinfo from domain='%s'", type(exception), domain)
58 instances.set_last_error(domain, exception)
61 logger.warning("Could not fetch nodeinfo from domain='%s'", domain)
63 elif "metadata" not in rows:
64 logger.warning("rows()=%d does not have key 'metadata', domain='%s'", len(rows), domain)
66 elif "federation" not in rows["metadata"]:
67 logger.warning("rows()=%d does not have key 'federation', domain='%s'", len(rows["metadata"]), domain)
70 data = rows["metadata"]["federation"]
73 logger.debug("data[]='%s'", type(data))
74 if "mrf_simple" in data:
75 logger.debug("Found mrf_simple in API response from domain='%s'", domain)
77 for block_level, blocklist in (
81 "quarantined_instances": data["quarantined_instances"]
85 logger.debug("block_level='%s', blocklist()=%d", block_level, len(blocklist))
86 block_level = tidyup.domain(block_level)
87 logger.debug("block_level='%s' - AFTER!", block_level)
90 logger.warning("block_level is now empty!")
92 elif block_level == "accept":
93 logger.debug("domain='%s' skipping block_level='accept'", domain)
96 block_level = utils.alias_block_level(block_level)
98 logger.debug("Checking %d entries from domain='%s',block_level='%s' ...", len(blocklist), domain, block_level)
99 if len(blocklist) > 0:
100 for blocked in blocklist:
101 logger.debug("blocked='%s' - BEFORE!", blocked)
102 blocked = tidyup.domain(blocked)
103 logger.debug("blocked='%s' - AFTER!", blocked)
106 logger.warning("blocked is empty after tidyup.domain(): domain='%s',block_level='%s'", domain, block_level)
108 elif not utils.is_domain_wanted(blocked):
109 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
112 logger.debug("Invoking utils.deobfuscate_domain(%s, %s) ...", blocked, domain)
113 blocked = utils.deobfuscate_domain(blocked, domain)
115 logger.debug("blocked='%s' - DEOBFUSCATED!", blocked)
116 if not utils.is_domain_wanted(blocked):
117 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
120 logger.debug("Appending blocker='%s',blocked='%s',block_level='%s' ...", domain, blocked, block_level)
125 "block_level": block_level,
128 elif "quarantined_instances" in data:
129 logger.debug("Found 'quarantined_instances' in JSON response: domain='%s'", domain)
131 block_level = "quarantined"
133 for blocked in data["quarantined_instances"]:
134 logger.debug("blocked='%s' - BEFORE!", blocked)
135 blocked = tidyup.domain(blocked)
136 logger.debug("blocked='%s' - AFTER!", blocked)
139 logger.warning("blocked is empty after tidyup.domain(): domain='%s',block_level='%s'", domain, block_level)
141 elif not utils.is_domain_wanted(blocked):
142 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
145 logger.debug("Invoking utils.deobfuscate_domain(%s, %s) ...", blocked, domain)
146 blocked = utils.deobfuscate_domain(blocked, domain)
148 logger.debug("blocked='%s' - DEOBFUSCATED!", blocked)
149 if not utils.is_domain_wanted(blocked):
150 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
153 logger.debug("Appending blocker='%s',blocked='%s',block_level='%s' ...", domain, blocked, block_level)
158 "block_level": block_level,
162 logger.warning("Cannot find 'mrf_simple' or 'quarantined_instances' in JSON reply: domain='%s'", domain)
164 logger.debug("Invoking commit() ...")
165 database.connection.commit()
168 if "mrf_simple_info" in data:
169 logger.debug("Found mrf_simple_info in API response: domain='%s'", domain)
171 for block_level, info in (
173 **data["mrf_simple_info"],
174 **(data["quarantined_instances_info"] if "quarantined_instances_info" in data else {})
177 logger.debug("block_level='%s', info.items()=%d", block_level, len(info.items()))
178 block_level = tidyup.domain(block_level)
179 logger.debug("block_level='%s' - AFTER!", block_level)
181 if block_level == "":
182 logger.warning("block_level is now empty!")
184 elif block_level == "accept":
185 logger.debug("domain='%s': Skipping block_level='%s' ...", domain, block_level)
188 block_level = utils.alias_block_level(block_level)
190 logger.debug("Checking %d entries from domain='%s',block_level='%s' ...", len(info.items()), domain, block_level)
191 for blocked, reason in info.items():
192 logger.debug("blocked='%s',reason[%s]='%s' - BEFORE!", blocked, type(reason), reason)
193 blocked = tidyup.domain(blocked)
194 logger.debug("blocked='%s' - AFTER!", blocked)
196 if isinstance(reason, str):
197 logger.debug("reason[] is a string")
198 reason = tidyup.reason(reason)
199 elif isinstance(reason, dict) and "reason" in reason:
200 logger.debug("reason[] is a dict")
201 reason = tidyup.reason(reason["reason"]) if isinstance(reason["reason"], str) else None
202 elif reason is not None:
203 raise ValueError(f"Cannot handle reason[]='{type(reason)}'")
205 logger.debug("blocked='%s',reason='%s' - AFTER!", blocked, reason)
208 logger.warning("blocked is empty after tidyup.domain(): domain='%s',block_level='%s'", domain, block_level)
210 elif not utils.is_domain_wanted(blocked):
211 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
214 logger.debug("Invoking utils.deobfuscate_domain(%s, %s) ...", blocked, domain)
215 blocked = utils.deobfuscate_domain(blocked, domain)
216 logger.debug("blocked='%s' - DEOBFUSCATED!", blocked)
218 logger.debug("Checking %d blockdict records ...", len(blockdict))
219 for block in blockdict:
220 logger.debug("block[blocked]='%s',blocked='%s'", block["blocked"], blocked)
221 if block["blocked"] == blocked:
222 logger.debug("Updating reason='%s' for blocker='%s'", reason, block["blocked"])
223 block["reason"] = reason
225 elif "quarantined_instances_info" in data and "quarantined_instances" in data["quarantined_instances_info"]:
226 logger.debug("Found 'quarantined_instances_info' in JSON response: domain='%s'", domain)
228 block_level = "quarantined"
230 #print(data["quarantined_instances_info"])
231 rows = data["quarantined_instances_info"]["quarantined_instances"]
233 logger.debug("blocked='%s' - BEFORE!", blocked)
234 blocked = tidyup.domain(blocked)
235 logger.debug("blocked='%s' - AFTER!", blocked)
237 if blocked not in rows or "reason" not in rows[blocked]:
238 logger.warning("Cannot find blocked='%s' in rows()=%d,domain='%s' - BREAK!", blocked, len(rows), domain)
241 reason = rows[blocked]["reason"]
242 logger.debug("reason='%s'", reason)
245 logger.warning("blocked is empty after tidyup.domain(): domain='%s',block_level='%s'", domain, block_level)
247 elif not utils.is_domain_wanted(blocked):
248 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
251 logger.debug("Invoking utils.deobfuscate_domain(%s, %s) ...", blocked, domain)
252 blocked = utils.deobfuscate_domain(blocked, domain)
254 logger.debug("blocked='%s' - DEOBFUSCATED!", blocked)
255 if not utils.is_domain_wanted(blocked):
256 logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
259 logger.debug("Checking %d blockdict records ...", len(blockdict))
260 for block in blockdict:
261 logger.debug("block[blocked]='%s',blocked='%s'", block["blocked"], blocked)
262 if block["blocked"] == blocked:
263 logger.debug("Updating reason='%s' for blocker='%s'", reason, block["blocked"])
264 block["reason"] = reason
266 logger.warning("Cannot find 'mrf_simple_info' or 'quarantined_instances_info' in JSON reply: domain='%s'", domain)
269 logger.debug("Did not find any useable JSON elements, domain='%s', continuing with /about page ...", domain)
270 blocklist = fetch_blocks_from_about(domain)
272 logger.debug("blocklist()=%d", len(blocklist))
273 if len(blocklist) > 0:
274 logger.info("Checking %d record(s) ...", len(blocklist))
275 for block_level in blocklist:
276 logger.debug("block_level='%s'", block_level)
277 rows = blocklist[block_level]
279 logger.debug("rows[%s]()=%d'", type(rows), len(rows))
281 logger.debug("record[]='%s'", type(record))
282 blocked = tidyup.domain(record["blocked"])
283 reason = tidyup.reason(record["reason"])
284 logger.debug("blocked='%s',reason='%s' - AFTER!", blocked, reason)
287 logger.warning("blocked is empty after tidyup.domain(): domain='%s',block_level='%s'", domain, block_level)
289 elif not utils.is_domain_wanted(blocked):
290 logger.warning("blocked='%s' is not wanted - SKIPPED!", blocked)
293 logger.debug("Invoking utils.deobfuscate_domain(%s, %s) ...", blocked, domain)
294 blocked = utils.deobfuscate_domain(blocked, domain)
295 logger.debug("blocked='%s' - DEOBFUSCATED!", blocked)
297 logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s' ...",domain, blocked, reason, block_level)
302 "block_level": block_level,
305 logger.debug("blockdict()=%d - EXIT!", len(blockdict))
308 def fetch_blocks_from_about(domain: str) -> dict:
309 logger.debug("domain='%s' - CALLED!", domain)
310 domain_helper.raise_on(domain)
312 logger.debug("Fetching mastodon blocks from domain='%s'", domain)
314 for path in ["/instance/about/index.html"]:
319 logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
320 response = network.fetch_response(
324 (config.get("connection_timeout"), config.get("read_timeout"))
327 logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
328 if not response.ok or response.text.strip() == "":
329 logger.warning("path='%s' does not exist on domain='%s' - SKIPPED!", path, domain)
332 logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
333 doc = bs4.BeautifulSoup(
338 logger.debug("doc[]='%s'", type(doc))
339 if doc.find("h2") is not None:
340 logger.debug("Found 'h2' header in path='%s' - BREAK!", path)
343 except network.exceptions as exception:
344 logger.warning("Cannot fetch from domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
345 instances.set_last_error(domain, exception)
349 "Suspended servers": [],
350 "Filtered media" : [],
351 "Limited servers" : [],
352 "Silenced servers" : [],
355 logger.debug("doc[]='%s'", type(doc))
357 logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
360 for header in doc.find_all("h2"):
361 logger.debug("header[%s]='%s'", type(header), header)
362 header_text = tidyup.reason(header.text)
364 logger.debug("header_text='%s' - BEFORE!", header_text)
365 if header_text in language_mapping:
366 logger.debug("header_text='%s' - FOUND!", header_text)
367 header_text = language_mapping[header_text]
369 logger.warning("header_text='%s' not found in language mapping table", header_text)
371 logger.debug("header_text='%s - AFTER!'", header_text)
372 if header_text in blocklist or header_text.lower() in blocklist:
373 # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
374 logger.debug("Found header_text='%s', importing domain blocks ...", header_text)
375 for line in header.find_next("table").find_all("tr")[1:]:
376 logger.debug("line[]='%s'", type(line))
377 blocklist[header_text].append({
378 "blocked": tidyup.domain(line.find_all("td")[0].text),
379 "reason" : tidyup.reason(line.find_all("td")[1].text),
382 logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))
384 logger.debug("Returning blocklist for domain='%s' - EXIT!", domain)
386 "reject" : blocklist["Suspended servers"],
387 "media_removal" : blocklist["Filtered media"],
388 "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],