1 # Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
2 # Copyright (C) 2023 Free Software Foundation
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published
6 # by the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <https://www.gnu.org/licenses/>.
22 from fba.helpers import blacklist
23 from fba.helpers import config
24 from fba.helpers import domain as domain_helper
25 from fba.helpers import tidyup
27 from fba.http import network
29 from fba.models import blocks
30 from fba.models import instances
# Module-wide logger setup. basicConfig() at import time installs a root
# handler at INFO level; all log calls below go through this named logger.
32 logging.basicConfig(level=logging.INFO)
33 logger = logging.getLogger(__name__)
35 # Language mapping X -> English
# Translation table used by fetch_blocks_from_about() to normalise the
# localised <h3> block-list headings of a remote instance into the four
# English bucket names ("Suspended servers", "Silenced servers",
# "Limited servers", "Filtered media").
# NOTE(review): the "language_mapping = {" assignment and the closing "}"
# fall on lines missing from this listing (original lines ~36-37 and ~61)
# and are therefore not visible here - confirm against the full file.
# English variants -> canonical English
38 "Silenced instances" : "Silenced servers",
39 "Suspended instances" : "Suspended servers",
40 "Suspended servers" : "Suspended servers",
41 "Limited instances" : "Limited servers",
42 "Filtered media" : "Filtered media",
43 # Mapping German -> English
44 "Gesperrte Server" : "Suspended servers",
45 "Gefilterte Medien" : "Filtered media",
46 "Stummgeschaltete Server" : "Silenced servers",
# Mapping Japanese -> English
48 "停止済みのサーバー" : "Suspended servers",
49 "制限中のサーバー" : "Limited servers",
50 "メディアを拒否しているサーバー": "Filtered media",
51 "サイレンス済みのサーバー" : "Silenced servers",
# Mapping Hebrew -> English
53 "שרתים מושעים" : "Suspended servers",
54 "מדיה מסוננת" : "Filtered media",
55 "שרתים מוגבלים" : "Silenced servers",
# Mapping French -> English
57 "Serveurs suspendus" : "Suspended servers",
58 "Médias filtrés" : "Filtered media",
59 "Serveurs limités" : "Limited servers",
60 "Serveurs modérés" : "Limited servers",
# Scrapes a Mastodon instance's /about/more page (falling back to /about)
# and collects the block tables into buckets keyed by the English heading
# text, finally mapping them onto Mastodon-API-style severity keys
# ("reject", "media_removal", "followers_only").
#
# Parameters: domain - fully qualified domain name of the instance.
# Raises: Exception when the domain is blacklisted or not registered.
#
# NOTE(review): the embedded original line numbers in this listing skip
# (e.g. 65 -> 67, 84 -> 89, 95 -> 99), so connecting statements such as the
# try/except around the fetch, the loop "break"/"continue" statements, the
# "blocklist = {" initialisation and the final "return {" are on lines not
# visible in this chunk and are not reproduced here.
63 def fetch_blocks_from_about(domain: str) -> dict:
64 logger.debug("domain='%s' - CALLED!", domain)
65 domain_helper.raise_on(domain)
# Guard clauses: refuse to work on blacklisted or unregistered instances.
67 if blacklist.is_blacklisted(domain):
68 raise Exception(f"domain='{domain}' is blacklisted but function is invoked.")
69 elif not instances.is_registered(domain):
70 raise Exception(f"domain='{domain}' is not registered but function is invoked.")
# Try /about/more first, then /about; the first page that yields any <h3>
# headline is used (presumably followed by a "break" on a missing line).
75 logger.info("Fetching mastodon blocks from domain='%s'", domain)
76 for path in ["/about/more", "/about"]:
78 logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
79 doc = bs4.BeautifulSoup(
80 network.fetch_response(
# connection/read timeouts come from the central config helper
84 (config.get("connection_timeout"), config.get("read_timeout"))
89 if len(doc.find_all("h3")) > 0:
90 logger.debug("path='%s' had some headlines - BREAK!", path)
# Known network errors are recorded on the instance record; the loop then
# continues with the next path instead of aborting.
93 except network.exceptions as exception:
94 logger.warning("Cannot fetch from domain='%s',exception='%s'", domain, type(exception))
95 instances.set_last_error(domain, exception)
# Result buckets keyed by normalised English heading text - apparently the
# body of a "blocklist = {...}" initialisation whose surrounding lines are
# missing from this listing.
99 "Suspended servers": [],
100 "Filtered media" : [],
101 "Limited servers" : [],
102 "Silenced servers" : [],
105 logger.debug("doc[]='%s'", type(doc))
107 logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
# Each <h3> is a block-level heading; translate it to English via
# language_mapping, then parse the table that follows it.
110 for header in doc.find_all("h3"):
111 header_text = tidyup.reason(header.text)
113 logger.debug("header_text='%s'", header_text)
114 if header_text in language_mapping:
115 logger.debug("Translating header_text='%s' ...", header_text)
116 header_text = language_mapping[header_text]
118 logger.warning("header_text='%s' not found in language mapping table", header_text)
120 if header_text in blocklist or header_text.lower() in blocklist:
121 # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
# The first <tr> (table header) is skipped; each remaining row holds the
# blocked domain, its digest and the reason column.
122 for line in header.find_all_next("table")[0].find_all("tr")[1:]:
123 domain = line.find("span").text
# [9:] presumably strips a 9-character "SHA-256: "-style prefix from the
# span's title attribute - TODO confirm against a live page.
124 digest = line.find("span")["title"][9:]
125 reason = line.find_all("td")[1].text
127 logger.debug("domain='%s',reason='%s' - BEFORE!", domain, reason)
# Empty strings become None instead of being tidied up.
128 domain = tidyup.domain(domain) if domain != "" else None
129 reason = tidyup.reason(reason) if reason != "" else None
131 logger.debug("domain='%s',reason='%s' - AFTER!", domain, reason)
132 if domain in [None, ""]:
133 logger.warning("domain='%s' is empty,line='%s' - SKIPPED!", domain, line)
136 logger.debug("Appending domain='%s',digest='%s',reason='%s' to blocklist header_text='%s' ...", domain, digest, reason, blocklist)
# The appended record's fields are on missing lines (original ~138-142).
137 blocklist[header_text].append({
143 logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))
145 logger.debug("Returning blocklist for domain='%s' - EXIT!", domain)
# Map internal bucket names onto Mastodon severity keys; the enclosing
# "return {" and closing "}" are on lines missing from this listing.
147 "reject" : blocklist["Suspended servers"],
148 "media_removal" : blocklist["Filtered media"],
149 "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
# Fetches and validates the block list of the given instance, producing
# records with "blocked", "digest" and "block_level" fields (per the visible
# append fragment below).
#
# Parameters: domain - fully qualified domain name of the instance.
# Raises: Exception when the domain is blacklisted or not registered.
#
# NOTE(review): original line numbers skip (e.g. 159 -> 164, 169 -> 172,
# 199 -> 201), so the "blocklist" initialisation, the per-branch "continue"
# statements, the "blocklist.append({" call, the loop statement over the
# fetched rows and the final "return" are on lines not visible here.
152 def fetch_blocks(domain: str) -> list:
153 logger.debug("domain='%s' - CALLED!", domain)
154 domain_helper.raise_on(domain)
# Guard clauses: same pre-conditions as fetch_blocks_from_about().
156 if blacklist.is_blacklisted(domain):
157 raise Exception(f"domain='{domain}' is blacklisted but function is invoked.")
158 elif not instances.is_registered(domain):
159 raise Exception(f"domain='{domain}' is not registered but function is invoked.")
164 logger.debug("Invoking fetch_blocks_from_about(%s) ...", domain)
165 rows = fetch_blocks_from_about(domain)
167 logger.debug("rows[%s]()=%d", type(rows), len(rows))
169 logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
# Per-entry validation chain: malformed, unwanted, "accept"-level and
# bad-digest entries are skipped (each branch presumably ends in a
# "continue" on a missing line).
172 logger.debug("block[]='%s'", type(block))
173 if not isinstance(block, dict):
174 logger.debug("block[]='%s' is of type 'dict' - SKIPPED!", type(block))
176 elif "domain" not in block:
177 logger.debug("block='%s'", block)
178 logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
180 elif not domain_helper.is_wanted(block["domain"]):
181 logger.debug("block[domain]='%s' is not wanted - SKIPPED!", block["domain"])
183 elif "severity" not in block:
184 logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
186 elif block["severity"] in ["accept", "accepted"]:
187 logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"])
# digest, when present, must be a valid SHA-256 hex string.
189 elif "digest" in block and not validators.hashes.sha256(block["digest"]):
190 logger.warning("block[domain]='%s' has invalid block[digest]='%s' - SKIPPED!", block["domain"], block["digest"])
# Empty or missing comments become None instead of an empty string.
193 reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None
195 logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s' ...", domain, block["domain"], reason, block["severity"])
# Fields of the appended record; the enclosing "blocklist.append({" and
# closing "})" are on lines missing from this listing.
198 "blocked" : block["domain"],
199 "digest" : block["digest"] if "digest" in block else None,
# severity aliases are normalised centrally by the blocks model
201 "block_level": blocks.alias_block_level(block["severity"]),
204 logger.debug("domain='%s' has no block list", domain)
# NOTE(review): the function is annotated "-> list" but no "return" is
# visible in this chunk - presumably "return blocklist" follows.
206 logger.debug("blocklist()=%d - EXIT!", len(blocklist))