# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
# Copyright (C) 2023 Free Software Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging

import bs4

from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup

from fba.http import csrf
from fba.http import network

from fba.models import instances
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Language mapping X -> English.
# Translates localized <h3> headlines scraped from Mastodon /about pages into
# the canonical English category names used as blocklist keys in
# fetch_blocks_from_about() below.
language_mapping = {
    # English variants -> canonical English
    "Silenced instances"            : "Silenced servers",
    "Suspended instances"           : "Suspended servers",
    "Limited instances"             : "Limited servers",
    "Filtered media"                : "Filtered media",
    # Mapping German -> English
    "Gesperrte Server"              : "Suspended servers",
    "Gefilterte Medien"             : "Filtered media",
    "Stummgeschaltete Server"       : "Silenced servers",
    # Mapping Japanese -> English
    "停止済みのサーバー"            : "Suspended servers",
    "制限中のサーバー"              : "Limited servers",
    "メディアを拒否しているサーバー": "Filtered media",
    "サイレンス済みのサーバー"      : "Silenced servers",
    # Mapping Hebrew -> English
    "שרתים מושעים"                  : "Suspended servers",
    "מדיה מסוננת"                   : "Filtered media",
    # NOTE(review): "מוגבלים" literally means "limited"; mapping to
    # "Silenced servers" is preserved from the original table — confirm intent.
    "שרתים מוגבלים"                 : "Silenced servers",
    # Mapping French -> English
    "Serveurs suspendus"            : "Suspended servers",
    "Médias filtrés"                : "Filtered media",
    "Serveurs limités"              : "Limited servers",
    "Serveurs modérés"              : "Limited servers",
}
def fetch_blocks_from_about(domain: str) -> dict:
    """Scrape a Mastodon instance's /about(/more) page for its block lists.

    Tries "/about/more" first, then "/about", and parses the first page whose
    HTML contains <h3> headlines. Localized headlines are translated to
    canonical English category names via language_mapping.

    Parameters:
        domain -- instance domain to scrape (validated by raise_on())

    Returns a dict with keys "reject", "media_removal" and "followers_only",
    each a list of {"domain", "hash", "reason"} dicts (empty lists when no
    page could be fetched).
    """
    logger.debug("domain='%s' - CALLED!", domain)
    domain_helper.raise_on(domain)

    logger.debug("Fetching mastodon blocks from domain='%s'", domain)
    doc = None
    for path in ["/about/more", "/about"]:
        try:
            logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
            doc = bs4.BeautifulSoup(
                network.fetch_response(
                    domain,
                    path,
                    # assumes plain web (non-API) headers here — TODO confirm constant name
                    network.web_headers,
                    (config.get("connection_timeout"), config.get("read_timeout"))
                ).text,
                "html.parser",
            )

            # Stop at the first page that actually carries block-list headlines
            if len(doc.find_all("h3")) > 0:
                logger.debug("path='%s' had some headlines - BREAK!", path)
                break

        except network.exceptions as exception:
            logger.warning("Cannot fetch from domain='%s',exception='%s'", domain, type(exception))
            instances.set_last_error(domain, exception)

    # Collected entries, keyed by canonical English headline
    blocklist = {
        "Suspended servers": [],
        "Filtered media"   : [],
        "Limited servers"  : [],
        "Silenced servers" : [],
    }

    logger.debug("doc[]='%s'", type(doc))
    if doc is None:
        # Neither path could be fetched/parsed: return the empty skeleton
        logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
        return {
            "reject"        : blocklist["Suspended servers"],
            "media_removal" : blocklist["Filtered media"],
            "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
        }

    for header in doc.find_all("h3"):
        header_text = tidyup.reason(header.text)

        logger.debug("header_text='%s'", header_text)
        if header_text in language_mapping:
            logger.debug("Translating header_text='%s' ...", header_text)
            header_text = language_mapping[header_text]
        else:
            logger.warning("header_text='%s' not found in language mapping table", header_text)

        if header_text in blocklist or header_text.lower() in blocklist:
            # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
            for line in header.find_all_next("table")[0].find_all("tr")[1:]:
                blocklist[header_text].append({
                    "domain": tidyup.domain(line.find("span").text),
                    # span's title attribute starts with a fixed 9-char prefix
                    # before the hash — presumably "SHA-256: " — TODO confirm
                    "hash"  : tidyup.domain(line.find("span")["title"][9:]),
                    "reason": tidyup.reason(line.find_all("td")[1].text),
                })
        else:
            logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))

    logger.debug("Returning blocklist for domain='%s' - EXIT!", domain)
    return {
        "reject"        : blocklist["Suspended servers"],
        "media_removal" : blocklist["Filtered media"],
        "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
    }
def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
    """Fetch a Mastodon instance's domain blocks.

    Queries the JSON API endpoint /api/v1/instance/domain_blocks; when that
    yields zero rows, falls back to scraping the /about(/more) pages via
    fetch_blocks_from_about().

    Parameters:
        domain       -- instance domain to query (validated by raise_on())
        nodeinfo_url -- nodeinfo URL of the instance; must be a non-empty str

    Returns a list of dicts with keys "blocked", "hash", "reason" and
    "block_level" (empty on CSRF/network failure).
    Raises ValueError when nodeinfo_url is not a non-empty string.
    """
    logger.debug("domain='%s',nodeinfo_url='%s' - CALLED!", domain, nodeinfo_url)
    domain_helper.raise_on(domain)

    if not isinstance(nodeinfo_url, str):
        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not of type 'str'")
    elif nodeinfo_url == "":
        raise ValueError("Parameter 'nodeinfo_url' is empty")

    blocklist = list()

    # No CSRF by default, you don't have to add network.api_headers by yourself here
    headers = tuple()

    try:
        logger.debug("Checking CSRF for domain='%s'", domain)
        headers = csrf.determine(domain, dict())
    except network.exceptions as exception:
        logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
        instances.set_last_error(domain, exception)

        logger.debug("Returning empty list ... - EXIT!")
        return list()

    try:
        # JSON endpoint for newer Mastodon versions
        logger.debug("Querying API domain_blocks: domain='%s'", domain)
        data = network.get_json_api(
            domain,
            "/api/v1/instance/domain_blocks",
            headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )
        rows = list()

        logger.debug("data[]='%s'", type(data))
        if "error_message" in data:
            logger.debug("Was not able to fetch domain_blocks from domain='%s': status_code=%d,error_message='%s'", domain, data['status_code'], data['error_message'])
            instances.set_last_error(domain, data)
            return blocklist
        elif "json" in data and "error" in data["json"]:
            logger.warning("JSON API returned error message: '%s'", data["json"]["error"])
            instances.set_last_error(domain, data)
            return blocklist
        else:
            rows = data["json"]

            logger.debug("Marking domain='%s' as successfully handled ...", domain)
            instances.set_success(domain)

        logger.debug("rows[%s]()=%d", type(rows), len(rows))
        if len(rows) == 0:
            # NOTE(review): the fallback returns a dict keyed by block level;
            # iterating it below yields string keys which the isinstance()
            # guard skips — confirm whether a conversion step is missing here.
            logger.debug("domain='%s' has returned zero rows, trying /about/more page ...", domain)
            rows = fetch_blocks_from_about(domain)

        logger.debug("rows[%s]()=%d", type(rows), len(rows))
        if len(rows) > 0:
            logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
            for block in rows:
                logger.debug("block[]='%s'", type(block))
                if not isinstance(block, dict):
                    # Fixed log text: this branch fires when block is NOT a dict
                    logger.debug("block[]='%s' is not of type 'dict' - SKIPPED!", type(block))
                    continue
                elif "domain" not in block:
                    logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
                    continue
                elif "severity" not in block:
                    logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
                    continue

                # Empty/missing comments are normalized to None
                reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None

                logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s'", domain, block["domain"], reason, block["severity"])
                blocklist.append({
                    "blocked"    : block["domain"],
                    "hash"       : block["digest"],
                    "reason"     : reason,
                    "block_level": block["severity"]
                })
        else:
            logger.debug("domain='%s' has no block list", domain)

    except network.exceptions as exception:
        logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
    return blocklist