# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
# Copyright (C) 2023 Free Software Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

import logging

import bs4

from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup

from fba.http import csrf
from fba.http import network

from fba.models import blocks
from fba.models import instances

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Language mapping X -> English
language_mapping = {
    # English -> English
    "Silenced instances"            : "Silenced servers",
    "Suspended instances"           : "Suspended servers",
    "Limited instances"             : "Limited servers",
    "Filtered media"                : "Filtered media",
    # Mapping German -> English
    "Gesperrte Server"              : "Suspended servers",
    "Gefilterte Medien"             : "Filtered media",
    "Stummgeschaltete Server"       : "Silenced servers",
    # Mapping Japanese -> English
    "停止済みのサーバー"            : "Suspended servers",
    "制限中のサーバー"              : "Limited servers",
    "メディアを拒否しているサーバー": "Filtered media",
    "サイレンス済みのサーバー"      : "Silenced servers",
    # Mapping Hebrew -> English
    "שרתים מושעים"                  : "Suspended servers",
    "מדיה מסוננת"                   : "Filtered media",
    "שרתים מוגבלים"                 : "Silenced servers",
    # Mapping French -> English
    "Serveurs suspendus"            : "Suspended servers",
    "Médias filtrés"                : "Filtered media",
    "Serveurs limités"              : "Limited servers",
    "Serveurs modérés"              : "Limited servers",
}

def fetch_blocks_from_about(domain: str) -> dict:
    logger.debug("domain='%s' - CALLED!", domain)
    domain_helper.raise_on(domain)

    logger.debug("Fetching mastodon blocks from domain='%s'", domain)
    doc = None
    for path in ["/about/more", "/about"]:
        try:
            logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
            doc = bs4.BeautifulSoup(
                network.fetch_response(
                    domain,
                    path,
                    network.web_headers,
                    (config.get("connection_timeout"), config.get("read_timeout"))
                ).text,
                "html.parser",
            )

            if len(doc.find_all("h3")) > 0:
                logger.debug("path='%s' had some headlines - BREAK!", path)
                break

        except network.exceptions as exception:
            logger.warning("Cannot fetch from domain='%s',exception='%s'", domain, type(exception))
            instances.set_last_error(domain, exception)
            break

    blocklist = {
        "Suspended servers": [],
        "Filtered media"   : [],
        "Limited servers"  : [],
        "Silenced servers" : [],
    }

    logger.debug("doc[]='%s'", type(doc))
    if doc is None:
        logger.warning("Cannot fetch any /about pages for domain='%s' - EXIT!", domain)
        return blocklist

    for header in doc.find_all("h3"):
        header_text = tidyup.reason(header.text)

        logger.debug("header_text='%s'", header_text)
        if header_text in language_mapping:
            logger.debug("Translating header_text='%s' ...", header_text)
            header_text = language_mapping[header_text]
        else:
            logger.warning("header_text='%s' not found in language mapping table", header_text)

        if header_text in blocklist or header_text.lower() in blocklist:
            # replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
            for line in header.find_all_next("table")[0].find_all("tr")[1:]:
                blocklist[header_text].append({
                    "domain": tidyup.domain(line.find("span").text),
                    # the span's "title" attribute carries the hashed domain name after a 9-character prefix (e.g. "SHA-256: ")
                    "hash"  : tidyup.domain(line.find("span")["title"][9:]),
                    "reason": tidyup.reason(line.find_all("td")[1].text),
                })
        else:
            logger.warning("header_text='%s' not found in blocklist()=%d", header_text, len(blocklist))

    logger.debug("Returning blocklist for domain='%s' - EXIT!", domain)
    return {
        "reject"        : blocklist["Suspended servers"],
        "media_removal" : blocklist["Filtered media"],
        "followers_only": blocklist["Limited servers"] + blocklist["Silenced servers"],
    }
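
# Illustrative sketch of the markup that fetch_blocks_from_about() looks for on a
# Mastodon about page. This is an assumed example derived from the parsing code
# above, not HTML copied from any real instance: each translated <h3> heading is
# followed by a table whose first row is a header, and every further row holds
# the (possibly obfuscated) domain in a <span> whose "title" attribute carries
# the hash, plus a reason cell.
#
#   <h3>Suspended servers</h3>
#   <table>
#     <tr><th>Domain</th><th>Reason</th></tr>
#     <tr>
#       <td><span title="SHA-256: 1f3a...">example.org</span></td>
#       <td>Spam</td>
#     </tr>
#   </table>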

def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
    logger.debug("domain='%s',nodeinfo_url='%s' - CALLED!", domain, nodeinfo_url)
    domain_helper.raise_on(domain)

    if not isinstance(nodeinfo_url, str):
        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not of type 'str'")
    elif nodeinfo_url == "":
        raise ValueError("Parameter 'nodeinfo_url' is empty")

    # Init block list
    blocklist = list()

    # No CSRF token by default, so there is no need to add network.api_headers yourself here
    headers = tuple()

    try:
        logger.debug("Checking CSRF for domain='%s'", domain)
        headers = csrf.determine(domain, dict())
    except network.exceptions as exception:
        logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
        instances.set_last_error(domain, exception)

        logger.debug("Returning empty list ... - EXIT!")
        return list()

    try:
        # JSON endpoint for newer Mastodon versions
        logger.debug("Querying API domain_blocks: domain='%s'", domain)
        data = network.get_json_api(
            domain,
            "/api/v1/instance/domain_blocks",
            headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )

        logger.debug("data[]='%s'", type(data))
        if "error_message" in data:
            logger.debug("Was not able to fetch domain_blocks from domain='%s': status_code=%d,error_message='%s'", domain, data['status_code'], data['error_message'])
            instances.set_last_error(domain, data)
            return blocklist
        elif "json" in data and "error" in data["json"]:
            logger.warning("JSON API returned error message: '%s'", data["json"]["error"])
            instances.set_last_error(domain, data)
            return blocklist
        else:
            # Take the block list rows from the JSON response
            rows = data["json"]

            logger.debug("Marking domain='%s' as successfully handled ...", domain)
            instances.set_success(domain)

        logger.debug("rows[%s]()=%d", type(rows), len(rows))
        if len(rows) == 0:
            logger.debug("domain='%s' has returned zero rows, trying /about/more page ...", domain)
            rows = fetch_blocks_from_about(domain)

        logger.debug("rows[%s]()=%d", type(rows), len(rows))
        if len(rows) > 0:
            logger.debug("Checking %d entries from domain='%s' ...", len(rows), domain)
            for block in rows:
                # Check type and required elements before using this entry
                logger.debug("block[]='%s'", type(block))
                if not isinstance(block, dict):
                    logger.debug("block[]='%s' is not of type 'dict' - SKIPPED!", type(block))
                    continue
                elif "domain" not in block:
                    logger.warning("block()=%d does not contain element 'domain' - SKIPPED!", len(block))
                    continue
                elif "severity" not in block:
                    logger.warning("block()=%d does not contain element 'severity' - SKIPPED!", len(block))
                    continue
                elif block["severity"] in ["accept", "accepted"]:
                    logger.debug("block[domain]='%s' has unwanted severity level '%s' - SKIPPED!", block["domain"], block["severity"])
                    continue

                reason = tidyup.reason(block["comment"]) if "comment" in block and block["comment"] is not None and block["comment"] != "" else None

                logger.debug("Appending blocker='%s',blocked='%s',reason='%s',block_level='%s'", domain, block["domain"], reason, block["severity"])
                blocklist.append({
                    "blocker"    : domain,
                    "blocked"    : block["domain"],
                    "hash"       : block["digest"],
                    "reason"     : reason,
                    "block_level": blocks.alias_block_level(block["severity"]),
                })
        else:
            logger.debug("domain='%s' has no block list", domain)

    except network.exceptions as exception:
        logger.warning("domain='%s',exception[%s]='%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
    return blocklist
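
# Minimal manual-test sketch: running this module directly dumps one instance's
# block list to the log. The domain and nodeinfo URL below are placeholder values
# (assumptions, not real instances), and the call presumes the usual fba
# configuration has been loaded.
if __name__ == "__main__":
    for entry in fetch_blocks("example.social", "https://example.social/nodeinfo/2.0"):
        logger.info("blocked='%s',block_level='%s',reason='%s'", entry["blocked"], entry["block_level"], entry["reason"])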