1 # Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
2 # Copyright (C) 2023 Free Software Foundation
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published
6 # by the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU Affero General Public License for more details.
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging

import bs4

from fba import csrf
from fba import utils
from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
from fba.http import federation
from fba.http import network
from fba.models import instances
# Module-wide logging: default to INFO level, logger named after this module.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def fetch_peers(domain: str) -> list:
    """Fetch the list of federated peers from a Lemmy node.

    Queries the instance's `/api/v3/site` JSON endpoint, extracts the
    `federated_instances` structure and records the total peer count for
    `domain` in the instances model.

    :param domain: Domain name of the instance to query (validated first).
    :return: List of peer domain names; empty when the CSRF check or the
             API request fails (the error is recorded, not raised).
    :raises ValueError: From domain_helper.raise_on() for an invalid domain.
    """
    logger.debug("domain='%s' - CALLED!", domain)
    domain_helper.raise_on(domain)

    peers = list()

    # No CSRF by default, you don't have to add network.api_headers by yourself here
    headers = tuple()

    try:
        logger.debug("Checking CSRF for domain='%s'", domain)
        headers = csrf.determine(domain, dict())
    except network.exceptions as exception:
        logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
        instances.set_last_error(domain, exception)
        # CSRF determination failed - abort early with no peers
        return list()

    try:
        logger.debug("Fetching '/api/v3/site' from domain='%s' ...", domain)
        data = network.get_json_api(
            domain,
            "/api/v3/site",
            headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )

        logger.debug("data[]='%s'", type(data))
        if "error_message" in data:
            logger.warning("Could not reach any JSON API: domain='%s'", domain)
            instances.set_last_error(domain, data)
        elif "federated_instances" in data["json"] and isinstance(data["json"]["federated_instances"], dict):
            logger.debug("Found federated_instances for domain='%s'", domain)
            peers = peers + federation.add_peers(data["json"]["federated_instances"])
            logger.debug("Added instance(s) to peers")
        else:
            logger.warning("JSON response does not contain 'federated_instances', domain='%s'", domain)
            instances.set_last_error(domain, data)

    except network.exceptions as exception:
        logger.warning("Exception during fetching JSON: domain='%s',exception[%s]:'%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    # Persist how many peers this instance federates with
    logger.debug("Adding %d for domain='%s'", len(peers), domain)
    instances.set_total_peers(domain, peers)

    logger.debug("peers()=%d - EXIT!", len(peers))
    return peers
def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
    """Scrape the block list from a Lemmy node's /instances HTML page.

    Lemmy renders its blocked-instance list under a localized <h5> heading,
    so the heading text is matched against a table of known translations.
    Every anchor inside the following <ul> is treated as one blocked domain.

    :param domain: Domain name of the blocking (blocker) instance.
    :param nodeinfo_url: Nodeinfo URL of the instance (validated only).
    :return: List of dicts with keys blocker/blocked/reason/block_level;
             empty when nothing is blocked or the request fails.
    :raises ValueError: For an invalid domain or empty/non-str nodeinfo_url.
    """
    logger.debug("domain='%s,nodeinfo_url='%s' - CALLED!", domain, nodeinfo_url)
    domain_helper.raise_on(domain)

    if not isinstance(nodeinfo_url, str):
        raise ValueError(f"Parameter nodeinfo_url[]='{type(nodeinfo_url)}' is not 'str'")
    elif nodeinfo_url == "":
        raise ValueError("Parameter 'nodeinfo_url' is empty")

    # Localized headings marking the "blocked instances" section.
    # NOTE(review): list reconstructed from a damaged source; the English
    # entry is near-certain but additional translations may have been lost.
    translations = [
        "Blocked instances",
        "Instàncies bloquejades",
        "Geblokkeerde instanties",
        "Blockerade instanser",
        "Instàncias blocadas",
        "Instances bloquées",
        "Letiltott példányok",
        "Instancias bloqueadas",
        "Blokeatuta dauden instantziak",
        "Peladen Yang Diblokir",
        "Блокирани Инстанции",
        "Blockierte Instanzen",
        "Estetyt instanssit",
        "Instâncias bloqueadas",
        "Zablokowane instancje",
        "Blokované inštancie",
        "Užblokuoti serveriai",
        "Блокированные Инстансы",
        "Αποκλεισμένοι διακομιστές",
        "Instâncias bloqueadas",
    ]

    blocklist = list()

    try:
        # json endpoint for newer mastodongs
        logger.debug("Fetching /instances from domain='%s'", domain)
        response = network.fetch_response(
            domain,
            "/instances",
            network.web_headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )

        logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
        if response.ok and response.status_code < 300 and response.text != "":
            logger.debug("Parsing %s Bytes ...", len(response.text))

            doc = bs4.BeautifulSoup(response.text, "html.parser")
            logger.debug("doc[]='%s'", type(doc))

            found = None
            headers = doc.findAll("h5")

            # Locate the heading that introduces the blocked-instances list
            logger.debug("Search in %d header(s) ...", len(headers))
            for header in headers:
                logger.debug("header[]='%s'", type(header))
                content = header.contents[0]

                logger.debug("content[%s]='%s'", type(content), content)
                if content in translations:
                    logger.debug("Found header with blocked instances - BREAK!")
                    found = header
                    break

            logger.debug("found[]='%s'", type(found))
            if found is None:
                logger.debug("domain='%s' is not blocking any instances - EXIT!", domain)
                return blocklist

            # Each <a> in the following <ul> is one blocked instance
            blocking = found.find_next("ul").findAll("a")
            logger.debug("Found %d blocked instance(s) ...", len(blocking))
            for tag in blocking:
                logger.debug("tag[]='%s'", type(tag))
                blocked = tidyup.domain(tag.contents[0])
                logger.debug("blocked='%s'", blocked)

                if not utils.is_domain_wanted(blocked):
                    logger.debug("blocked='%s' is not wanted - SKIPPED!", blocked)
                    continue

                logger.debug("Appending blocker='%s',blocked='%s',block_level='reject'", domain, blocked)
                blocklist.append({
                    "blocker"    : domain,
                    "blocked"    : blocked,
                    "reason"     : None,
                    "block_level": "reject",
                })

    except network.exceptions as exception:
        logger.warning("domain='%s',exception[%s]:'%s'", domain, type(exception), str(exception))
        instances.set_last_error(domain, exception)

    logger.debug("blocklist()=%d - EXIT!", len(blocklist))
    return blocklist