]> git.mxchange.org Git - fba.git/blob - fba/commands.py
dc98d4d6c3d8b063b9383fd300f1fd778f7678ae
[fba.git] / fba / commands.py
1 # Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
2 # Copyright (C) 2023 Free Software Foundation
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License as published
6 # by the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12 # GNU Affero General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
16
17 import csv
18 import inspect
19 import json
20 import time
21
22 import argparse
23 import atoma
24 import bs4
25 import markdown
26 import reqto
27 import validators
28
29 from fba import blacklist
30 from fba import config
31 from fba import federation
32 from fba import fba
33 from fba import network
34
35 from fba.helpers import locking
36 from fba.helpers import tidyup
37
38 from fba.models import blocks
39 from fba.models import instances
40
41 from fba.networks import friendica
42 from fba.networks import mastodon
43 from fba.networks import misskey
44 from fba.networks import pleroma
45
def check_instance(args: argparse.Namespace) -> int:
    """Check whether args.domain could be newly registered.

    Returns 0 when the domain is unknown (safe to add), 100 when it is not a
    valid domain name, 101 when it is blacklisted and 102 when it is already
    registered.
    """
    # Guard clauses replace the status-variable pattern; each prints the
    # matching warning and returns immediately.
    if not validators.domain(args.domain):
        print(f"WARNING: args.domain='{args.domain}' is not valid")
        return 100

    if blacklist.is_blacklisted(args.domain):
        print(f"WARNING: args.domain='{args.domain}' is blacklisted")
        return 101

    if instances.is_registered(args.domain):
        print(f"WARNING: args.domain='{args.domain}' is already registered")
        return 102

    print(f"INFO: args.domain='{args.domain}' is not known")
    return 0
63
def fetch_bkali(args: argparse.Namespace) -> int:
    """Fetch the instance list from the gql.api.bka.li GraphQL API and run a
    federation fetch for every new, valid, non-blacklisted domain.

    Returns 0 on success, 100/101 on API-level errors and 102 when the HTTP
    request itself fails.
    """
    # DEBUG: print(f"DEBUG: args[]='{type(args)}' - CALLED!")
    domains = list()
    try:
        fetched = network.post_json_api("gql.api.bka.li", "/v1/graphql", json.dumps({
            "query": "query domainlist {nodeinfo(order_by: {domain: asc}) {domain}}"
        }))

        # DEBUG: print(f"DEBUG: fetched[]='{type(fetched)}'")
        if "error_message" in fetched:
            print(f"WARNING: post_json_api() for 'gql.api.bka.li' returned error message: {fetched['error_message']}")
            return 100
        elif isinstance(fetched["json"], dict) and "error" in fetched["json"] and "message" in fetched["json"]["error"]:
            # BUGFIX: the error object lives under fetched["json"], not under
            # fetched itself - the old lookup raised KeyError here.
            print(f"WARNING: post_json_api() returned error: {fetched['json']['error']['message']}")
            return 101

        rows = fetched["json"]

        # DEBUG: print(f"DEBUG: rows({len(rows)})[]='{type(rows)}'")
        if len(rows) == 0:
            raise Exception("WARNING: Returned no records")
        elif "data" not in rows:
            raise Exception(f"WARNING: rows()={len(rows)} does not contain key 'data'")
        elif "nodeinfo" not in rows["data"]:
            raise Exception(f"WARNING: rows()={len(rows['data'])} does not contain key 'nodeinfo'")

        for entry in rows["data"]["nodeinfo"]:
            # DEBUG: print(f"DEBUG: entry['{type(entry)}']='{entry}'")
            # Skip malformed, invalid, blacklisted, already-known or
            # recently-fetched domains.
            if "domain" not in entry:
                print(f"WARNING: entry()={len(entry)} does not contain 'domain' - SKIPPED!")
                continue
            elif not validators.domain(entry["domain"]):
                print(f"WARNING: domain='{entry['domain']}' is not a valid domain - SKIPPED!")
                continue
            elif blacklist.is_blacklisted(entry["domain"]):
                # DEBUG: print(f"DEBUG: domain='{entry['domain']}' is blacklisted - SKIPPED!")
                continue
            elif instances.is_registered(entry["domain"]):
                # DEBUG: print(f"DEBUG: domain='{entry['domain']}' is already registered - SKIPPED!")
                continue
            elif instances.is_recent(entry["domain"]):
                # DEBUG: print(f"DEBUG: domain='{entry['domain']}' has been recently fetched - SKIPPED!")
                continue

            # DEBUG: print(f"DEBUG: Adding domain='{entry['domain']}' ...")
            domains.append(entry["domain"])

    except network.exceptions as exception:
        print(f"ERROR: Cannot fetch graphql,exception[{type(exception)}]:'{str(exception)}' - EXIT!")
        return 102

    # DEBUG: print(f"DEBUG: domains()={len(domains)}")
    if len(domains) > 0:
        locking.acquire()

        print(f"INFO: Adding {len(domains)} new instances ...")
        for domain in domains:
            try:
                print(f"INFO: Fetching instances from domain='{domain}' ...")
                federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
            except network.exceptions as exception:
                # A single failing domain must not abort the whole run.
                print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_bkali) from domain='{domain}'")
                instances.set_last_error(domain, exception)

    # DEBUG: print("DEBUG: Success - EXIT!")
    return 0
130
def fetch_blocks(args: argparse.Namespace):
    """Fetch and store block lists from registered instances.

    When args.domain is set, only that single (valid, registered,
    non-blacklisted) instance is re-checked; otherwise every instance of a
    supported software whose last block check is older than the configured
    'recheck_block' interval is processed.  New blocks are inserted, known
    blocks get their 'last seen' timestamp and reason refreshed, and an
    optional bot post is sent for newly found 'reject' level blocks.
    """
    # DEBUG: print(f"DEBUG: args[]='{type(args)}' - CALLED!")
    if args.domain is not None and args.domain != "":
        # DEBUG: print(f"DEBUG: args.domain='{args.domain}' - checking ...")
        if not validators.domain(args.domain):
            print(f"WARNING: domain='{args.domain}' is not valid.")
            return
        elif blacklist.is_blacklisted(args.domain):
            print(f"WARNING: domain='{args.domain}' is blacklisted, won't check it!")
            return
        elif not instances.is_registered(args.domain):
            print(f"WARNING: domain='{args.domain}' is not registered, please run ./fba.py fetch_instances {args.domain} first.")
            return

    locking.acquire()

    if args.domain is not None and args.domain != "":
        # Re-check single domain
        # DEBUG: print(f"DEBUG: Querying database for single args.domain='{args.domain}' ...")
        fba.cursor.execute(
            "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE domain = ?", [args.domain]
        )
    else:
        # Re-check after "timeout" (aka. minimum interval)
        fba.cursor.execute(
            "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'bookwyrm', 'takahe') AND (last_blocked IS NULL OR last_blocked < ?) ORDER BY rowid DESC", [time.time() - config.get("recheck_block")]
        )

    rows = fba.cursor.fetchall()
    print(f"INFO: Checking {len(rows)} entries ...")
    for blocker, software, origin, nodeinfo_url in rows:
        # DEBUG: print("DEBUG: BEFORE blocker,software,origin,nodeinfo_url:", blocker, software, origin, nodeinfo_url)
        # blockdict collects newly found 'reject' blocks for the optional
        # bot post at the end of each blocker's iteration.
        blockdict = list()
        blocker = tidyup.domain(blocker)
        # DEBUG: print("DEBUG: AFTER blocker,software:", blocker, software)

        if blocker == "":
            print("WARNING: blocker is now empty!")
            continue
        elif blacklist.is_blacklisted(blocker):
            print(f"WARNING: blocker='{blocker}' is blacklisted now!")
            continue

        # DEBUG: print(f"DEBUG: blocker='{blocker}'")
        # Record the check time even if the fetch below fails.
        instances.set_last_blocked(blocker)

        if software == "pleroma":
            print(f"INFO: blocker='{blocker}',software='{software}'")
            pleroma.fetch_blocks(blocker, origin, nodeinfo_url)
        elif software == "mastodon":
            print(f"INFO: blocker='{blocker}',software='{software}'")
            mastodon.fetch_blocks(blocker, origin, nodeinfo_url)
        elif software == "friendica" or software == "misskey":
            print(f"INFO: blocker='{blocker}',software='{software}'")

            # Friendica/misskey return their block lists directly, keyed by
            # block level; pleroma/mastodon handle persistence themselves.
            blocking = list()
            if software == "friendica":
                blocking = friendica.fetch_blocks(blocker)
            elif software == "misskey":
                blocking = misskey.fetch_blocks(blocker)

            print(f"INFO: Checking {len(blocking.items())} entries from blocker='{blocker}',software='{software}' ...")
            for block_level, blocklist in blocking.items():
                # DEBUG: print("DEBUG: blocker,block_level,blocklist():", blocker, block_level, len(blocklist))
                block_level = tidyup.domain(block_level)
                # DEBUG: print("DEBUG: AFTER-block_level:", block_level)
                if block_level == "":
                    print("WARNING: block_level is empty, blocker:", blocker)
                    continue

                # DEBUG: print(f"DEBUG: Checking {len(blocklist)} entries from blocker='{blocker}',software='{software}',block_level='{block_level}' ...")
                for block in blocklist:
                    # NOTE(review): assumes each block dict holds exactly
                    # (blocked, reason) in insertion order - verify against
                    # the friendica/misskey fetchers.
                    blocked, reason = block.values()
                    # DEBUG: print(f"DEBUG: blocked='{blocked}',reason='{reason}' - BEFORE!")
                    blocked = tidyup.domain(blocked)
                    reason  = tidyup.reason(reason) if reason is not None and reason != "" else None
                    # DEBUG: print(f"DEBUG: blocked='{blocked}',reason='{reason}' - AFTER!")

                    if blocked == "":
                        print("WARNING: blocked is empty:", blocker)
                        continue
                    elif blacklist.is_blacklisted(blocked):
                        # DEBUG: print(f"DEBUG: blocked='{blocked}' is blacklisted - skipping!")
                        continue
                    elif blocked.count("*") > 0:
                        # Some friendica servers also obscure domains without hash
                        row = instances.deobscure("*", blocked)

                        # DEBUG: print(f"DEBUG: row[]='{type(row)}'")
                        if row is None:
                            print(f"WARNING: Cannot deobsfucate blocked='{blocked}',blocker='{blocker}',software='{software}' - SKIPPED!")
                            continue

                        # Replace the obscured values with the deobscured row.
                        blocked      = row[0]
                        origin       = row[1]
                        nodeinfo_url = row[2]
                    elif blocked.count("?") > 0:
                        # Some obscure them with question marks, not sure if that's dependent on version or not
                        row = instances.deobscure("?", blocked)

                        # DEBUG: print(f"DEBUG: row[]='{type(row)}'")
                        if row is None:
                            print(f"WARNING: Cannot deobsfucate blocked='{blocked}',blocker='{blocker}',software='{software}' - SKIPPED!")
                            continue

                        blocked      = row[0]
                        origin       = row[1]
                        nodeinfo_url = row[2]

                    # DEBUG: print("DEBUG: Looking up instance by domain:", blocked)
                    if not validators.domain(blocked):
                        print(f"WARNING: blocked='{blocked}',software='{software}' is not a valid domain name - SKIPPED!")
                        continue
                    elif blocked.endswith(".arpa"):
                        # DEBUG: print(f"DEBUG: blocked='{blocked}' is ending with '.arpa' - SKIPPED!")
                        continue
                    elif not instances.is_registered(blocked):
                        # DEBUG: print("DEBUG: Hash wasn't found, adding:", blocked, blocker)
                        try:
                            instances.add(blocked, blocker, inspect.currentframe().f_code.co_name, nodeinfo_url)
                        except network.exceptions as exception:
                            print(f"Exception during adding blocked='{blocked}',blocker='{blocker}': '{type(exception)}'")
                            continue

                    if not blocks.is_instance_blocked(blocker, blocked, block_level):
                        blocks.add_instance(blocker, blocked, reason, block_level)

                        # Only 'reject' level blocks are announced via bot.
                        if block_level == "reject":
                            blockdict.append({
                                "blocked": blocked,
                                "reason" : reason
                            })
                    else:
                        # DEBUG: print(f"DEBUG: Updating block last seen and reason for blocker='{blocker}',blocked='{blocked}' ...")
                        blocks.update_last_seen(blocker, blocked, block_level)
                        blocks.update_reason(reason, blocker, blocked, block_level)

            # DEBUG: print("DEBUG: Committing changes ...")
            fba.connection.commit()
        else:
            print("WARNING: Unknown software:", blocker, software)

        if instances.has_pending(blocker):
            # DEBUG: print(f"DEBUG: Invoking instances.update_data({blocker}) ...")
            instances.update_data(blocker)

        if config.get("bot_enabled") and len(blockdict) > 0:
            network.send_bot_post(blocker, blockdict)

    # DEBUG: print("DEBUG: EXIT!")
281
def fetch_cs(args: argparse.Namespace):
    """Scrape chaos.social's published federation page (markdown on GitHub),
    extract the 'silenced' and 'blocked' instance tables, record the blocks
    and fetch instance data for previously unknown domains.
    """
    # DEBUG: print(f"DEBUG: args[]='{type(args)}' - CALLED!")
    # Markdown extensions needed to render the federation.md tables to HTML.
    extensions = [
        'extra',
        'abbr',
        'attr_list',
        'def_list',
        'fenced_code',
        'footnotes',
        'md_in_html',
        'admonition',
        'codehilite',
        'legacy_attrs',
        'legacy_em',
        'meta',
        'nl2br',
        'sane_lists',
        'smarty',
        'toc',
        'wikilinks'
    ]

    # Maps our block level to the rows found under each heading.
    domains = {
        "silenced": list(),
        "reject"  : list(),
    }

    raw = fba.fetch_url("https://raw.githubusercontent.com/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
    # DEBUG: print(f"DEBUG: raw()={len(raw)}[]='{type(raw)}'")

    doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features='html.parser')

    # DEBUG: print(f"DEBUG: doc()={len(doc)}[]='{type(doc)}'")
    silenced = doc.find("h2", {"id": "silenced-instances"}).findNext("table").find("tbody")
    # DEBUG: print(f"DEBUG: silenced[]='{type(silenced)}'")
    domains["silenced"] = domains["silenced"] + federation.find_domains(silenced)

    blocked = doc.find("h2", {"id": "blocked-instances"}).findNext("table").find("tbody")
    # DEBUG: print(f"DEBUG: blocked[]='{type(blocked)}'")
    domains["reject"] = domains["reject"] + federation.find_domains(blocked)

    # BUGFIX: count the actual rows; len(domains) is always 2 (the two block
    # levels), so the old guard was always true and the log always said "2".
    total = sum(len(rows) for rows in domains.values())
    if total > 0:
        locking.acquire()

        print(f"INFO: Adding {total} new instances ...")
        for block_level in domains:
            # DEBUG: print(f"DEBUG: block_level='{block_level}'")

            for row in domains[block_level]:
                # DEBUG: print(f"DEBUG: row='{row}'")
                if not blocks.is_instance_blocked('chaos.social', row["domain"], block_level):
                    # DEBUG: print(f"DEBUG: domain='{row['domain']}',block_level='{block_level}' blocked by chaos.social, adding ...")
                    blocks.add_instance('chaos.social', row["domain"], row["reason"], block_level)

                if not instances.is_registered(row["domain"]):
                    try:
                        print(f"INFO: Fetching instances from domain='{row['domain']}' ...")
                        federation.fetch_instances(row["domain"], 'chaos.social', None, inspect.currentframe().f_code.co_name)
                    except network.exceptions as exception:
                        print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_cs) from domain='{row['domain']}'")
                        instances.set_last_error(row["domain"], exception)

        # DEBUG: print("DEBUG: Committing changes ...")
        fba.connection.commit()

    # DEBUG: print("DEBUG: EXIT!")
349
def fetch_fba_rss(args: argparse.Namespace):
    """Parse an FBA-specific RSS feed (URL given in args.feed) and fetch
    instance data for every new, non-blacklisted domain it mentions."""
    found = list()

    print(f"INFO: Fetch FBA-specific RSS args.feed='{args.feed}' ...")
    response = fba.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))

    if response.ok and response.status_code < 300 and len(response.text) > 0:
        # Parse the raw bytes as an RSS document.
        rss = atoma.parse_rss_bytes(response.content)

        for item in rss.items:
            # The instance domain is carried as a query parameter of the link.
            domain = item.link.split("=")[1]

            # Skip blacklisted, duplicate or already-registered domains.
            if blacklist.is_blacklisted(domain) or domain in found or instances.is_registered(domain):
                continue

            found.append(domain)

    if len(found) > 0:
        locking.acquire()

        print(f"INFO: Adding {len(found)} new instances ...")
        for domain in found:
            try:
                print(f"INFO: Fetching instances from domain='{domain}' ...")
                federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
            except network.exceptions as exception:
                print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_fba_rss) from domain='{domain}'")
                instances.set_last_error(domain, exception)
394
def fetch_fbabot_atom(args: argparse.Namespace):
    """Parse the FBA bot account's ATOM feed, collect every domain linked in
    the entries' HTML bodies and fetch instance data for the new ones."""
    feed = "https://ryona.agency/users/fba/feed.atom"

    found = list()

    print(f"INFO: Fetching ATOM feed='{feed}' from FBA bot account ...")
    response = fba.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))

    if response.ok and response.status_code < 300 and len(response.text) > 0:
        atom = atoma.parse_atom_bytes(response.content)

        for entry in atom.entries:
            # Each entry's content is HTML; domains hide in <a href="..."> attributes,
            # possibly several per anchor separated by commas.
            soup = bs4.BeautifulSoup(entry.content.value, "html.parser")
            for anchor in soup.findAll("a"):
                for href in anchor["href"].split(","):
                    domain = tidyup.domain(href)

                    # Skip blacklisted, duplicate or already-registered domains.
                    if blacklist.is_blacklisted(domain) or domain in found or instances.is_registered(domain):
                        continue

                    found.append(domain)

    if len(found) > 0:
        locking.acquire()

        print(f"INFO: Adding {len(found)} new instances ...")
        for domain in found:
            try:
                print(f"INFO: Fetching instances from domain='{domain}' ...")
                federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
            except network.exceptions as exception:
                print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_fbabot_atom) from domain='{domain}'")
                instances.set_last_error(domain, exception)
447
def fetch_instances(args: argparse.Namespace) -> int:
    """Fetch instance data starting from args.domain, then (unless
    args.single is set) re-fetch all known instances of supported software
    whose last fetch is older than the 'recheck_instance' interval.

    Returns 0 on success, 100 when the initial fetch fails.
    """
    # DEBUG: print(f"DEBUG: args[]='{type(args)}' - CALLED!")
    locking.acquire()

    # Initial fetch
    try:
        print(f"INFO: Fetching instances from args.domain='{args.domain}' ...")
        federation.fetch_instances(args.domain, None, None, inspect.currentframe().f_code.co_name)
    except network.exceptions as exception:
        print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_instances) from args.domain='{args.domain}'")
        instances.set_last_error(args.domain, exception)
        return 100

    if args.single:
        # Caller only wanted the one domain.
        return 0

    # Re-check instances that are due per the configured interval.
    fba.cursor.execute(
        "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'bookwyrm', 'takahe', 'lemmy') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY rowid DESC", [time.time() - config.get("recheck_instance")]
    )

    rows = fba.cursor.fetchall()
    print(f"INFO: Checking {len(rows)} entries ...")
    for row in rows:
        # Unpack for readability; same column order as the SELECT above.
        domain, origin, software, nodeinfo_url = row
        if blacklist.is_blacklisted(domain):
            print("WARNING: domain is blacklisted:", domain)
            continue

        try:
            print(f"INFO: Fetching instances for instance '{domain}' ('{software}') of origin='{origin}',nodeinfo_url='{nodeinfo_url}'")
            federation.fetch_instances(domain, origin, software, inspect.currentframe().f_code.co_name, nodeinfo_url)
        except network.exceptions as exception:
            print(f"WARNING: Exception '{type(exception)}' during fetching instances (fetch_instances) from domain='{domain}'")
            instances.set_last_error(domain, exception)

    return 0
488
def fetch_oliphant(args: argparse.Namespace):
    """Fetch CSV blocklists from the oliphant/blocklists repository on
    Codeberg and run each listed domain through generic processing.

    When args.domain is set, only the blocklist(s) whose blocker matches it
    are fetched.
    """
    # DEBUG: print(f"DEBUG: args[]='{type(args)}' - CALLED!")
    locking.acquire()

    # Base URL
    base_url = "https://codeberg.org/oliphant/blocklists/raw/branch/main/blocklists"

    # URLs to fetch; mastodon.social intentionally appears twice (main list
    # plus the missing-tier0 extras).
    blocklists = (
        {
            "blocker": "artisan.chat",
            "csv_url": "mastodon/artisan.chat.csv",
        },{
            "blocker": "mastodon.art",
            "csv_url": "mastodon/mastodon.art.csv",
        },{
            "blocker": "pleroma.envs.net",
            "csv_url": "mastodon/pleroma.envs.net.csv",
        },{
            "blocker": "oliphant.social",
            "csv_url": "mastodon/_unified_tier3_blocklist.csv",
        },{
            "blocker": "mastodon.online",
            "csv_url": "mastodon/mastodon.online.csv",
        },{
            "blocker": "mastodon.social",
            "csv_url": "mastodon/mastodon.social.csv",
        },{
            "blocker": "mastodon.social",
            "csv_url": "other/missing-tier0-mastodon.social.csv",
        },{
            "blocker": "rage.love",
            "csv_url": "mastodon/rage.love.csv",
        },{
            "blocker": "sunny.garden",
            "csv_url": "mastodon/sunny.garden.csv",
        },{
            "blocker": "solarpunk.moe",
            "csv_url": "mastodon/solarpunk.moe.csv",
        },{
            "blocker": "toot.wales",
            "csv_url": "mastodon/toot.wales.csv",
        },{
            "blocker": "union.place",
            "csv_url": "mastodon/union.place.csv",
        }
    )

    # Domains already pushed through processing, to support the
    # args.domain-already-handled short-circuit below.
    domains = list()
    for block in blocklists:
        # Is domain given and not equal blocker?
        if isinstance(args.domain, str) and args.domain != block["blocker"]:
            # DEBUG: print(f"DEBUG: Skipping blocker='{block['blocker']}', not matching args.domain='{args.domain}'")
            continue
        elif args.domain in domains:
            # DEBUG: print(f"DEBUG: args.domain='{args.domain}' already handled - SKIPPED!")
            continue

        # Fetch this URL
        print(f"INFO: Fetching csv_url='{block['csv_url']}' for blocker='{block['blocker']}' ...")
        response = fba.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))

        # DEBUG: print(f"DEBUG: response[]='{type(response)}'")
        if response.ok and response.content != "":
            # DEBUG: print(f"DEBUG: Fetched {len(response.content)} Bytes, parsing CSV ...")
            reader = csv.DictReader(response.content.decode('utf-8').splitlines(), dialect="unix")

            # DEBUG: print(f"DEBUG: reader[]='{type(reader)}'")
            for row in reader:
                # The CSV column header varies between lists ('#domain' or
                # 'domain'); skip rows that have neither.
                domain = None
                if "#domain" in row:
                    domain = row["#domain"]
                elif "domain" in row:
                    domain = row["domain"]
                else:
                    # DEBUG: print(f"DEBUG: row='{row}' does not contain domain column")
                    continue

                # DEBUG: print(f"DEBUG: Marking domain='{domain}' as handled")
                domains.append(domain)

                # DEBUG: print(f"DEBUG: Processing domain='{domain}' ...")
                # Return value intentionally ignored (was an unused local).
                fba.process_domain(domain, block["blocker"], inspect.currentframe().f_code.co_name)

    # DEBUG: print("DEBUG: EXIT!")
576
def fetch_txt(args: argparse.Namespace):
    """Download plain-text blocklists (one domain per line) from a fixed set
    of URLs and run every non-empty domain through generic processing."""
    locking.acquire()

    # Static URLs
    urls = (
        "https://seirdy.one/pb/bsl.txt",
    )

    print(f"INFO: Checking {len(urls)} text file(s) ...")
    for url in urls:
        response = fba.fetch_url(url, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))

        # Guard clause: skip failed or empty responses.
        if not response.ok or response.text == "":
            continue

        domains = response.text.split("\n")

        print(f"INFO: Processing {len(domains)} domains ...")
        for domain in domains:
            # Blank lines carry no domain.
            if domain == "":
                continue

            fba.process_domain(domain, 'seirdy.one', inspect.currentframe().f_code.co_name)