From 470892a11b3244521d97c8dae0f01bf6b65d215f Mon Sep 17 00:00:00 2001
From: =?utf8?q?Roland=20H=C3=A4der?=
Date: Tue, 29 Apr 2025 21:36:46 +0200
Subject: [PATCH] Continued:

- oops, `value` is no parameter in daemon's function
- introduced --force-recrawl (to include recently crawled instances) parameter to 2 commands
- updated --force-all help text
---
 daemon.py       | 2 +-
 fba/boot.py     | 6 ++++--
 fba/commands.py | 4 ++--
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/daemon.py b/daemon.py
index f007e72..98fce7e 100755
--- a/daemon.py
+++ b/daemon.py
@@ -83,7 +83,7 @@ SELECT COUNT(domain) FROM instances) AS total_websites, \
 
 @router.get(_base_url + "/api/scoreboard.json", response_class=JSONResponse)
 def api_scoreboard(mode: str, amount: int) -> None:
-    if mode is None or value is None or amount is None:
+    if mode is None or amount is None:
         raise HTTPException(status_code=500, detail="No filter specified")
     elif amount <= 0:
         raise HTTPException(status_code=500, detail=f"amount={amount} is to small")
diff --git a/fba/boot.py b/fba/boot.py
index f25ea98..5730b16 100644
--- a/fba/boot.py
+++ b/fba/boot.py
@@ -148,7 +148,8 @@ def init_parser() -> None:
     )
     parser.set_defaults(command=commands.fetch_instances)
     parser.add_argument("--domain", help="Instance name (aka. domain) to fetch further instances from. Start with a large instance, for example mastodon.social .")
-    parser.add_argument("--force-all", action="store_true", help="Include also already existing instances, otherwise only new are checked")
+    parser.add_argument("--force-all", action="store_true", help="Forces update of data, no matter what. Replaces all force parameters below.")
+    parser.add_argument("--force-recrawl", action="store_true", help="Forces recrawling all found instances. Can still be limited by --software or any --no-* parameters.")
     parser.add_argument("--software", help="Name of software, for example 'lemmy'")
 
     ### Fetch blocks from static text file(s) ###
@@ -157,7 +158,8 @@ def init_parser() -> None:
         help="Fetches text/plain files as simple domain lists",
     )
     parser.set_defaults(command=commands.fetch_txt)
-    parser.add_argument("--force-all", action="store_true", help="Forces update of data, no matter what.")
+    parser.add_argument("--force-all", action="store_true", help="Forces update of data, no matter what. Replaces all force parameters below.")
+    parser.add_argument("--force-recrawl", action="store_true", help="Forces recrawling all found instances. Can still be limited by --software or any --no-* parameters.")
 
     ### Fetch blocks from joinfediverse.wiki ###
     #parser = subparser_command.add_parser(
diff --git a/fba/commands.py b/fba/commands.py
index d48a769..adc2c87 100644
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -1071,7 +1071,7 @@ ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC"
         elif software_helper.is_relay(row["software"]):
             logger.warning("row[domain]='%s' is a relay of type '%s' which is not supported by this command. Please invoke fetch_relays instead - SKIPPED!", row["domain"], row["software"])
             continue
-        elif not args.force_all and not args.software in [None, ""] and instances.is_recent(row["domain"]):
+        elif not args.force_all and not args.force_recrawl and instances.is_recent(row["domain"]):
             logger.debug("row[domain]='%s' has recently been crawled - SKIPPED!", row["domain"])
             continue
 
@@ -1174,7 +1174,7 @@ def fetch_txt(args: argparse.Namespace) -> int:
         elif not domain_helper.is_wanted(domain):
             logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
             continue
-        elif not args.force_all and instances.is_registered(domain):
+        elif not args.force_all and not args.force_recrawl and instances.is_registered(domain):
             logger.debug("domain='%s' is already registered - SKIPPED!", domain)
             continue
-- 
2.39.5