From: Roland Häder Date: Fri, 23 Jun 2023 18:12:47 +0000 (+0200) Subject: Continued: X-Git-Url: https://git.mxchange.org/?a=commitdiff_plain;h=c28e1f0c1aed04c21737626c8068bf03e275f800;p=fba.git Continued: - added command 'fetch_pixelfed_api' to fetch instances from pixelfed.org's API --- diff --git a/fba/boot.py b/fba/boot.py index cced510..5521867 100644 --- a/fba/boot.py +++ b/fba/boot.py @@ -141,6 +141,13 @@ def init_parser(): ) parser.set_defaults(command=commands.fetch_fedipact) + ### Fetch from pixelfed.org's API ### + parser = subparser_command.add_parser( + "fetch_pixelfed_api", + help="Fetches domain names from pixelfed.org's API", + ) + parser.set_defaults(command=commands.fetch_pixelfed_api) + logger.debug("EXIT!") def run_command(): diff --git a/fba/commands.py b/fba/commands.py index 62e90c4..0c88e21 100644 --- a/fba/commands.py +++ b/fba/commands.py @@ -27,6 +27,7 @@ import markdown import reqto import validators +from fba import csrf from fba import database from fba import utils @@ -70,6 +71,61 @@ def check_instance(args: argparse.Namespace) -> int: logger.debug("status='%d' - EXIT!", status) return status +def fetch_pixelfed_api(args: argparse.Namespace) -> int: + logger.debug("args[]='%s' - CALLED!", type(args)) + + # No CSRF by default, you don't have to add network.api_headers by yourself here + headers = tuple() + + try: + logger.debug("Checking CSRF from pixelfed.org") + headers = csrf.determine("pixelfed.org", dict()) + except network.exceptions as exception: + logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__) + return list() + + domains = list() + try: + logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers)) + fetched = network.get_json_api( + "pixelfed.org", + "/api/v1/servers/all.json?scope=All&country=all&language=all", + headers, + (config.get("connection_timeout"), config.get("read_timeout")) + ) + + logger.debug("JSON API returned %d elements", 
len(fetched)) + if "error_message" in fetched: + logger.warning("API returned error_message='%s' - EXIT!", fetched["error_message"]) + return 101 + elif "data" not in fetched["json"]: + logger.warning("API did not return JSON with 'data' element - EXIT!") + return 102 + + rows = fetched["json"]["data"] + logger.info("Checking %d fetched rows ...", len(rows)) + for row in rows: + logger.debug("row[]='%s'", type(row)) + if "domain" not in row: + logger.warning("row='%s' does not contain element 'domain' - SKIPPED!") + continue + elif not utils.is_domain_wanted(row['domain']): + logger.debug("row[domain]='%s' is not wanted - SKIPPED!", row['domain']) + continue + elif instances.is_registered(row['domain']): + logger.debug("row[domain]='%s' is already registered - SKIPPED!", row['domain']) + continue + + logger.debug("Fetching instances from row[domain]='%s' ...", row['domain']) + federation.fetch_instances(row['domain'], None, None, inspect.currentframe().f_code.co_name) + + except network.exceptions as exception: + logger.warning("Cannot fetch graphql,exception[%s]:'%s' - EXIT!", type(exception), str(exception)) + return 103 + + logger.debug("Success! - EXIT!") + return 0 + def fetch_bkali(args: argparse.Namespace) -> int: logger.debug("args[]='%s' - CALLED!", type(args)) domains = list() @@ -265,11 +321,7 @@ def fetch_blocks(args: argparse.Namespace) -> int: continue elif not instances.is_registered(block['blocked']): logger.debug("Hash wasn't found, adding: blocked='%s',blocker='%s'", block['blocked'], blocker) - try: - instances.add(block['blocked'], blocker, inspect.currentframe().f_code.co_name, nodeinfo_url) - except network.exceptions as exception: - logger.warning("Exception during adding blocked='%s',blocker='%s': '%s'", block['blocked'], blocker, type(exception)) - continue + federation.fetch_instances(block['blocked'], blocker, None, inspect.currentframe().f_code.co_name) if block['block_level'] == "silence": logger.debug("Block level 'silence' has been changed to 'silenced'") @@ -358,7 +410,11 @@ def fetch_observer(args: argparse.Namespace) -> int: try: logger.debug("Fetching table data for software='%s' ...", software) - raw = utils.fetch_url(f"https://fediverse.observer/app/views/tabledata.php?software={software}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text + raw = utils.fetch_url( + f"https://fediverse.observer/app/views/tabledata.php?software={software}", + network.web_headers, + (config.get("connection_timeout"), config.get("read_timeout")) + ).text logger.debug("raw[%s]()=%d", type(raw), len(raw)) doc = bs4.BeautifulSoup(raw, features='html.parser')