git.mxchange.org Git - fba.git/commitdiff
Continued:
authorRoland Häder <roland@mxchange.org>
Fri, 23 Jun 2023 18:12:47 +0000 (20:12 +0200)
committerRoland Häder <roland@mxchange.org>
Fri, 23 Jun 2023 18:12:47 +0000 (20:12 +0200)
- added command 'fetch_pixelfed_api' to fetch instances from pixelfed.org's API

fba/boot.py
fba/commands.py

index cced5102be8d83970091d81ae4b37acb79942dfa..55218674fda8778eb4474e8b816410ed49536c89 100644 (file)
@@ -141,6 +141,13 @@ def init_parser():
     )
     parser.set_defaults(command=commands.fetch_fedipact)
 
+    ### Fetch from pixelfed.org's API ###
+    parser = subparser_command.add_parser(
+        "fetch_pixelfed_api",
+        help="Fetches domain names from pixelfed.org's API",
+    )
+    parser.set_defaults(command=commands.fetch_pixelfed_api)
+
     logger.debug("EXIT!")
 
 def run_command():
index 62e90c475503de4409d119756769ad6a769c1434..0c88e21cbcfbd11f6bffc35514b9776293500430 100644 (file)
@@ -27,6 +27,7 @@ import markdown
 import reqto
 import validators
 
+from fba import csrf
 from fba import database
 from fba import utils
 
@@ -70,6 +71,61 @@ def check_instance(args: argparse.Namespace) -> int:
     logger.debug("status='%d' - EXIT!", status)
     return status
 
def fetch_pixelfed_api(args: argparse.Namespace) -> int:
    """Fetch instance domain names from pixelfed.org's server-list API and
    trigger a federation fetch for each new, wanted domain.

    Parameters:
        args: parsed command-line arguments (currently unused beyond logging)

    Returns:
        0 on success, or a non-zero error code:
        100 - CSRF determination failed
        101 - API returned an 'error_message'
        102 - API response is missing the 'data' element
        103 - network exception while fetching or processing the list
    """
    logger.debug("args[]='%s' - CALLED!", type(args))

    # No CSRF by default, you don't have to add network.api_headers by yourself here
    headers = tuple()

    try:
        logger.debug("Checking CSRF from pixelfed.org")
        headers = csrf.determine("pixelfed.org", dict())
    except network.exceptions as exception:
        # BUG FIX: message said 'fetch_peers' (copy-paste) and the function
        # returned list() despite being annotated '-> int'; return an int code.
        logger.warning("Exception '%s' during checking CSRF (fetch_pixelfed_api,%s) - EXIT!", type(exception), __name__)
        return 100

    try:
        logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
        fetched = network.get_json_api(
            "pixelfed.org",
            "/api/v1/servers/all.json?scope=All&country=all&language=all",
            headers,
            (config.get("connection_timeout"), config.get("read_timeout"))
        )

        logger.debug("JSON API returned %d elements", len(fetched))
        if "error_message" in fetched:
            logger.warning("API returned error_message='%s' - EXIT!", fetched["error_message"])
            return 101
        elif "data" not in fetched["json"]:
            logger.warning("API did not return JSON with 'data' element - EXIT!")
            return 102

        rows = fetched["json"]["data"]
        logger.info("Checking %d fetched rows ...", len(rows))
        for row in rows:
            logger.debug("row[]='%s'", type(row))
            if "domain" not in row:
                # BUG FIX: the '%s' placeholder previously had no argument,
                # which raises a logging formatting error at runtime.
                logger.warning("row='%s' does not contain element 'domain' - SKIPPED!", row)
                continue
            elif not utils.is_domain_wanted(row['domain']):
                logger.debug("row[domain]='%s' is not wanted - SKIPPED!", row['domain'])
                continue
            elif instances.is_registered(row['domain']):
                logger.debug("row[domain]='%s' is already registered - SKIPPED!", row['domain'])
                continue

            logger.debug("Fetching instances from row[domain]='%s' ...", row['domain'])
            federation.fetch_instances(row['domain'], None, None, inspect.currentframe().f_code.co_name)

    except network.exceptions as exception:
        logger.warning("Cannot fetch graphql,exception[%s]:'%s' - EXIT!", type(exception), str(exception))
        return 103

    # NOTE: removed unused local 'domains = list()' - it was never read or appended to.
    logger.debug("Success! - EXIT!")
    return 0
+
 def fetch_bkali(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
     domains = list()
@@ -265,11 +321,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
                 continue
             elif not instances.is_registered(block['blocked']):
                 logger.debug("Hash wasn't found, adding: blocked='%s',blocker='%s'", block['blocked'], blocker)
-                try:
-                    instances.add(block['blocked'], blocker, inspect.currentframe().f_code.co_name, nodeinfo_url)
-                except network.exceptions as exception:
-                    logger.warning("Exception during adding blocked='%s',blocker='%s': '%s'", block['blocked'], blocker, type(exception))
-                    continue
+                federation.fetch_instances(block['blocked'], blocker, None, inspect.currentframe().f_code.co_name)
 
             if block['block_level'] == "silence":
                 logger.debug("Block level 'silence' has been changed to 'silenced'")
@@ -358,7 +410,11 @@ def fetch_observer(args: argparse.Namespace) -> int:
 
         try:
             logger.debug("Fetching table data for software='%s' ...", software)
-            raw = utils.fetch_url(f"https://fediverse.observer/app/views/tabledata.php?software={software}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+            raw = utils.fetch_url(
+                f"https://fediverse.observer/app/views/tabledata.php?software={software}",
+                network.web_headers,
+                (config.get("connection_timeout"), config.get("read_timeout"))
+            ).text
             logger.debug("raw[%s]()=%d", type(raw), len(raw))
 
             doc = bs4.BeautifulSoup(raw, features='html.parser')