import reqto
import validators
+from fba import csrf
from fba import database
from fba import utils
logger.debug("status='%d' - EXIT!", status)
return status
+def fetch_pixelfed_api(args: argparse.Namespace) -> int:
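+    # Fetches the full server list from pixelfed.org's API and registers
+    # every new, wanted domain for crawling. Returns 0 on success or a
+    # non-zero (1xx) error code otherwise.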
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ # No CSRF by default, you don't have to add network.api_headers by yourself here
+ headers = tuple()
+
+ try:
+ logger.debug("Checking CSRF from pixelfed.org")
+ headers = csrf.determine("pixelfed.org", dict())
+ except network.exceptions as exception:
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ return list()
+
+ domains = list()
+    try:
+        logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
+        fetched = network.get_json_api(
+            "pixelfed.org",
+            "/api/v1/servers/all.json?scope=All&country=all&language=all",
+            headers,
+            (config.get("connection_timeout"), config.get("read_timeout"))
+        )
+
+        logger.debug("JSON API returned %d elements", len(fetched))
+ if "error_message" in fetched:
+ logger.warning("API returned error_message='%s' - EXIT!", fetched["error_message"])
+ return 101
+ elif "data" not in fetched["json"]:
+ logger.warning("API did not return JSON with 'data' element - EXIT!")
+ return 102
+
+        rows = fetched["json"]["data"]
+        logger.info("Checking %d fetched rows ...", len(rows))
+        for row in rows:
+            logger.debug("row[]='%s'", type(row))
+            if "domain" not in row:
+                logger.warning("row='%s' does not contain element 'domain' - SKIPPED!", row)
+                continue
+            elif not utils.is_domain_wanted(row['domain']):
+                logger.debug("row[domain]='%s' is not wanted - SKIPPED!", row['domain'])
+                continue
+            elif instances.is_registered(row['domain']):
+                logger.debug("row[domain]='%s' is already registered - SKIPPED!", row['domain'])
+                continue
+
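+            # The domain is new and wanted at this point; hand it over to the
+            # crawler, passing this function's name as the calling command.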
+ logger.debug("Fetching instances from row[domain]='%s' ...", row['domain'])
+ federation.fetch_instances(row['domain'], None, None, inspect.currentframe().f_code.co_name)
+
+    except network.exceptions as exception:
+        logger.warning("Cannot fetch JSON from pixelfed.org API, exception[%s]:'%s' - EXIT!", type(exception), str(exception))
+        return 103
+
+    logger.debug("Success! - EXIT!")
+    return 0
+
def fetch_bkali(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
domains = list()
            continue
        elif not instances.is_registered(block['blocked']):
            logger.debug("Hash wasn't found, adding: blocked='%s',blocker='%s'", block['blocked'], blocker)
-            try:
-                instances.add(block['blocked'], blocker, inspect.currentframe().f_code.co_name, nodeinfo_url)
-            except network.exceptions as exception:
-                logger.warning("Exception during adding blocked='%s',blocker='%s': '%s'", block['blocked'], blocker, type(exception))
-                continue
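+            # fetch_instances() registers the blocked domain, too; blocker is
+            # passed along as its origin.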
+            federation.fetch_instances(block['blocked'], blocker, None, inspect.currentframe().f_code.co_name)

        if block['block_level'] == "silence":
            logger.debug("Block level 'silence' has been changed to 'silenced'")
        try:
            logger.debug("Fetching table data for software='%s' ...", software)
-            raw = utils.fetch_url(f"https://fediverse.observer/app/views/tabledata.php?software={software}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+            raw = utils.fetch_url(
+                f"https://fediverse.observer/app/views/tabledata.php?software={software}",
+                network.web_headers,
+                (config.get("connection_timeout"), config.get("read_timeout"))
+            ).text
            logger.debug("raw[%s]()=%d", type(raw), len(raw))
            doc = bs4.BeautifulSoup(raw, features='html.parser')