logger.debug(f"raw[{type(raw)}]()={len(raw)}")
doc = bs4.BeautifulSoup(raw, features='html.parser')
- logger.debug("doc[]='%'", type(doc))
+ logger.debug("doc[]='%s'", type(doc))
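The recurring fix in this patch: the broken "%" placeholder becomes "%s", and f-string logger calls become lazy %-style arguments, so values are only stringified when the record is actually emitted. A minimal, self-contained sketch of the difference (not from this codebase):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    doc = {"key": "value"}  # stand-in for the BeautifulSoup object

    # Eager: the f-string is rendered even though DEBUG is disabled here.
    logger.debug(f"doc[]='{type(doc)}'")

    # Lazy: '%s' is substituted only if a handler actually emits the record.
    logger.debug("doc[]='%s'", type(doc))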
except network.exceptions as exception:
logger.warning(f"Cannot fetch software='{software}' from fediverse.observer: '{type(exception)}'")
continue
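Note that network.exceptions is presumably a module-level tuple of exception classes; an except clause accepts a tuple to catch several types at once. A sketch of what such a constant could look like (the actual members are an assumption, not taken from this project):

    # network.py (hypothetical sketch; real members are an assumption)
    import requests

    exceptions = (
        requests.exceptions.ConnectionError,
        requests.exceptions.Timeout,
        requests.exceptions.TooManyRedirects,
    )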
logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
- logger.debug(f"domain='{domain}' is already registered - SKIPPED!")
+ logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
logger.info(f"Fetching instances for domain='{domain}',software='{software}'")
logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
continue
elif domain in domains:
- logger.debug(f"domain='{domain}' is already added - SKIPPED!")
+ logger.debug("domain='%s' is already added - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
- logger.debug(f"domain='{domain}' is already registered - SKIPPED!")
+ logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
logger.debug(f"Adding domain='{domain}'")
for entry in atom.entries:
logger.debug(f"entry[]='{type(entry)}'")
doc = bs4.BeautifulSoup(entry.content.value, "html.parser")
- logger.debug("doc[]='%'", type(doc))
+ logger.debug("doc[]='%s'", type(doc))
for element in doc.findAll("a"):
for href in element["href"].split(","):
- logger.debug(f"href[{type(href)}]={href}")
+ logger.debug("href[%s]='%s", type(href), href)
domain = tidyup.domain(href)
logger.debug("domain='%s'", domain)
logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
continue
elif domain in domains:
- logger.debug(f"domain='{domain}' is already added - SKIPPED!")
+ logger.debug("domain='%s' is already added - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
- logger.debug(f"domain='{domain}' is already registered - SKIPPED!")
+ logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
logger.debug(f"Adding domain='{domain}',domains()={len(domains)}")
if "error_message" in data:
logger.warning("Could not reach any JSON API:", domain)
instances.set_last_error(domain, data)
- elif "federated_instances" in data["json"]:
+ elif "federated_instances" in data["json"] and isinstance(data["json"]["federated_instances"], dict):
logger.debug(f"Found federated_instances for domain='{domain}'")
peers = peers + federation.add_peers(data["json"]["federated_instances"])
logger.debug("Added instance(s) to peers")
else:
- logger.warning("JSON response does not contain 'federated_instances':", domain)
+ logger.warning("JSON response does not contain 'federated_instances', domain='%s'", domain)
instances.set_last_error(domain, data)
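The added isinstance() check matters because key presence alone is not enough: some instance APIs return "federated_instances": null (for example when federation details are hidden), and subscripting None would raise a TypeError. A minimal illustration with invented payloads:

    # Invented payloads showing why the guard is needed.
    for data in ({"json": {"federated_instances": None}},
                 {"json": {"federated_instances": {"linked": ["example.com"]}}}):
        value = data["json"].get("federated_instances")
        if isinstance(value, dict):
            print("usable:", value)
        else:
            print("present but not a dict - skipped")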
except network.exceptions as exception:
"html.parser",
)
- logger.debug("doc[]='%'", type(doc))
+ logger.debug("doc[]='%s'", type(doc))
if doc.find("h2") is not None:
logger.debug(f"Found 'h2' header in path='{path}' - BREAK!")
break
"Silenced servers" : [],
}
- logger.debug("doc[]='%'", type(doc))
+ logger.debug("doc[]='%s'", type(doc))
if doc is None:
logger.warning(f"Cannot fetch any /about pages for domain='{domain}' - EXIT!")
return blocklist
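This last hunk scrapes a domain's /about page(s): only pages containing an <h2> section header are parsed, and results are collected into a dict keyed by section titles such as "Silenced servers". A loose sketch of that shape (the sample HTML is invented; the real code walks several paths and sections):

    import bs4

    html = "<h2>Silenced servers</h2><table><tr><td>blocked.example</td></tr></table>"
    doc = bs4.BeautifulSoup(html, "html.parser")

    blocklist = {
        "Silenced servers": [],
    }

    # Only pages carrying a section header are worth parsing further.
    if doc.find("h2") is not None:
        for cell in doc.findAll("td"):
            blocklist["Silenced servers"].append(cell.get_text(strip=True))

    print(blocklist)  # {'Silenced servers': ['blocked.example']}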