logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
paths = [
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching instances from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
logger.debug("block_tag[%s]='%s'", type(block_tag), block_tag)
if block_tag is None:
- logger.debug("Instance has no block list: domain='%s'", domain)
+ logger.debug("Instance has no block list: domain='%s' - EXIT!", domain)
return list()
table = block_tag.find("table")
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
try:
try:
parsed = json.loads(iso_data)
except json.decoder.JSONDecodeError as exception:
- logger.warning("Exception '%s' during parsing %d Bytes: '%s'", type(exception), len(iso_data), str(exception))
+ logger.warning("Exception '%s' during parsing %d Bytes: '%s' - EXIT!", type(exception), len(iso_data), str(exception))
return list()
logger.debug("parsed[%s]()=%d", type(parsed), len(parsed))
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
try:
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
# iterating through all "suspended" (follow-only in its terminology)
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_blocks,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
blocklist = list()
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
for mode in ["followers", "following"]:
rows = federation.fetch_nodeinfo(domain, nodeinfo_url)
if "error_message" in rows:
- logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", rows["error_message"], domain, nodeinfo_url)
+ logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s'", rows["error_message"], domain, nodeinfo_url)
instances.set_last_error(domain, rows)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
elif "exception" in rows:
- logger.warning("Exception '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", type(rows["exception"]), domain, nodeinfo_url)
+ logger.warning("Exception '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s'", type(rows["exception"]), domain, nodeinfo_url)
elif "json" in rows:
logger.debug("rows[json] found for domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
rows = rows["json"]
+
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching nodeinfo from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)
if rows is None:
- logger.warning("Could not fetch nodeinfo from domain='%s'", domain)
+ logger.warning("Could not fetch nodeinfo from domain='%s' - EXIT!", domain)
return list()
elif "metadata" not in rows:
- logger.warning("rows()=%d does not have key 'metadata', domain='%s'", len(rows), domain)
+ logger.warning("rows()=%d does not have key 'metadata', domain='%s' - EXIT!", len(rows), domain)
return list()
elif "federation" not in rows["metadata"]:
- logger.warning("rows()=%d does not have key 'federation', domain='%s'", len(rows["metadata"]), domain)
+ logger.warning("rows()=%d does not have key 'federation', domain='%s' - EXIT!", len(rows["metadata"]), domain)
return list()
data = rows["metadata"]["federation"]