if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 1
- else:
- logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
- sources.update(source_domain)
+
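+ # mark this source as accessed so the is_recent() guard above exits early on the next run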
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
logger.info("Fetching list from source_domain='%s' ...", source_domain)
rows = network.fetch_json_rows(
    source_domain  # assumption: the call's remaining arguments are truncated in this hunk
)
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[%s]='%s' - BEFORE!", type(row), row)
+ if "url" not in row:
+ logger.warning("row='%s' has no required element 'url' - SKIPPED!", row)
+ continue
+
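+ # derive the bare hostname: drop any port suffix and lower-case it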
domain = urlparse(row["url"]).netloc.lower().split(":")[0]
logger.debug("domain='%s' - AFTER!", domain)
domain = domain_helper.encode_idna(domain)
logger.debug("domain='%s' - AFTER!", domain)
- if not domain_helper.is_wanted(domain):
- logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
- continue
- elif domain in domains:
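+ # check for duplicates first, before the is_wanted() filter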
+ if domain in domains:
logger.debug("domain='%s' is already added - SKIPPED!", domain)
continue
+ elif not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
elif instances.is_registered(domain):
logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
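+ # unless forced via args.force_all, skip rows that were fetched recently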
if not args.force_all and instances.is_recent(row["domain"]):
logger.debug("row[domain]='%s' has recently been fetched - SKIPPED!", row["domain"])
continue
- elif row["nodeinfo_url"] is None:
- logger.warning("row[domain]='%s' has empty nodeinfo_url but this is required - SKIPPED!", row["domain"])
- continue
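+ # peer domains collected from this relay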
peers = []
try:
logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
raw = network.fetch_url(
f"https://{row['domain']}",
- network.web_headers,
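+ # pass the headers by keyword to make the call site explicit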
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))