-            logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
-            raw = utils.fetch_url(
-                f"https://{row['domain']}",
-                network.web_headers,
-                (config.get("connection_timeout"), config.get("read_timeout"))
-            ).text
-            logger.debug("raw[%s]()=%d", type(raw), len(raw))
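+            # pub-relay publishes its peer list in its nodeinfo metadata, so fetch
+            # that JSON endpoint instead of scraping the landing page.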
+            if row["software"] == "pub-relay":
+                logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
+                raw = network.fetch_api_url(
+                    row["nodeinfo_url"],
+                    (config.get("connection_timeout"), config.get("read_timeout"))
+                )
+
+                logger.debug("raw[%s]()=%d", type(raw), len(raw))
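+                # network.fetch_api_url() returns a dict: re-raise a transported
+                # exception, record an error message, and skip responses that lack
+                # the expected nodeinfo keys.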
+                if "exception" in raw:
+                    logger.warning("row[domain]='%s' has caused an exception: '%s' - raising again ...", row["domain"], type(raw["exception"]))
+                    raise raw["exception"]
+                elif "error_message" in raw:
+                    logger.warning("row[domain]='%s' has caused error message: '%s' - SKIPPED!", row["domain"], raw["error_message"])
+                    instances.set_last_error(row["domain"], raw)
+                    instances.set_last_instance_fetch(row["domain"])
+                    instances.update(row["domain"])
+                    continue
+                elif "json" not in raw:
+                    logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
+                    continue
+                elif "metadata" not in raw["json"]:
+                    logger.warning("raw[json]()=%d does not contain key 'metadata' in response - SKIPPED!", len(raw["json"]))
+                    continue
+                elif "peers" not in raw["json"]["metadata"]:
+                    logger.warning("raw[json][metadata]()=%d does not contain key 'peers' in response - SKIPPED!", len(raw["json"]["metadata"]))
+                    continue
+            else:
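+                # Other relay software is still fetched from its landing page and
+                # the HTML is parsed with BeautifulSoup below.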
+                logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
+                raw = utils.fetch_url(
+                    f"https://{row['domain']}",
+                    network.web_headers,
+                    (config.get("connection_timeout"), config.get("read_timeout"))
+                ).text
+                logger.debug("raw[%s]()=%d", type(raw), len(raw))
+
+                doc = bs4.BeautifulSoup(raw, features="html.parser")
+                logger.debug("doc[]='%s'", type(doc))
+