locking.acquire()
if args.domain is not None and args.domain != "":
- database.cursor.execute("SELECT domain, software FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay') AND domain = ? LIMIT 1", [args.domain])
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND domain = ? LIMIT 1", [args.domain])
+ elif args.software is not None and args.software != "":
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND software = ?", [args.software])
else:
- database.cursor.execute("SELECT domain, software FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay')")
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay')")
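+ # All three queries now also select nodeinfo_url: the pub-relay branch below reads that relay's peer list from this URL.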
domains = list()
+ peers = list()  # assumed missing initialisation: the pub-relay branch below reads and appends to this list
rows = database.cursor.fetchall()
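+ # Each relay row is handled below: pub-relay via its nodeinfo API, the HTML-based relays via their front page.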
logger.info("Checking %d relays ...", len(rows))
for row in rows:
logger.debug("row[domain]='%s',row[software]='%s'", row["domain"], row["software"])
if instances.is_recent(row["domain"]):  # assumed recency guard matching set_last_instance_fetch() below
logger.debug("row[domain]='%s' has recently been fetched - SKIPPED!", row["domain"])
continue
try:
- logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
- raw = utils.fetch_url(
- f"https://{row['domain']}",
- network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
- ).text
- logger.debug("raw[%s]()=%d", type(raw), len(raw))
+ if row["software"] == "pub-relay":
+ logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
+ raw = network.fetch_api_url(
+ row["nodeinfo_url"],
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ )
+
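+ # fetch_api_url() returns a dict: on success the parsed JSON sits under "json", on failure an "exception" or "error_message" key is present (contract inferred from the checks below).
+ # A valid pub-relay nodeinfo document is assumed to look like: {"metadata": {"peers": ["domain1.example", ...]}}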
+ logger.debug("raw[%s]()=%d", type(raw), len(raw))
+ if "exception" in raw:
+ logger.warning("row[domain]='%s' has caused an exception: '%s' - raising again ...", row["domain"], type(raw["exception"]))
+ raise raw["exception"]
+ elif "error_message" in raw:
+ logger.warning("row[domain]='%s' has caused error message: '%s' - SKIPPED!", row["domain"], raw["error_message"])
+ instances.set_last_error(row["domain"], raw)
+ instances.set_last_instance_fetch(row["domain"])
+ instances.update(row["domain"])
+ continue
+ elif not "json" in raw:
+ logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
+ continue
+ elif not "metadata" in raw["json"]:
+ logger.warning("raw[json]()=%d does not contain key 'json' in response - SKIPPED!", len(raw["json"]))
+ continue
+ elif not "peers" in raw["json"]["metadata"]:
+ logger.warning("raw[json][metadata()=%d does not contain key 'json' in response - SKIPPED!", len(raw["json"]["metadata"]))
+ continue
+ else:
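+ # The HTML-based relays (activityrelay, aoderelay, selective-relay) list their subscribers on the front page, which is fetched and parsed below.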
+ logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
+ raw = utils.fetch_url(
+ f"https://{row['domain']}",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
+ logger.debug("raw[%s]()=%d", type(raw), len(raw))
+
+ doc = bs4.BeautifulSoup(raw, features="html.parser")
+ logger.debug("doc[]='%s'", type(doc))
+
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching from relay '%s': '%s'", type(exception), row["domain"], str(exception))
instances.set_last_error(row["domain"], exception)
instances.update(row["domain"])
continue
- doc = bs4.BeautifulSoup(raw, features="html.parser")
- logger.debug("doc[]='%s'", type(doc))
-
logger.debug("row[software]='%s'", row["software"])
if row["software"] == "activityrelay":
logger.debug("Checking row[domain]='%s' ...", row["domain"])
for link in doc.findAll("a"):  # assumed loop (elided in this hunk): walk the links on the parsed relay page
logger.debug("link[%s]='%s'", type(link), link)
components = urlparse(link["href"])
domain = components.netloc.lower()
- if not domain_helper.is_wanted(domain):
- logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
- continue
-
logger.debug("domain='%s' - BEFORE!", domain)
domain = tidyup.domain(domain)
logger.debug("domain='%s' - AFTER!", domain)
if dict_helper.has_key(domains, "domain", domain):
logger.debug("domain='%s' already added", domain)
continue
+ elif not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
+
+ logger.debug("Appending domain='%s',origin='%s',software='%s'", domain, row["domain"], row["software"])
+ domains.append({
+ "domain": domain,
+ "origin": row["domain"],
+ })
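+ # pub-relay: the peer list comes straight from the nodeinfo JSON fetched above, no HTML parsing needed.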
+ elif row["software"] == "pub-relay":
+ logger.debug("Checking %d peer(s) row[domain]='%s' ...", len(raw["json"]["metadata"]["peers"]), row["domain"])
+ for domain in raw["json"]["metadata"]["peers"]:
+ logger.debug("domain='%s'", domain)
+ if domain not in peers:
+ logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
+ peers.append(domain)
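+ # pub-relay peers arrive as bare domain names, so only de-duplication and the is_wanted() filter are applied before appending.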
+
+ if dict_helper.has_key(domains, "domain", domain):
+ logger.debug("domain='%s' already added", domain)
+ continue
+ elif not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
logger.debug("Appending domain='%s',origin='%s',software='%s'", domain, row["domain"], row["software"])
domains.append({
"domain": domain,
"origin": row["domain"],
})
else:
logger.warning("row[domain]='%s',row[software]='%s' is not supported", row["domain"], row["software"])
+ continue
logger.debug("Updating last_instance_fetch for row[domain]='%s' ...", row["domain"])
instances.set_last_instance_fetch(row["domain"])