# Module-wide logging setup: INFO level by default, with the conventional
# per-module named logger used by all functions below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
-# "rel" identifiers (no real URLs)
-nodeinfo_identifier = [
- "https://nodeinfo.diaspora.software/ns/schema/2.1",
- "https://nodeinfo.diaspora.software/ns/schema/2.0",
- "https://nodeinfo.diaspora.software/ns/schema/1.1",
- "https://nodeinfo.diaspora.software/ns/schema/1.0",
- "http://nodeinfo.diaspora.software/ns/schema/2.1",
- "http://nodeinfo.diaspora.software/ns/schema/2.0",
- "http://nodeinfo.diaspora.software/ns/schema/1.1",
- "http://nodeinfo.diaspora.software/ns/schema/1.0",
-]
-
def fetch_instances(domain: str, origin: str, software: str, command: str, path: str = None):
logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s' - CALLED!", domain, origin, software, command, path)
domain_helper.raise_on(domain)
logger.debug("domain='%s' - CALLED!", domain)
domain_helper.raise_on(domain)
+ # "rel" identifiers (no real URLs)
+ nodeinfo_identifier = [
+ "https://nodeinfo.diaspora.software/ns/schema/2.1",
+ "http://nodeinfo.diaspora.software/ns/schema/2.1",
+ "https://nodeinfo.diaspora.software/ns/schema/2.0",
+ "http://nodeinfo.diaspora.software/ns/schema/2.0",
+ "https://nodeinfo.diaspora.software/ns/schema/1.1",
+ "http://nodeinfo.diaspora.software/ns/schema/1.1",
+ "https://nodeinfo.diaspora.software/ns/schema/1.0",
+ "http://nodeinfo.diaspora.software/ns/schema/1.0",
+ ]
+
# No CSRF by default, you don't have to add network.api_headers by yourself here
headers = tuple()
(config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
)
+ logger.debug("data[]='%s'", type(data))
if "error_message" not in data:
nodeinfo = data["json"]
logger.debug("Found entries: nodeinfo()=%d,domain='%s'", len(nodeinfo), domain)
if "links" in nodeinfo:
- logger.debug("Found nodeinfo[links]()=%d record(s)", len(nodeinfo["links"]))
- for link in nodeinfo["links"]:
- logger.debug("link[%s]='%s'", type(link), link)
- if not isinstance(link, dict) or not "rel" in link:
- logger.warning("link[]='%s' is not 'dict' or no element 'rel' found", type(link))
- elif link["rel"] in nodeinfo_identifier:
- # Default is that 'href' has a complete URL, but some hosts don't send that
- url = link["href"]
- components = urlparse(link["href"])
-
- logger.debug("components[%s]='%s'", type(components), components)
- if components.scheme == "" and components.netloc == "":
- logger.debug("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
- url = f"https://{domain}{url}"
- components = urlparse(url)
-
- if not utils.is_domain_wanted(components.netloc):
- logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
- continue
-
- logger.debug("Fetching nodeinfo from url='%s' ...", url)
- data = network.fetch_api_url(
- url,
- (config.get("connection_timeout"), config.get("read_timeout"))
- )
-
- logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
- if "error_message" not in data and "json" in data:
- logger.debug("Found JSON data()=%d", len(data))
- instances.set_detection_mode(domain, "AUTO_DISCOVERY")
- instances.set_nodeinfo_url(domain, link["href"])
-
- logger.debug("Marking domain='%s' as successfully handled ...", domain)
- instances.set_success(domain)
- break
- else:
- logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
- instances.set_last_error(domain, data)
- else:
- logger.warning("Unknown 'rel' value: domain='%s',link[rel]='%s'", domain, link["rel"])
+ logger.debug("Found nodeinfo[links]()=%d record(s),", len(nodeinfo["links"]))
+ for niid in nodeinfo_identifier:
+ data = dict()
+
+ logger.debug("Checking niid='%s' ...", niid)
+ for link in nodeinfo["links"]:
+ logger.debug("link[%s]='%s'", type(link), link)
+ if not isinstance(link, dict) or not "rel" in link:
+ logger.warning("link[]='%s' is not 'dict' or no element 'rel' found", type(link))
+ elif link["rel"] == niid:
+ # Default is that 'href' has a complete URL, but some hosts don't send that
+ logger.debug("link[href]='%s' matches niid='%s'", link["href"], niid)
+ url = link["href"]
+ components = urlparse(link["href"])
+
+ logger.debug("components[%s]='%s'", type(components), components)
+ if components.scheme == "" and components.netloc == "":
+ logger.debug("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
+ url = f"https://{domain}{url}"
+ components = urlparse(url)
+
+ if not utils.is_domain_wanted(components.netloc):
+ logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
+ continue
+
+ logger.debug("Fetching nodeinfo from url='%s' ...", url)
+ data = network.fetch_api_url(
+ url,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ )
+
+ logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
+ if "error_message" not in data and "json" in data:
+ logger.debug("Found JSON data()=%d,link[href]='%s'", len(data), link["href"])
+ instances.set_detection_mode(domain, "AUTO_DISCOVERY")
+ instances.set_nodeinfo_url(domain, link["href"])
+
+ logger.debug("Marking domain='%s' as successfully handled ...", domain)
+ instances.set_success(domain)
+ break
+ else:
+ logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
+ instances.set_last_error(domain, data)
+
+ logger.debug("data()=%d", len(data))
+ if "error_message" not in data and "json" in data:
+ logger.debug("Auto-discovery successful: domain='%s'", domain)
+ break
else:
logger.warning("nodeinfo does not contain 'links': domain='%s'", domain)