git.mxchange.org Git - fba.git/commitdiff
Continued:
author Roland Häder <roland@mxchange.org>
Sun, 2 Jul 2023 18:49:58 +0000 (20:49 +0200)
committer Roland Häder <roland@mxchange.org>
Sun, 2 Jul 2023 18:49:58 +0000 (20:49 +0200)
- return whole nodeinfo dict, including "json"
- also need to handle this for pleroma

fba/http/federation.py
fba/http/network.py
fba/networks/pleroma.py

index 09397221bc7bb3a4cd06d94262acc5b78a20556e..339b4117241042841c155a26dfa2f54d3b5031d9 100644 (file)
@@ -215,7 +215,7 @@ def fetch_nodeinfo(domain: str, path: str = None) -> dict:
     logger.debug("nodeinfo[%s](%d='%s'", type(nodeinfo), len(nodeinfo), nodeinfo)
     if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0:
         logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json']))
-        return nodeinfo["json"]
+        return nodeinfo
 
     # No CSRF by default, you don't have to add network.api_headers by yourself here
     headers = tuple()
@@ -457,7 +457,7 @@ def determine_software(domain: str, path: str = None) -> str:
     logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
     data = fetch_nodeinfo(domain, path)
 
-    logger.debug("data[]='%s'", type(data))
+    logger.debug("data[%s]='%s'", type(data), data)
     if "exception" in data:
         # Continue raising it
         logger.debug("data()=%d contains exception='%s' - raising ...", len(data), type(data["exception"]))
index c7816c628ecdb0d0efb0afd56c1c7794bfa6b7ca..00a31e4476494f991e1a6a12de1d67abb2d5e994 100644 (file)
@@ -126,7 +126,7 @@ def fetch_api_url(url: str, timeout: tuple) -> dict:
         logger.debug("Parsing JSON response from url='%s' ...", url)
         json_reply["json"] = json_helper.from_response(response)
 
-        logger.debug("response.ok='%s',response.status_code='%s',response.text()=%d", response.ok, response.status_code, len(response.text), response.text)
+        logger.debug("response.ok='%s',response.status_code='%s',response.text()=%d", response.ok, response.status_code, len(response.text))
         if not response.ok or response.status_code >= 300 or len(response.text) == 0:
             logger.warning("Cannot query JSON API: url='%s',response.status_code=%d,response.text()=%d", url, response.status_code, len(response.text))
             json_reply["status_code"]   = response.status_code
index 82d3e4ef39058a05385274a83674b54595e5f06b..ebb7a6ec48bfc1bf66d700ddd7663536768e6453 100644 (file)
@@ -63,6 +63,17 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
     try:
         logger.debug("Fetching nodeinfo: domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
         rows = federation.fetch_nodeinfo(domain, nodeinfo_url)
+
+        if "error_message" in rows:
+            logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", rows["error_message"], domain, nodeinfo_url)
+            instances.set_last_error(domain, rows)
+            return list()
+        elif "exception" in rows:
+            logger.warning("Exception '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", type(rows["exception"]), domain, nodeinfo_url)
+            return list()
+        elif "json" in rows:
+            logger.debug("rows[json] found for domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
+            rows = rows["json"]
     except network.exceptions as exception:
         logger.warning("Exception '%s' during fetching nodeinfo from domain='%s'", type(exception), domain)
         instances.set_last_error(domain, exception)