logger.debug("nodeinfo[%s](%d='%s'", type(nodeinfo), len(nodeinfo), nodeinfo)
if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0:
logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json']))
- return nodeinfo["json"]
+ return nodeinfo
# No CSRF by default, you don't have to add network.api_headers by yourself here
headers = tuple()
logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
data = fetch_nodeinfo(domain, path)
- logger.debug("data[]='%s'", type(data))
+ logger.debug("data[%s]='%s'", type(data), data)
if "exception" in data:
    # Continue raising it
    logger.debug("data()=%d contains exception='%s' - raising ...", len(data), type(data["exception"]))
    raise data["exception"]
logger.debug("Parsing JSON response from url='%s' ...", url)
json_reply["json"] = json_helper.from_response(response)
- logger.debug("response.ok='%s',response.status_code='%s',response.text()=%d", response.ok, response.status_code, len(response.text), response.text)
+ logger.debug("response.ok='%s',response.status_code='%s',response.text()=%d", response.ok, response.status_code, len(response.text))
if not response.ok or response.status_code >= 300 or len(response.text) == 0:
logger.warning("Cannot query JSON API: url='%s',response.status_code=%d,response.text()=%d", url, response.status_code, len(response.text))
json_reply["status_code"] = response.status_code
try:
logger.debug("Fetching nodeinfo: domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
rows = federation.fetch_nodeinfo(domain, nodeinfo_url)
+
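+     # fetch_nodeinfo() may return an error record instead of plain JSON, so check
+     # for "error_message" and "exception" keys before unwrapping the "json" payload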
+ if "error_message" in rows:
+ logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", rows["error_message"], domain, nodeinfo_url)
+ instances.set_last_error(domain, rows)
+ return list()
+ elif "exception" in rows:
+ logger.warning("Exception '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s' - EXIT!", type(rows["exception"]), domain, nodeinfo_url)
+ return list()
+ elif "json" in rows:
+ logger.debug("rows[json] found for domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
+ rows = rows["json"]
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching nodeinfo from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)