git.mxchange.org Git - fba.git/commitdiff
Continued:
author Roland Häder <roland@mxchange.org>
Thu, 27 Jul 2023 10:59:53 +0000 (12:59 +0200)
committer Roland Häder <roland@mxchange.org>
Thu, 27 Jul 2023 10:59:53 +0000 (12:59 +0200)
- move nodeinfo handling to a new module 'nodeinfo'
- also had to rename the variable nodeinfo to other names
- query the newest version at /.well-known/x-nodeinfo2 first
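
In practice, call sites switch from federation.fetch_nodeinfo() to nodeinfo.fetch_nodeinfo(), as the federation.py and pleroma.py hunks below show. A minimal sketch of the new usage (the domain is illustrative):

    from fba.http import nodeinfo  # was: from fba.http import federation

    # Returns a dict carrying either the decoded document under "json" or an
    # error triple ("status_code", "error_message", "exception").
    data = nodeinfo.fetch_nodeinfo("example.social")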

fba/http/__init__.py
fba/http/federation.py
fba/http/nodeinfo.py [new file with mode: 0644]
fba/networks/pleroma.py

diff --git a/fba/http/__init__.py b/fba/http/__init__.py
index 5d1da3a9dec5ec21044d9c1ca7f08567dee7e5a7..fcfdfd9e067551cdb7871b687ba6e75dd351ecfe 100644 (file)
--- a/fba/http/__init__.py
+++ b/fba/http/__init__.py
@@ -16,4 +16,5 @@
 __all__ = [
     'federation',
     'network',
+    'nodeinfo',
 ]
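
Adding 'nodeinfo' to __all__ only affects wildcard imports of the fba.http package; a small illustration of what the new entry enables:

    from fba.http import *  # now also binds the submodule name 'nodeinfo'

    data = nodeinfo.fetch_nodeinfo("example.social")  # illustrative domain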
diff --git a/fba/http/federation.py b/fba/http/federation.py
index 0395eb94d19b9619577febca5dbc4b9defa75652..3b0e990c5f077c10177fbaba30f8563a2e204de9 100644 (file)
--- a/fba/http/federation.py
+++ b/fba/http/federation.py
@@ -31,6 +31,7 @@ from fba.helpers import tidyup
 from fba.helpers import version
 
 from fba.http import network
+from fba.http import nodeinfo
 
 from fba.models import instances
 
@@ -233,229 +234,6 @@ def fetch_peers(domain: str, software: str, origin: str) -> list:
     logger.debug("peers()=%d - EXIT!", len(peers))
     return peers
 
-def fetch_nodeinfo(domain: str, path: str = None) -> dict:
-    logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
-    domain_helper.raise_on(domain)
-
-    if not isinstance(path, str) and path is not None:
-        raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
-
-    logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
-    nodeinfo = fetch_wellknown_nodeinfo(domain)
-
-    logger.debug("nodeinfo[%s](%d)='%s'", type(nodeinfo), len(nodeinfo), nodeinfo)
-    if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0:
-        logger.debug("Invoking instances.set_last_nodeinfo(%s) ...", domain)
-        instances.set_last_nodeinfo(domain)
-
-        logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json']))
-        return nodeinfo
-
-    # No CSRF by default, you don't have to add network.api_headers by yourself here
-    headers = tuple()
-    data = dict()
-
-    try:
-        logger.debug("Checking CSRF for domain='%s'", domain)
-        headers = csrf.determine(domain, dict())
-    except network.exceptions as exception:
-        logger.warning("Exception '%s' during checking CSRF (nodeinfo,%s) - EXIT!", type(exception), __name__)
-        instances.set_last_error(domain, exception)
-        instances.set_software(domain, None)
-        instances.set_detection_mode(domain, None)
-        instances.set_nodeinfo_url(domain, None)
-        return {
-            "status_code"  : 500,
-            "error_message": f"exception[{type(exception)}]='{str(exception)}'",
-            "exception"    : exception,
-        }
-
-    request_paths = [
-       "/nodeinfo/2.1.json",
-       "/nodeinfo/2.1",
-       "/nodeinfo/2.0.json",
-       "/nodeinfo/2.0",
-       "/nodeinfo/1.0.json",
-       "/nodeinfo/1.0",
-       "/api/v1/instance",
-    ]
-
-    for request in request_paths:
-        logger.debug("request='%s'", request)
-        http_url  = f"http://{domain}{str(path)}"
-        https_url = f"https://{domain}{str(path)}"
-
-        logger.debug("path[%s]='%s',request='%s',http_url='%s',https_url='%s'", type(path), path, request, http_url, https_url)
-        if path is None or path in [request, http_url, https_url]:
-            logger.debug("path='%s',http_url='%s',https_url='%s'", path, http_url, https_url)
-            if path in [http_url, https_url]:
-                logger.debug("domain='%s',path='%s' has protocol in path, splitting ...", domain, path)
-                components = urlparse(path)
-                path = components.path
-
-            logger.debug("Fetching request='%s' from domain='%s' ...", request, domain)
-            data = network.get_json_api(
-                domain,
-                request,
-                headers,
-                (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
-            )
-
-            logger.debug("data[]='%s'", type(data))
-            if "error_message" not in data and "json" in data:
-                logger.debug("Success: request='%s' - Setting detection_mode=STATIC_CHECK ...", request)
-                instances.set_last_nodeinfo(domain)
-                instances.set_detection_mode(domain, "STATIC_CHECK")
-                instances.set_nodeinfo_url(domain, request)
-                break
-
-            logger.warning("Failed fetching nodeinfo from domain='%s',status_code='%s',error_message='%s'", domain, data['status_code'], data['error_message'])
-
-    logger.debug("data()=%d - EXIT!", len(data))
-    return data
-
-def fetch_wellknown_nodeinfo(domain: str) -> dict:
-    logger.debug("domain='%s' - CALLED!", domain)
-    domain_helper.raise_on(domain)
-
-    # "rel" identifiers (no real URLs)
-    nodeinfo_identifier = [
-        "https://nodeinfo.diaspora.software/ns/schema/2.1",
-        "http://nodeinfo.diaspora.software/ns/schema/2.1",
-        "https://nodeinfo.diaspora.software/ns/schema/2.0",
-        "http://nodeinfo.diaspora.software/ns/schema/2.0",
-        "https://nodeinfo.diaspora.software/ns/schema/1.1",
-        "http://nodeinfo.diaspora.software/ns/schema/1.1",
-        "https://nodeinfo.diaspora.software/ns/schema/1.0",
-        "http://nodeinfo.diaspora.software/ns/schema/1.0",
-    ]
-
-    # No CSRF by default, you don't have to add network.api_headers by yourself here
-    headers = tuple()
-
-    try:
-        logger.debug("Checking CSRF for domain='%s'", domain)
-        headers = csrf.determine(domain, dict())
-    except network.exceptions as exception:
-        logger.warning("Exception '%s' during checking CSRF (fetch_wellknown_nodeinfo,%s) - EXIT!", type(exception), __name__)
-        instances.set_last_error(domain, exception)
-        return {
-            "status_code"  : 500,
-            "error_message": type(exception),
-            "exception"    : exception,
-        }
-
-    data = dict()
-
-    logger.debug("Fetching .well-known info for domain='%s'", domain)
-    for path in ["/.well-known/nodeinfo", "/.well-known/x-nodeinfo2"]:
-        logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
-        data = network.get_json_api(
-            domain,
-            path,
-            headers,
-            (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
-        )
-        logger.debug("data[]='%s'", type(data))
-
-        if "error_message" not in data and "json" in data:
-            logger.debug("path='%s' returned valid json()=%d", path, len(data["json"]))
-            break
-
-    logger.debug("data[]='%s'", type(data))
-    if "exception" in data:
-        logger.warning("domain='%s' returned exception '%s'", domain, str(data["exception"]))
-        raise data["exception"]
-    elif "error_message" in data:
-        logger.warning("domain='%s' returned error message: '%s'", domain, data["error_message"])
-        return data
-    elif "json" not in data:
-        logger.warning("domain='%s' returned no 'json' key", domain)
-        return dict()
-
-    nodeinfo = data["json"]
-    logger.debug("nodeinfo()=%d has been returned", len(nodeinfo))
-
-    if "links" in nodeinfo:
-        logger.debug("Marking domain='%s' as successfully handled ...", domain)
-        instances.set_success(domain)
-
-        logger.debug("Found nodeinfo[links]()=%d record(s),", len(nodeinfo["links"]))
-        for niid in nodeinfo_identifier:
-            data = dict()
-
-            logger.debug("Checking niid='%s' ...", niid)
-            for link in nodeinfo["links"]:
-                logger.debug("link[%s]='%s'", type(link), link)
-                if not isinstance(link, dict) or "rel" not in link:
-                    logger.debug("link[]='%s' is not of type 'dict' or no element 'rel' found - SKIPPED!", type(link))
-                    continue
-                elif link["rel"] != niid:
-                    logger.debug("link[rel]='%s' does not match niid='%s' - SKIPPED!", link["rel"], niid)
-                    continue
-                elif "href" not in link:
-                    logger.warning("link[rel]='%s' has no element 'href' - SKIPPED!", link["rel"])
-                    continue
-                elif link["href"] is None:
-                    logger.debug("link[href] is None, link[rel]='%s' - SKIPPED!", link["rel"])
-                    continue
-
-                # Default is that 'href' has a complete URL, but some hosts don't send that
-                logger.debug("link[rel]='%s' matches niid='%s'", link["rel"], niid)
-                url = link["href"]
-                components = urlparse(url)
-
-                logger.debug("components[%s]='%s'", type(components), components)
-                if components.scheme == "" and components.netloc == "":
-                    logger.warning("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
-                    url = f"https://{domain}{url}"
-                    components = urlparse(url)
-                elif components.netloc == "":
-                    logger.warning("link[href]='%s' has no netloc set, setting domain='%s'", link["href"], domain)
-                    url = f"{components.scheme}://{domain}{components.path}"
-                    components = urlparse(url)
-
-                logger.debug("components.netloc[]='%s'", type(components.netloc))
-                if not domain_helper.is_wanted(components.netloc):
-                    logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
-                    continue
-
-                logger.debug("Fetching nodeinfo from url='%s' ...", url)
-                data = network.fetch_api_url(
-                    url,
-                    (config.get("connection_timeout"), config.get("read_timeout"))
-                )
-
-                logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
-                if "error_message" not in data and "json" in data:
-                    logger.debug("Found JSON data()=%d,link[href]='%s' - Setting detection_mode=AUTO_DISCOVERY ...", len(data), link["href"])
-                    instances.set_detection_mode(domain, "AUTO_DISCOVERY")
-                    instances.set_nodeinfo_url(domain, link["href"])
-
-                    logger.debug("Marking domain='%s' as successfully handled ...", domain)
-                    instances.set_success(domain)
-                    break
-                else:
-                    logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
-                    instances.set_last_error(domain, data)
-
-            logger.debug("data()=%d", len(data))
-            if "error_message" not in data and "json" in data:
-                logger.debug("Auto-discovery successful: domain='%s'", domain)
-                break
-    elif "server" in nodeinfo:
-        logger.debug("Found nodeinfo[server][software]='%s'", nodeinfo["server"]["software"])
-        instances.set_detection_mode(domain, "AUTO_DISCOVERY")
-        instances.set_nodeinfo_url(domain, f"https://{domain}/.well-known/x-nodeinfo2")
-
-        logger.debug("Marking domain='%s' as successfully handled ...", domain)
-        instances.set_success(domain)
-    else:
-        logger.warning("nodeinfo does not contain 'links' or 'server': domain='%s'", domain)
-
-    logger.debug("Returning data[]='%s' - EXIT!", type(data))
-    return data
-
 def fetch_generator_from_path(domain: str, path: str = "/") -> str:
     logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
     domain_helper.raise_on(domain)
@@ -566,7 +344,7 @@ def determine_software(domain: str, path: str = None) -> str:
     software = None
 
     logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
-    data = fetch_nodeinfo(domain, path)
+    data = nodeinfo.fetch_nodeinfo(domain, path)
 
     logger.debug("data[%s]='%s'", type(data), data)
     if "exception" in data:
diff --git a/fba/http/nodeinfo.py b/fba/http/nodeinfo.py
new file mode 100644 (file)
index 0000000..36220bd
--- /dev/null
+++ b/fba/http/nodeinfo.py
@@ -0,0 +1,255 @@
+# Copyright (C) 2023 Free Software Foundation
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+import logging
+
+from urllib.parse import urlparse
+
+from fba import csrf
+
+from fba.helpers import config
+from fba.helpers import domain as domain_helper
+
+from fba.http import network
+
+from fba.models import instances
+
+_DEPTH = 0
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+def fetch_nodeinfo(domain: str, path: str = None) -> dict:
+    logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
+    domain_helper.raise_on(domain)
+
+    if not isinstance(path, str) and path is not None:
+        raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+
+    logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
+    data = fetch_wellknown_nodeinfo(domain)
+
+    logger.debug("data[%s](%d)='%s'", type(data), len(data), data)
+    if "error_message" not in data and "json" in data and len(data["json"]) > 0:
+        logger.debug("Invoking instances.set_last_nodeinfo(%s) ...", domain)
+        instances.set_last_nodeinfo(domain)
+
+        logger.debug("Found data[json]()=%d - EXIT!", len(data['json']))
+        return data
+
+    # No CSRF by default, you don't have to add network.api_headers by yourself here
+    headers = tuple()
+    data = dict()
+
+    try:
+        logger.debug("Checking CSRF for domain='%s'", domain)
+        headers = csrf.determine(domain, dict())
+    except network.exceptions as exception:
+        logger.warning("Exception '%s' during checking CSRF (nodeinfo,%s) - EXIT!", type(exception), __name__)
+        instances.set_last_error(domain, exception)
+        instances.set_software(domain, None)
+        instances.set_detection_mode(domain, None)
+        instances.set_nodeinfo_url(domain, None)
+        return {
+            "status_code"  : 500,
+            "error_message": f"exception[{type(exception)}]='{str(exception)}'",
+            "exception"    : exception,
+        }
+
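+    # Static probe order: newest schema first (2.1 down to 1.0, each with and
+    # without the ".json" suffix), then Mastodon's /api/v1/instance as a fallback.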
+    request_paths = [
+       "/nodeinfo/2.1.json",
+       "/nodeinfo/2.1",
+       "/nodeinfo/2.0.json",
+       "/nodeinfo/2.0",
+       "/nodeinfo/1.0.json",
+       "/nodeinfo/1.0",
+       "/api/v1/instance",
+    ]
+
+    for request in request_paths:
+        logger.debug("request='%s'", request)
+        http_url  = f"http://{domain}{str(path)}"
+        https_url = f"https://{domain}{str(path)}"
+
+        logger.debug("path[%s]='%s',request='%s',http_url='%s',https_url='%s'", type(path), path, request, http_url, https_url)
+        if path is None or path in [request, http_url, https_url]:
+            logger.debug("path='%s',http_url='%s',https_url='%s'", path, http_url, https_url)
+            if path in [http_url, https_url]:
+                logger.debug("domain='%s',path='%s' has protocol in path, splitting ...", domain, path)
+                components = urlparse(path)
+                path = components.path
+
+            logger.debug("Fetching request='%s' from domain='%s' ...", request, domain)
+            data = network.get_json_api(
+                domain,
+                request,
+                headers,
+                (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
+            )
+
+            logger.debug("data[]='%s'", type(data))
+            if "error_message" not in data and "json" in data:
+                logger.debug("Success: request='%s' - Setting detection_mode=STATIC_CHECK ...", request)
+                instances.set_last_nodeinfo(domain)
+                instances.set_detection_mode(domain, "STATIC_CHECK")
+                instances.set_nodeinfo_url(domain, request)
+                break
+
+            logger.warning("Failed fetching nodeinfo from domain='%s',status_code='%s',error_message='%s'", domain, data['status_code'], data['error_message'])
+
+    logger.debug("data()=%d - EXIT!", len(data))
+    return data
+
+def fetch_wellknown_nodeinfo(domain: str) -> dict:
+    logger.debug("domain='%s' - CALLED!", domain)
+    domain_helper.raise_on(domain)
+
+    # "rel" identifiers (no real URLs)
+    nodeinfo_identifier = [
+        "https://nodeinfo.diaspora.software/ns/schema/2.1",
+        "http://nodeinfo.diaspora.software/ns/schema/2.1",
+        "https://nodeinfo.diaspora.software/ns/schema/2.0",
+        "http://nodeinfo.diaspora.software/ns/schema/2.0",
+        "https://nodeinfo.diaspora.software/ns/schema/1.1",
+        "http://nodeinfo.diaspora.software/ns/schema/1.1",
+        "https://nodeinfo.diaspora.software/ns/schema/1.0",
+        "http://nodeinfo.diaspora.software/ns/schema/1.0",
+    ]
+
+    # No CSRF by default, you don't have to add network.api_headers by yourself here
+    headers = tuple()
+
+    try:
+        logger.debug("Checking CSRF for domain='%s'", domain)
+        headers = csrf.determine(domain, dict())
+    except network.exceptions as exception:
+        logger.warning("Exception '%s' during checking CSRF (fetch_wellknown_nodeinfo,%s) - EXIT!", type(exception), __name__)
+        instances.set_last_error(domain, exception)
+        return {
+            "status_code"  : 500,
+            "error_message": type(exception),
+            "exception"    : exception,
+        }
+
+    data = dict()
+
+    logger.debug("Fetching .well-known info for domain='%s'", domain)
+    for path in ["/.well-known/x-nodeinfo2", "/.well-known/nodeinfo"]:
+        logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
+        data = network.get_json_api(
+            domain,
+            path,
+            headers,
+            (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
+        )
+        logger.debug("data[]='%s'", type(data))
+
+        if "error_message" not in data and "json" in data:
+            logger.debug("path='%s' returned valid json()=%d", path, len(data["json"]))
+            break
+
+    logger.debug("data[]='%s'", type(data))
+    if "exception" in data:
+        logger.warning("domain='%s' returned exception '%s'", domain, str(data["exception"]))
+        raise data["exception"]
+    elif "error_message" in data:
+        logger.warning("domain='%s' returned error message: '%s'", domain, data["error_message"])
+        return data
+    elif "json" not in data:
+        logger.warning("domain='%s' returned no 'json' key", domain)
+        return dict()
+
+    infos = data["json"]
+    logger.debug("infos()=%d has been returned", len(infos))
+
+    if "links" in infos:
+        logger.debug("Marking domain='%s' as successfully handled ...", domain)
+        instances.set_success(domain)
+
+        logger.debug("Found infos[links]()=%d record(s),", len(infos["links"]))
+        for niid in nodeinfo_identifier:
+            data = dict()
+
+            logger.debug("Checking niid='%s' ...", niid)
+            for link in infos["links"]:
+                logger.debug("link[%s]='%s'", type(link), link)
+                if not isinstance(link, dict) or "rel" not in link:
+                    logger.debug("link[]='%s' is not of type 'dict' or no element 'rel' found - SKIPPED!", type(link))
+                    continue
+                elif link["rel"] != niid:
+                    logger.debug("link[rel]='%s' does not match niid='%s' - SKIPPED!", link["rel"], niid)
+                    continue
+                elif "href" not in link:
+                    logger.warning("link[rel]='%s' has no element 'href' - SKIPPED!", link["rel"])
+                    continue
+                elif link["href"] is None:
+                    logger.debug("link[href] is None, link[rel]='%s' - SKIPPED!", link["rel"])
+                    continue
+
+                # Default is that 'href' has a complete URL, but some hosts don't send that
+                logger.debug("link[rel]='%s' matches niid='%s'", link["rel"], niid)
+                url = link["href"]
+                components = urlparse(url)
+
+                logger.debug("components[%s]='%s'", type(components), components)
+                if components.scheme == "" and components.netloc == "":
+                    logger.warning("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
+                    url = f"https://{domain}{url}"
+                    components = urlparse(url)
+                elif components.netloc == "":
+                    logger.warning("link[href]='%s' has no netloc set, setting domain='%s'", link["href"], domain)
+                    url = f"{components.scheme}://{domain}{components.path}"
+                    components = urlparse(url)
+
+                logger.debug("components.netloc[]='%s'", type(components.netloc))
+                if not domain_helper.is_wanted(components.netloc):
+                    logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
+                    continue
+
+                logger.debug("Fetching nodeinfo from url='%s' ...", url)
+                data = network.fetch_api_url(
+                    url,
+                    (config.get("connection_timeout"), config.get("read_timeout"))
+                )
+
+                logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
+                if "error_message" not in data and "json" in data:
+                    logger.debug("Found JSON data()=%d,link[href]='%s' - Setting detection_mode=AUTO_DISCOVERY ...", len(data), link["href"])
+                    instances.set_detection_mode(domain, "AUTO_DISCOVERY")
+                    instances.set_nodeinfo_url(domain, link["href"])
+
+                    logger.debug("Marking domain='%s' as successfully handled ...", domain)
+                    instances.set_success(domain)
+                    break
+                else:
+                    logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
+                    instances.set_last_error(domain, data)
+
+            logger.debug("data()=%d", len(data))
+            if "error_message" not in data and "json" in data:
+                logger.debug("Auto-discovery successful: domain='%s'", domain)
+                break
+    elif "server" in infos:
+        logger.debug("Found infos[server][software]='%s'", infos["server"]["software"])
+        instances.set_detection_mode(domain, "AUTO_DISCOVERY")
+        instances.set_nodeinfo_url(domain, f"https://{domain}/.well-known/x-nodeinfo2")
+
+        logger.debug("Marking domain='%s' as successfully handled ...", domain)
+        instances.set_success(domain)
+    else:
+        logger.warning("nodeinfo does not contain 'links' or 'server': domain='%s'", domain)
+
+    logger.debug("Returning data[]='%s' - EXIT!", type(data))
+    return data
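
The "server" branch above handles the x-nodeinfo2 format, which inlines the software description instead of indirecting through "links". An illustrative payload (field values are hypothetical), written as the Python dict the branch would see in infos:

    # Hypothetical x-nodeinfo2 payload; only "server" -> "software" is read above.
    infos = {
        "version": "1.0",
        "server": {
            "baseUrl": "https://example.social",
            "software": "pleroma",
            "version": "2.5.0",
        },
    }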
diff --git a/fba/networks/pleroma.py b/fba/networks/pleroma.py
index 320e3f576ec14b911043bcbc1b62e6c9358c623e..5a52f9cd58e823cdb37497d01489a13d8888dad2 100644 (file)
--- a/fba/networks/pleroma.py
+++ b/fba/networks/pleroma.py
@@ -25,8 +25,8 @@ from fba.helpers import config
 from fba.helpers import domain as domain_helper
 from fba.helpers import tidyup
 
-from fba.http import federation
 from fba.http import network
+from fba.http import nodeinfo
 
 from fba.models import blocks
 from fba.models import instances
@@ -63,7 +63,7 @@ def fetch_blocks(domain: str, nodeinfo_url: str) -> list:
     rows = None
     try:
         logger.debug("Fetching nodeinfo: domain='%s',nodeinfo_url='%s'", domain, nodeinfo_url)
-        rows = federation.fetch_nodeinfo(domain, nodeinfo_url)
+        rows = nodeinfo.fetch_nodeinfo(domain, nodeinfo_url)
 
         if "error_message" in rows:
             logger.warning("Error message '%s' during fetching nodeinfo for domain='%s',nodeinfo_url='%s'", rows["error_message"], domain, nodeinfo_url)