git.mxchange.org Git - fba.git/commitdiff
Continued:
authorRoland Häder <roland@mxchange.org>
Fri, 10 Jan 2025 08:38:38 +0000 (09:38 +0100)
committerRoland Häder <roland@mxchange.org>
Fri, 10 Jan 2025 08:38:38 +0000 (09:38 +0100)
- introduced network.get_generic() to encapsulate some repeated parameters

fba/http/csrf.py
fba/http/federation.py
fba/http/network.py
fba/networks/friendica.py
fba/networks/lemmy.py
fba/networks/mastodon.py
fba/networks/pleroma.py

index b004c163cd4d98fbff7b88ce9b9a771a1f89c0ae..f3b8c563096982b7fbc6e2aee881254e0ee925f4 100644 (file)
@@ -47,11 +47,9 @@ def determine(domain: str, headers: dict) -> dict:
 
     # Fetch / to check for meta tag indicating csrf
     logger.debug("Fetching / from domain='%s' for CSRF check ...", domain)
-    response = network.fetch_response(
+    response = network.get_generic(
         domain,
-        "/",
-        headers=network.web_headers,
-        timeout=(config.get("connection_timeout"), config.get("read_timeout"))
+        "/"
     )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
index dad20a80cb86e50d7b50f0b4011699392cc2645e..aa05e37367c87bb6986274767c66c331185d7156 100644 (file)
@@ -305,11 +305,9 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
     software = None
 
     logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
-    response = network.fetch_response(
+    response = network.get_generic(
         domain,
         path,
-        headers=network.web_headers,
-        timeout=(config.get("connection_timeout"), config.get("read_timeout")),
         allow_redirects=True
     )
 
index 5237d8774ae88fe3c7e2279c1b2909de34867944..c551957e21c0c926a5e24f4ed406ac7f3f4868ba 100644 (file)
@@ -294,6 +294,8 @@ def fetch_response(domain: str, path: str, headers: dict, timeout: tuple, allow_
         raise ValueError(f"headers[]='{type(headers)}' is not of type 'dict'")
     elif not isinstance(timeout, tuple):
         raise ValueError(f"timeout[]='{type(timeout)}' is not of type 'tuple'")
+    elif not isinstance(allow_redirects, bool):
+        raise ValueError(f"allow_redirects[]='{type(allow_redirects)}' is not of type 'bool'")
 
     start = 0
     try:
@@ -415,3 +417,29 @@ def fetch_json_rows(hostname: str, path: str, headers: dict = {}, rows_key: str
 
     logger.debug("rows()=%d - EXIT!", len(rows))
     return rows
+
+def get_generic(domain: str, path: str, allow_redirects: bool = False) -> requests.models.Response:
+    logger.debug("domain='%s',path='%s',allow_redirects='%s' - CALLED!", domain, path, allow_redirects)
+    domain_helper.raise_on(domain)
+
+    if blacklist.is_blacklisted(domain):
+        raise ValueError(f"domain='{domain}' is blacklisted but function was invoked")
+    elif not isinstance(path, str):
+        raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+    elif path == "":
+        raise ValueError("Parameter 'path' is empty")
+    elif not path.startswith("/"):
+        raise ValueError(f"path='{path}' does not start with / but should")
+    elif not isinstance(allow_redirects, bool):
+        raise ValueError(f"allow_redirects[]='{type(allow_redirects)}' is not of type 'bool'")
+
+    logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
+    response = fetch_response(
+        domain,
+        path,
+        headers=web_headers,
+        allow_redirects=allow_redirects
+    )
+
+    logger.debug("response[]='%s' - EXIT!", type(response))
+    return response
index abcda4c9f5f36b2234d0cc4d7fd6931bd7dcb07c..2fd53b72a594f0547d9061748ca2699e788a6a0e 100644 (file)
@@ -45,11 +45,9 @@ def fetch_blocks(domain: str) -> list:
 
     try:
         logger.debug("Fetching friendica blocks from domain='%s' ...", domain)
-        raw = network.fetch_response(
+        raw = network.get_generic(
             domain,
-            "/friendica",
-            network.web_headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
+            "/friendica"
         ).text
         logger.debug("Parsing %d Bytes ...", len(raw))
 
index 785e5b7b9dbc8d77eb9be617539a4f75b646f14e..e899f5515da9ff810634b8818fb6e30ff580a06c 100644 (file)
@@ -140,11 +140,9 @@ def fetch_blocks(domain: str) -> list:
     try:
         # json endpoint for newer mastodongs
         logger.debug("Fetching /instances from domain='%s'", domain)
-        response = network.fetch_response(
+        response = network.get_generic(
             domain,
-            "/instances",
-            network.web_headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
+            "/instances"
         )
 
         logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
@@ -251,11 +249,9 @@ def fetch_instances(domain: str, origin: str) -> list:
     try:
         # json endpoint for newer mastodongs
         logger.debug("Fetching /instances from domain='%s'", domain)
-        response = network.fetch_response(
+        response = network.get_generic(
             domain,
-            "/instances",
-            network.web_headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
+            "/instances"
         )
 
         logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
index df4ba927ce4402d94eaf11c8bcb6f8d7875ac91b..adc3055a98dd461d3f51157b47c51101fcdf71d9 100644 (file)
@@ -75,14 +75,13 @@ def fetch_blocks_from_about(domain: str) -> dict:
 
     logger.info("Fetching mastodon blocks from domain='%s'", domain)
     for path in ["/about/more", "/about"]:
+        logger.debug("path='%s'", path)
         try:
             logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
             doc = bs4.BeautifulSoup(
-                network.fetch_response(
+                network.get_generic(
                     domain,
-                    path,
-                    network.web_headers,
-                    (config.get("connection_timeout"), config.get("read_timeout"))
+                    path
                 ).text,
                 "html.parser",
             )
index 024a4f5f5016d25788fb5da88ff6f16a91baf7bf..1abb75136617baa0c24f5c3a191650544e37d98a 100644 (file)
@@ -302,11 +302,9 @@ def fetch_blocks_from_about(domain: str) -> dict:
             doc = None
 
             logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
-            response = network.fetch_response(
+            response = network.get_generic(
                 domain,
-                path,
-                network.web_headers,
-                (config.get("connection_timeout"), config.get("read_timeout"))
+                path
             )
 
             logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))