git.mxchange.org Git - fba.git/commitdiff
Continued:
author Roland Häder <roland@mxchange.org>
Sat, 11 Jan 2025 15:05:55 +0000 (16:05 +0100)
committer Roland Häder <roland@mxchange.org>
Sat, 11 Jan 2025 15:07:15 +0000 (16:07 +0100)
- let's move these values to a central place

daemon.py
fba/commands.py
fba/helpers/config.py
fba/helpers/processing.py
fba/http/network.py
fba/http/nodeinfo.py
fba/networks/lemmy.py

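The commit removes the per-module `_timeout` tuples and keeps them in one place, fba/helpers/config.py, which every caller then references as config.timeout or config.nodeinfo_timeout. A minimal sketch of the resulting pattern, reconstructed from the hunks below (the json.loads() load step, the example URL and the caller's import path are assumptions, not part of this commit):

    # fba/helpers/config.py (sketch): build the timeout tuples once at import time
    import json

    _config = {}
    timeout = ()
    nodeinfo_timeout = ()

    with open("config.json") as f:
        _config = json.loads(f.read())
        timeout = (_config["connection_timeout"], _config["read_timeout"])
        nodeinfo_timeout = (_config["nodeinfo_connection_timeout"], _config["nodeinfo_read_timeout"])

    # caller side (sketch): no locally cached _timeout needed any more
    import requests
    from fba.helpers import config

    response = requests.get("https://example.org/api/info.json", timeout=config.timeout)
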
index f4d8f434b02ed2d028d239d1e8a5791fbc349e66..5ce1dad3c32cd5fd6a8ae7ac1fca7182ba323879 100755 (executable)
--- a/daemon.py
+++ b/daemon.py
@@ -45,9 +45,6 @@ from fba.helpers import tidyup
 from fba.models import blocks
 from fba.models import instances
 
-# Timeout
-_timeout = (config.get("connection_timeout"), config.get("read_timeout"))
-
 router = fastapi.FastAPI(docs_url=config.get("base_url") + "/docs", redoc_url=config.get("base_url") + "/redoc")
 router.mount(
     "/static",
@@ -363,7 +360,10 @@ def scoreboard(request: Request, mode: str, amount: int) -> None:
     elif amount <= 0:
         raise HTTPException(status_code=500, detail="Invalid amount specified")
 
-    response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/scoreboard.json?mode={mode}&amount={amount}", timeout=_timeout)
+    response = requests.get(
+        f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/scoreboard.json?mode={mode}&amount={amount}",
+        timeout=config.timeout
+    )
 
     if response is None:
         raise HTTPException(status_code=500, detail="Could not determine scores")
@@ -386,7 +386,10 @@ def list_domains(request: Request, mode: str, value: str, amount: int = config.g
     if mode == "detection_mode" and not instances.valid(value, "detection_mode"):
         raise HTTPException(status_code=500, detail="Invalid detection mode provided")
 
-    response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/list.json?mode={mode}&value={value}&amount={amount}", timeout=_timeout)
+    response = requests.get(
+        f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/list.json?mode={mode}&value={value}&amount={amount}",
+        timeout=config.timeout
+    )
 
     domainlist = list()
     if response is not None and response.ok:
@@ -414,7 +417,10 @@ def top(request: Request, mode: str, value: str, amount: int = config.get("api_l
     elif mode in ["domain", "reverse"] and not domain_helper.is_wanted(value):
         raise HTTPException(status_code=500, detail="Invalid or blocked domain specified")
 
-    response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/top.json?mode={mode}&value={value}&amount={amount}", timeout=_timeout)
+    response = requests.get(
+        f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/top.json?mode={mode}&value={value}&amount={amount}",
+        timeout=config.timeout
+    )
 
     found = 0
     blocklist = list()
@@ -450,7 +456,10 @@ def infos(request: Request, domain: str) -> None:
     if not domain_helper.is_wanted(domain):
         raise HTTPException(status_code=500, detail=f"domain='{domain}' is not wanted")
 
-    response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/domain.json?domain={domain}", timeout=_timeout)
+    response = requests.get(
+        f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/domain.json?domain={domain}",
+        timeout=config.timeout
+    )
 
     if not response.ok or response.status_code > 200 or response.text.strip() == "":
         raise HTTPException(status_code=response.status_code, detail=response.reason)
@@ -536,7 +545,10 @@ def robots(request: Request) -> None:
 @router.get(config.get("base_url") + "/")
 def index(request: Request) -> None:
     # Get info
-    response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/info.json", timeout=_timeout)
+    response = requests.get(
+        f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/info.json",
+        timeout=config.timeout
+    )
 
     if not response.ok:
         raise HTTPException(status_code=response.status_code, detail=response.text)
index 1f1b7e4b193d8fa7695cb3689ab7b8afd5d62808..54bd55e4877807c37b612e54833ef19f19213d35 100644 (file)
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -61,7 +61,6 @@ from fba.networks import misskey
 from fba.networks import pleroma
 
 # Locally "cached" values to speedup code and keep massive debug log shorter
-_timeout = (config.get("connection_timeout"), config.get("read_timeout"))
 _bot_enabled = config.get("bot_enabled")
 
 logging.basicConfig(level=logging.INFO)
@@ -507,7 +506,7 @@ def fetch_observer(args: argparse.Namespace) -> int:
         raw = network.fetch_url(
             f"https://{source_domain}",
             network.web_headers,
-            timeout=_timeout
+            timeout=config.timeout
         ).text
         logger.debug("raw[%s]()=%d", type(raw), len(raw))
 
@@ -637,7 +636,7 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int:
     raw = network.fetch_url(
         f"https://{source_domain}/todon/domainblocks",
         network.web_headers,
-        timeout=_timeout
+        timeout=config.timeout
     ).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
@@ -750,7 +749,7 @@ def fetch_cs(args: argparse.Namespace):
     raw = network.fetch_url(
         f"https://{source_domain}/federation",
         network.web_headers,
-        timeout=_timeout
+        timeout=config.timeout
     ).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
@@ -834,7 +833,7 @@ def fetch_fba_rss(args: argparse.Namespace) -> int:
         sources.update(domain)
 
     logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
-    response = network.fetch_url(args.feed, network.web_headers, _timeout)
+    response = network.fetch_url(args.feed, network.web_headers, config.timeout)
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
     if response.ok and response.status_code == 200 and len(response.text) > 0:
@@ -913,7 +912,7 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int:
     domains = list()
 
     logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
-    response = network.fetch_url(feed, network.web_headers, _timeout)
+    response = network.fetch_url(feed, network.web_headers, config.timeout)
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
     if response.ok and response.status_code == 200 and len(response.text) > 0:
@@ -1140,7 +1139,7 @@ def fetch_txt(args: argparse.Namespace) -> int:
     logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
     for row in blocklists.txt_files:
         logger.debug("Fetching row[url]='%s' ...", row["url"])
-        response = network.fetch_url(row["url"], network.web_headers, _timeout)
+        response = network.fetch_url(row["url"], network.web_headers, config.timeout)
 
         logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
         if response.ok and response.status_code == 200 and response.text != "":
@@ -1188,7 +1187,7 @@ def fetch_fedipact(args: argparse.Namespace) -> int:
     response = network.fetch_url(
         f"https://{source_domain}",
         network.web_headers,
-        timeout=_timeout
+        timeout=config.timeout
     )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
@@ -1247,7 +1246,7 @@ def fetch_joinmobilizon(args: argparse.Namespace) -> int:
     raw = network.fetch_url(
         f"https://{source_domain}/api/v1/instances",
         network.web_headers,
-        timeout=_timeout
+        timeout=config.timeout
     ).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
@@ -1298,7 +1297,7 @@ def fetch_joinmisskey(args: argparse.Namespace) -> int:
     raw = network.fetch_url(
         f"https://{source_domain}/instances.json",
         network.web_headers,
-        timeout=_timeout
+        timeout=config.timeout
     ).text
     logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
 
@@ -1505,7 +1504,7 @@ def fetch_fedilist(args: argparse.Namespace) -> int:
     response = reqto.get(
         url,
         headers=network.web_headers,
-        timeout=_timeout,
+        timeout=config.timeout,
         allow_redirects=False
     )
 
@@ -1790,7 +1789,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                 logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
                 raw = network.fetch_api_url(
                     row["nodeinfo_url"],
-                    timeout=_timeout
+                    timeout=config.timeout
                 )
 
                 logger.debug("raw[%s]()=%d", type(raw), len(raw))
@@ -1817,7 +1816,7 @@ def fetch_relays(args: argparse.Namespace) -> int:
                 raw = network.fetch_url(
                     f"https://{row['domain']}",
                     network.web_headers,
-                    timeout=_timeout
+                    timeout=config.timeout
                 ).text
                 logger.debug("raw[%s]()=%d", type(raw), len(raw))
 
index 5f6f0aa99306ea985be4e9ce4fdfb4c759aba69f..426e9a63451e885aa13b264548d4faf12ebdcfa4 100644 (file)
--- a/fba/helpers/config.py
+++ b/fba/helpers/config.py
@@ -22,6 +22,10 @@ import sys
 # config.get("foo") instead
 _config = {}
 
+# Locally "cached" values to speedup code and keep massive debug log shorter
+timeout = ()
+nodeinfo_timeout = ()
+
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 #logger.setLevel(logging.DEBUG)
@@ -32,6 +36,10 @@ with open("config.json") as f:
     _config["max_crawl_depth"] = min(_config["max_crawl_depth"], (sys.getrecursionlimit() - 50))
     logger.debug("LOADED!")
 
+    # "Cached" values
+    timeout = (_config["connection_timeout"], _config["read_timeout"])
+    nodeinfo_timeout = (_config["nodeinfo_connection_timeout"], _config["nodeinfo_read_timeout"])
+
 def get(key: str) -> any:
     logger.debug("key[%s]='%s' - CALLED!", type(key), key)
 
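For reference, both requests and reqto (called with the same timeout= keyword in the hunks here) interpret a 2-tuple timeout as separate connect and read timeouts, which is exactly what the pair of config keys encodes. A short illustration with hypothetical values:

    import requests

    # a 2-tuple timeout means (connect timeout, read timeout), in seconds
    response = requests.get("https://example.org", timeout=(5.0, 10.0))
    print(response.status_code)
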
index 0bed7638ae4a086ca757a88957c3b533e58138eb..88704610da4bb72939e16093b5d986738e2fca2b 100644 (file)
--- a/fba/helpers/processing.py
+++ b/fba/helpers/processing.py
@@ -149,7 +149,7 @@ def csv_block(blocker: str, url: str, command: str) -> None:
     response = network.fetch_url(
         url,
         network.web_headers,
-        (config.get("connection_timeout"), config.get("read_timeout"))
+        timeout=config.timeout
     )
 
     logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
index c551957e21c0c926a5e24f4ed406ac7f3f4868ba..989d28b16bc843c6fefcd7f35d418e958836fc50 100644 (file)
--- a/fba/http/network.py
+++ b/fba/http/network.py
@@ -438,6 +438,7 @@ def get_generic(domain: str, path: str, allow_redirects: bool = False) -> reques
         domain,
         path,
         headers=web_headers,
+        timeout=config.timeout,
         allow_redirects=allow_redirects
     )
 
index 238f6798b54161bf2444606c6a751ff7e72cb1f4..78143fa7cc27742c3a68a8e256456f7c1d5b3729 100644 (file)
--- a/fba/http/nodeinfo.py
+++ b/fba/http/nodeinfo.py
@@ -56,16 +56,6 @@ _nodeinfo_identifier = [
     "http://nodeinfo.diaspora.software/ns/schema/1.0",
 ]
 
-# Locally "cached" values to speedup code and keep massive debug log shorter
-_timeout = (
-    config.get("connection_timeout"),
-    config.get("read_timeout")
-)
-_nodeinfo_timeout = (
-    config.get("nodeinfo_connection_timeout"),
-    config.get("nodeinfo_read_timeout")
-)
-
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 #logger.setLevel(logging.DEBUG)
@@ -128,7 +118,7 @@ def fetch(domain: str, path: str = None, update_mode: bool = True) -> dict:
                 domain,
                 request,
                 headers=headers,
-                timeout=_nodeinfo_timeout
+                timeout=config.nodeinfo_timeout
             )
 
             logger.debug("data(%d)[]='%s'", len(data), type(data))
@@ -183,7 +173,7 @@ def fetch_wellknown_nodeinfo(domain: str) -> dict:
             domain,
             path,
             headers=headers,
-            timeout=_nodeinfo_timeout
+            timeout=config.nodeinfo_timeout
         )
 
         logger.debug("data(%d)[]='%s'", len(data), type(data))
@@ -258,7 +248,7 @@ def fetch_wellknown_nodeinfo(domain: str) -> dict:
                 logger.debug("Fetching nodeinfo from url='%s' ...", url)
                 data = network.fetch_api_url(
                     url,
-                    timeout=_timeout
+                    timeout=config.timeout
                  )
 
                 logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
@@ -278,6 +268,7 @@ def fetch_wellknown_nodeinfo(domain: str) -> dict:
             if "error_message" not in data and "json" in data:
                 logger.debug("Auto-discovery successful: domain='%s' - BREAK!", domain)
                 break
+
     elif "server" in infos:
         logger.debug("Found infos[server][software]='%s'", infos["server"]["software"])
         instances.set_detection_mode(domain, "AUTO_DISCOVERY")
index e899f5515da9ff810634b8818fb6e30ff580a06c..9fcfbfe6a90463d6668433e822d8c65c8721c836 100644 (file)
--- a/fba/networks/lemmy.py
+++ b/fba/networks/lemmy.py
@@ -98,7 +98,7 @@ def fetch_peers(domain: str, origin: str) -> list:
             domain,
             "/api/v3/site",
             headers,
-            (config.get("connection_timeout"), config.get("read_timeout"))
+            timeout=config.timeout
         )
 
         logger.debug("data[]='%s'", type(data))