git.mxchange.org Git - fba.git/commitdiff
Continued:
authorRoland Häder <roland@mxchange.org>
Fri, 9 Jun 2023 03:45:04 +0000 (05:45 +0200)
committerRoland Häder <roland@mxchange.org>
Fri, 9 Jun 2023 03:45:04 +0000 (05:45 +0200)
- moved headers,api_headers to fba.network module
- fixed more references

fba/boot.py
fba/commands.py
fba/fba.py
fba/federation/lemmy.py
fba/federation/mastodon.py
fba/federation/peertube.py
fba/network.py

index fcdef23f0d699f7c9f3c826eca3ad494540acc4d..d0bc4564e58bff11675ded616cc54d02fe7c5cd6 100644 (file)
@@ -19,6 +19,7 @@ import os
 import sys
 import tempfile
 import zc.lockfile
+
 from fba import commands
 from fba import fba
 
index 1db0c0872220b96e957cd7393575ef2624e6e679..98d8ba6c3859bc0be48fd24d11192d02031175e1 100644 (file)
@@ -284,7 +284,7 @@ def fetch_cs(args: argparse.Namespace):
     }
 
     try:
-        raw = fba.fetch_url("https://raw.githubusercontent.com/chaossocial/meta/master/federation.md", fba.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+        raw = fba.fetch_url("https://raw.githubusercontent.com/chaossocial/meta/master/federation.md", network.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
         # DEBUG: print(f"DEBUG: raw()={len(raw)}[]={type(raw)}")
 
         doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features='html.parser')
@@ -331,7 +331,7 @@ def fetch_fba_rss(args: argparse.Namespace):
 
     try:
         print(f"INFO: Fetch FBA-specific RSS args.feed='{args.feed}' ...")
-        response = fba.fetch_url(args.feed, fba.headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = fba.fetch_url(args.feed, network.headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code='{response.status_code}',response.text()={len(response.text)}")
         if response.ok and response.status_code < 300 and len(response.text) > 0:
@@ -378,7 +378,7 @@ def fetch_fbabot_atom(args: argparse.Namespace):
     domains = list()
     try:
         print(f"INFO: Fetching ATOM feed='{feed}' from FBA bot account ...")
-        response = fba.fetch_url(feed, fba.headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = fba.fetch_url(feed, network.headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code='{response.status_code}',response.text()={len(response.text)}")
         if response.ok and response.status_code < 300 and len(response.text) > 0:
@@ -458,7 +458,7 @@ def fetch_federater(args: argparse.Namespace):
     boot.acquire_lock()
 
     # Fetch this URL
-    response = fba.fetch_url("https://github.com/federater/blocks_recommended/raw/main/federater.csv", fba.headers, (config.get("connection_timeout"), config.get("read_timeout")))
+    response = fba.fetch_url("https://github.com/federater/blocks_recommended/raw/main/federater.csv", network.headers, (config.get("connection_timeout"), config.get("read_timeout")))
     # DEBUG: print(f"DEBUG: response[]='{type(response)}'")
     if response.ok and response.content != "":
         # DEBUG: print(f"DEBUG: Fetched {len(response.content)} Bytes, parsing CSV ...")
index 7f0c140d3dac2ade2041077ea01d6ebb2e37992e..95c916c6d250443a6d8e3e8cf246673bb6b66251 100644 (file)
@@ -51,17 +51,6 @@ nodeinfo_identifier = [
     "http://nodeinfo.diaspora.software/ns/schema/1.0",
 ]
 
-# HTTP headers for non-API requests
-headers = {
-    "User-Agent": config.get("useragent"),
-}
-
-# HTTP headers for API requests
-api_headers = {
-    "User-Agent": config.get("useragent"),
-    "Content-Type": "application/json",
-}
-
 # Connect to database
 connection = sqlite3.connect("blocks.db")
 cursor = connection.cursor()
@@ -371,14 +360,14 @@ def fetch_peers(domain: str, software: str) -> list:
     # DEBUG: print(f"DEBUG: Fetching peers from '{domain}',software='{software}' ...")
     peers = list()
     try:
-        response = network.fetch_response(domain, "/api/v1/instance/peers", api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = network.fetch_response(domain, "/api/v1/instance/peers", network.api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         data = json_from_response(response)
 
         # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code={response.status_code},data[]='{type(data)}'")
         if not response.ok or response.status_code >= 400:
             # DEBUG: print(f"DEBUG: Was not able to fetch peers, trying alternative ...")
-            response = network.fetch_response(domain, "/api/v3/site", api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+            response = network.fetch_response(domain, "/api/v3/site", network.api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
             data = json_from_response(response)
             # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code={response.status_code},data[]='{type(data)}'")
@@ -446,7 +435,7 @@ def fetch_nodeinfo(domain: str, path: str = None) -> list:
 
         try:
             # DEBUG: print(f"DEBUG: Fetching request='{request}' from domain='{domain}' ...")
-            response = network.fetch_response(domain, request, api_headers, (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")))
+            response = network.fetch_response(domain, request, network.api_headers, (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")))
 
             data = json_from_response(response)
             # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code={response.status_code},data[]='{type(data)}'")
@@ -482,7 +471,7 @@ def fetch_wellknown_nodeinfo(domain: str) -> list:
     data = {}
 
     try:
-        response = network.fetch_response(domain, "/.well-known/nodeinfo", api_headers, (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")))
+        response = network.fetch_response(domain, "/.well-known/nodeinfo", network.api_headers, (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout")))
 
         data = json_from_response(response)
         # DEBUG: print("DEBUG: domain,response.ok,data[]:", domain, response.ok, type(data))
@@ -495,7 +484,7 @@ def fetch_wellknown_nodeinfo(domain: str) -> list:
                     # DEBUG: print("DEBUG: rel,href:", link["rel"], link["href"])
                     if link["rel"] in nodeinfo_identifier:
                         # DEBUG: print("DEBUG: Fetching nodeinfo from:", link["href"])
-                        response = fetch_url(link["href"], api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+                        response = fetch_url(link["href"], network.api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
                         data = json_from_response(response)
                         # DEBUG: print("DEBUG: href,response.ok,response.status_code:", link["href"], response.ok, response.status_code)
@@ -533,7 +522,7 @@ def fetch_generator_from_path(domain: str, path: str = "/") -> str:
 
     try:
         # DEBUG: print(f"DEBUG: Fetching path='{path}' from '{domain}' ...")
-        response = network.fetch_response(domain, path, headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = network.fetch_response(domain, path, network.headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         # DEBUG: print("DEBUG: domain,response.ok,response.status_code,response.text[]:", domain, response.ok, response.status_code, type(response.text))
         if response.ok and response.status_code < 300 and len(response.text) > 0:
index 7e3c513257dfd0cb97a29d0ebd09d080fce4a97c..f578abf5a61665ab9650308530d5a5fa6ecbf3bb 100644 (file)
@@ -29,7 +29,7 @@ def fetch_peers(domain: str) -> list:
     peers = list()
     try:
         # DEBUG: print(f"DEBUG: domain='{domain}' is Lemmy, fetching JSON ...")
-        response = network.fetch_response(domain, "/api/v3/site", fba.api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = network.fetch_response(domain, "/api/v3/site", network.api_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         data = fba.json_from_response(response)
 
index 31060654b1dd3e761413ef356ef8635d96ff90d9..b2213a5e58f23e9588de948316f9f9af9c1d093d 100644 (file)
@@ -68,7 +68,7 @@ def fetch_blocks_from_about(domain: str) -> dict:
 
     try:
         doc = bs4.BeautifulSoup(
-            network.fetch_response(domain, "/about/more", fba.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text,
+            network.fetch_response(domain, "/about/more", network.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text,
             "html.parser",
         )
     except BaseException as exception:
@@ -135,16 +135,16 @@ def fetch_blocks(domain: str, origin: str, nodeinfo_url: str):
             # handling CSRF, I've saw at least one server requiring it to access the endpoint
             # DEBUG: print("DEBUG: Fetching meta:", domain)
             meta = bs4.BeautifulSoup(
-                network.fetch_response(domain, "/", fba.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text,
+                network.fetch_response(domain, "/", network.headers, (config.get("connection_timeout"), config.get("read_timeout"))).text,
                 "html.parser",
             )
             try:
                 csrf = meta.find("meta", attrs={"name": "csrf-token"})["content"]
                 # DEBUG: print("DEBUG: Adding CSRF token:", domain, csrf)
-                reqheaders = {**fba.api_headers, **{"X-CSRF-Token": csrf}}
+                reqheaders = {**network.api_headers, **{"X-CSRF-Token": csrf}}
             except BaseException as exception:
                 # DEBUG: print("DEBUG: No CSRF token found, using normal headers:", domain, exception)
-                reqheaders = fba.api_headers
+                reqheaders = network.api_headers
 
             # DEBUG: print("DEBUG: Querying API domain_blocks:", domain)
             blocklist = network.fetch_response(domain, "/api/v1/instance/domain_blocks", reqheaders, (config.get("connection_timeout"), config.get("read_timeout"))).json()
index 4c9c782fb2f7f2f27073403249ab1f965aad99a4..cc9dd7a22d23ed51051581b8cec2eb04e90ac490 100644 (file)
@@ -33,7 +33,7 @@ def fetch_peers(domain: str) -> list:
         # DEBUG: print(f"DEBUG: domain='{domain}',mode='{mode}'")
         while True:
             try:
-                response = network.fetch_response(domain, "/api/v1/server/{mode}?start={start}&count=100", headers, (config.get("connection_timeout"), config.get("read_timeout")))
+                response = network.fetch_response(domain, "/api/v1/server/{mode}?start={start}&count=100", network.headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
                 data = fba.json_from_response(response)
                 # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code='{response.status_code}',data[]='{type(data)}'")
index 8d6414494839520c8a292f293572cf14f25c01da..d9322219a717e2a7dc4ba346180727b84b2fb543 100644 (file)
@@ -19,8 +19,20 @@ import reqto
 import requests
 
 from fba import config
+from fba import fba
 from fba import instances
 
+# HTTP headers for non-API requests
+headers = {
+    "User-Agent": config.get("useragent"),
+}
+
+# HTTP headers for API requests
+api_headers = {
+    "User-Agent"  : config.get("useragent"),
+    "Content-Type": "application/json",
+}
+
 def post_json_api(domain: str, path: str, parameter: str, extra_headers: dict = {}) -> dict:
     # DEBUG: print(f"DEBUG: domain='{domain}',path='{path}',parameter='{parameter}',extra_headers()={len(extra_headers)} - CALLED!")
     if not isinstance(domain, str):
@@ -44,7 +56,7 @@ def post_json_api(domain: str, path: str, parameter: str, extra_headers: dict =
             timeout=(config.get("connection_timeout"), config.get("read_timeout"))
         )
 
-        data = json_from_response(response)
+        data = fba.json_from_response(response)
         # DEBUG: print(f"DEBUG: response.ok={response.ok},response.status_code={response.status_code},data[]='{type(data)}'")
         if not response.ok or response.status_code >= 400:
             print(f"WARNING: Cannot query JSON API: domain='{domain}',path='{path}',parameter()={len(parameter)},response.status_code='{response.status_code}',data[]='{type(data)}'")