From 9ae0a381262888325c5b61169945eecad115e9af Mon Sep 17 00:00:00 2001 From: =?utf8?q?Roland=20H=C3=A4der?= Date: Tue, 4 Jul 2023 20:28:01 +0200 Subject: [PATCH] Renaming season: - renamed table/model file 'apis' to 'sources' as wikis are not APIs but all are (instance) sources - renamed api_domain to source_domain --- blocks_empty.db | Bin 40960 -> 40960 bytes fba/commands.py | 158 ++++++++++++++--------------- fba/models/__init__.py | 2 +- fba/models/{apis.py => sources.py} | 36 +++---- 4 files changed, 98 insertions(+), 98 deletions(-) rename fba/models/{apis.py => sources.py} (61%) diff --git a/blocks_empty.db b/blocks_empty.db index 07daaf4a11101d32ba8ef57b150ac0924296e905..9561b71031290f936888cc21d583f4d1d4601852 100644 GIT binary patch delta 198 zcmZoTz|?SnX@ayMGXnzy7ZAgM-$Wf_d1eMZ?}@y8ix@aL-5K~JINkX*xr_Mn`E0lf zxa~P_Z)`ltVc#go#4au^&DdC3l9-f}TAW{6l$=@&rI?+ATpdGP6+#@Hd|VZjprT3& z8eE)E&g83nE`kv8_>}zI#LPS;O)kyNw>giqumIiCKC!WxO_!NX+);Y6F24j+WpTVA N)F_tC5&SDA0000BH(LMz delta 177 zcmZoTz|?SnX@ayMBLf2i7ZAgM&qN(#c}503?}@zp0Sug+w;A{&_!jZ2a~JXD^Vx6} zaNBd<-Yh5(%enbB=W!O}MkXe9aZypm#`KcJq@2{mg3MwNW_Av8bqsM;2yt}saaB+P z2`MRPaB+fIliN65M1XXBN`7u)W}cEJm!>*1o4B>;WPN@KVUW^zLlDUVbP@;1S(_vH HS4;o^4m~Rt diff --git a/fba/commands.py b/fba/commands.py index 95484ae..38990bd 100644 --- a/fba/commands.py +++ b/fba/commands.py @@ -43,9 +43,9 @@ from fba.helpers import tidyup from fba.http import federation from fba.http import network -from fba.models import apis from fba.models import blocks from fba.models import instances +from fba.models import sources from fba.networks import friendica from fba.networks import lemmy @@ -101,20 +101,20 @@ def check_nodeinfo(args: argparse.Namespace) -> int: def fetch_pixelfed_api(args: argparse.Namespace) -> int: logger.debug("args[]='%s' - CALLED!", type(args)) - # No CSRF by default, you don't have to add network.api_headers by yourself here + # No CSRF by default, you don't have to add network.source_headers by yourself here headers = tuple() - api_domain = "pixelfed.org" + source_domain = "pixelfed.org" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) try: - logger.debug("Checking CSRF from api_domain='%s' ...", api_domain) - headers = csrf.determine(api_domain, dict()) + logger.debug("Checking CSRF from source_domain='%s' ...", source_domain) + headers = csrf.determine(source_domain, dict()) except network.exceptions as exception: logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__) return list() @@ -122,7 +122,7 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int: try: logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers)) fetched = network.get_json_api( - api_domain, + source_domain, "/api/v1/servers/all.json?scope=All&country=all&language=all", headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -172,19 +172,19 @@ def fetch_bkali(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "gql.apis.bka.li" - if apis.is_recent(api_domain): - 
logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "gql.sources.bka.li" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) domains = list() try: - logger.info("Fetching domainlist from api_domain='%s' ...", api_domain) + logger.info("Fetching domainlist from source_domain='%s' ...", source_domain) fetched = network.post_json_api( - api_domain, + source_domain, "/v1/graphql", json.dumps({ "query": "query domainlist {nodeinfo(order_by: {domain: asc}) {domain}}" @@ -193,7 +193,7 @@ def fetch_bkali(args: argparse.Namespace) -> int: logger.debug("fetched[]='%s'", type(fetched)) if "error_message" in fetched: - logger.warning("post_json_api() for 'gql.apis.bka.li' returned error message='%s", fetched["error_message"]) + logger.warning("post_json_api() for 'gql.sources.bka.li' returned error message='%s", fetched["error_message"]) return 100 elif isinstance(fetched["json"], dict) and "error" in fetched["json"] and "message" in fetched["json"]["error"]: logger.warning("post_json_api() returned error: '%s", fetched["error"]["message"]) @@ -436,19 +436,19 @@ def fetch_observer(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "fediverse.observer" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "fediverse.observer" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) types = list() if args.software is None: logger.info("Fetching software list ...") raw = utils.fetch_url( - f"https://{api_domain}", + f"https://{source_domain}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) ).text @@ -484,7 +484,7 @@ def fetch_observer(args: argparse.Namespace) -> int: try: logger.debug("Fetching table data for software='%s' ...", software) raw = utils.fetch_url( - f"https://{api_domain}/app/views/tabledata.php?software={software}", + f"https://{source_domain}/app/views/tabledata.php?software={software}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) ).text @@ -493,7 +493,7 @@ def fetch_observer(args: argparse.Namespace) -> int: doc = bs4.BeautifulSoup(raw, features="html.parser") logger.debug("doc[]='%s'", type(doc)) except network.exceptions as exception: - logger.warning("Cannot fetch software='%s' from api_domain='%s': '%s'", software, api_domain, type(exception)) + logger.warning("Cannot fetch software='%s' from source_domain='%s': '%s'", software, source_domain, type(exception)) continue items = doc.findAll("a", {"class": "url"}) @@ -529,20 +529,20 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "wiki.todon.eu" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has 
recently being accessed - EXIT!", api_domain) + source_domain = "wiki.todon.eu" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) blocklist = { "silenced": list(), "reject": list(), } - raw = utils.fetch_url(f"https://{api_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text + raw = utils.fetch_url(f"https://{source_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw)) doc = bs4.BeautifulSoup(raw, "html.parser") @@ -637,15 +637,15 @@ def fetch_cs(args: argparse.Namespace): "reject" : list(), } - api_domain = "raw.githubusercontent.com" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "raw.githubusercontent.com" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) - raw = utils.fetch_url(f"https://{api_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text + raw = utils.fetch_url(f"https://{source_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw)) doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features="html.parser") @@ -718,12 +718,12 @@ def fetch_fba_rss(args: argparse.Namespace) -> int: components = urlparse(args.feed) - if apis.is_recent(components.netloc): + if sources.is_recent(components.netloc): logger.info("API from components.netloc='%s' has recently being accessed - EXIT!", components.netloc) return 0 else: logger.debug("components.netloc='%s' has not been recently used, marking ...", components.netloc) - apis.update(components.netloc) + sources.update(components.netloc) logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed) response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) @@ -779,15 +779,15 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "ryana.agency" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "ryana.agency" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) - feed = 
f"https://{api_domain}/users/fba/feed.atom" + feed = f"https://{source_domain}/users/fba/feed.atom" domains = list() @@ -837,7 +837,7 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int: logger.debug("domain='%s'", domain) try: logger.info("Fetching instances from domain='%s' ...", domain) - federation.fetch_instances(domain, api_domain, None, inspect.currentframe().f_code.co_name) + federation.fetch_instances(domain, source_domain, None, inspect.currentframe().f_code.co_name) except network.exceptions as exception: logger.warning("Exception '%s' during fetching instances (fetch_fbabot_atom) from domain='%s'", type(exception), domain) instances.set_last_error(domain, exception) @@ -906,16 +906,16 @@ def fetch_oliphant(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "codeberg.org" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "codeberg.org" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) # Base URL - base_url = f"https://{api_domain}/oliphant/blocklists/raw/branch/main/blocklists" + base_url = f"https://{source_domain}/oliphant/blocklists/raw/branch/main/blocklists" # URLs to fetch blocklists = ( @@ -1122,16 +1122,16 @@ def fetch_fedipact(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "fedipact.online" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "fedipact.online" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) response = utils.fetch_url( - f"https://{api_domain}", + f"https://{source_domain}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) ) @@ -1175,16 +1175,16 @@ def fetch_joinfediverse(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "joinfediverse.wiki" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "joinfediverse.wiki" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) raw = utils.fetch_url( - f"https://{api_domain}/FediBlock", + f"https://{source_domain}/FediBlock", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) ).text @@ -1458,18 +1458,18 @@ def fetch_fedilist(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") 
locking.acquire() - api_domain = "demo.fedilist.com" - if apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + source_domain = "demo.fedilist.com" + if sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) - url = f"http://{api_domain}/instance/csv?onion=not" + url = f"http://{source_domain}/instance/csv?onion=not" if args.software is not None and args.software != "": logger.debug("args.software='%s'", args.software) - url = f"http://{api_domain}/instance/csv?software={args.software}&onion=not" + url = f"http://{source_domain}/instance/csv?software={args.software}&onion=not" logger.info("Fetching url='%s' ...", url) response = reqto.get( @@ -1561,24 +1561,24 @@ def fetch_instances_social(args: argparse.Namespace) -> int: logger.debug("Invoking locking.acquire() ...") locking.acquire() - api_domain = "instances.social" + source_domain = "instances.social" if config.get("instances_social_api_key") == "": logger.error("API key not set. Please set in your config.json file.") return 1 - elif apis.is_recent(api_domain): - logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain) + elif sources.is_recent(source_domain): + logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain) return 0 else: - logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain) - apis.update(api_domain) + logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain) + sources.update(source_domain) headers = { "Authorization": f"Bearer {config.get('instances_social_api_key')}", } fetched = network.get_json_api( - api_domain, + source_domain, "/api/1.0/instances/list?count=0&sort_by=name", headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -1624,7 +1624,7 @@ def fetch_instances_social(args: argparse.Namespace) -> int: continue logger.info("Fetching instances from domain='%s'", domain) - federation.fetch_instances(domain, api_domain, None, inspect.currentframe().f_code.co_name) + federation.fetch_instances(domain, source_domain, None, inspect.currentframe().f_code.co_name) logger.debug("Success! - EXIT!") return 0 diff --git a/fba/models/__init__.py b/fba/models/__init__.py index af51119..f72332a 100644 --- a/fba/models/__init__.py +++ b/fba/models/__init__.py @@ -14,8 +14,8 @@ # along with this program. If not, see . __all__ = [ - 'apis', 'blocks', 'error_log', 'instances', + 'sources', ] diff --git a/fba/models/apis.py b/fba/models/sources.py similarity index 61% rename from fba/models/apis.py rename to fba/models/sources.py index 97ff453..55ace81 100644 --- a/fba/models/apis.py +++ b/fba/models/sources.py @@ -26,48 +26,48 @@ from fba.helpers import domain as domain_helper logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -def fetch (api_domain: str) -> dict: - logger.debug("api_domain='%s' - CALLED!", api_domain) - domain_helper.raise_on(api_domain) +def fetch (source_domain: str) -> dict: + logger.debug("source_domain='%s' - CALLED!", source_domain) + domain_helper.raise_on(source_domain) - database.cursor.execute("SELECT * FROM apis WHERE api_domain = ? 
LIMIT 1", [api_domain]) + database.cursor.execute("SELECT * FROM sources WHERE source_domain = ? LIMIT 1", [source_domain]) row = database.cursor.fetchone() logger.debug("row[]='%s' - EXIT!", type(row)) return row -def is_recent(api_domain: str) -> bool: - logger.debug("api_domain='%s' - CALLED!", api_domain) - domain_helper.raise_on(api_domain) +def is_recent(source_domain: str) -> bool: + logger.debug("source_domain='%s' - CALLED!", source_domain) + domain_helper.raise_on(source_domain) is_recent = False - row = fetch(api_domain) + row = fetch(source_domain) logger.debug("row[]='%s'", type(row)) if row is not None: - logger.debug("api_domain='%s',row[last_accessed]=%d", api_domain, row["last_accessed"]) - is_recent = (time.time() - row["last_accessed"]) <= config.get("api_last_access") + logger.debug("source_domain='%s',row[last_accessed]=%d", source_domain, row["last_accessed"]) + is_recent = (time.time() - row["last_accessed"]) <= config.get("source_last_access") logger.debug("is_recent='%s' - EXIT!", is_recent) return is_recent -def update (api_domain: str): - logger.debug("api_domain='%s' - CALLED!", api_domain) - domain_helper.raise_on(api_domain) +def update (source_domain: str): + logger.debug("source_domain='%s' - CALLED!", source_domain) + domain_helper.raise_on(source_domain) - row = fetch(api_domain) + row = fetch(source_domain) logger.debug("row[]='%s'", type(row)) if row is None: # Add instance - database.cursor.execute("INSERT INTO apis (api_domain, last_accessed) VALUES (?, ?)", [ - api_domain, + database.cursor.execute("INSERT INTO sources (source_domain, last_accessed) VALUES (?, ?)", [ + source_domain, time.time() ]) else: # Update last_accessed - database.cursor.execute("UPDATE apis SET last_accessed = ? WHERE api_domain = ? LIMIT 1", [ + database.cursor.execute("UPDATE sources SET last_accessed = ? WHERE source_domain = ? LIMIT 1", [ time.time(), - api_domain + source_domain ]) logger.debug("EXIT!") -- 2.39.5