X-Git-Url: https://git.mxchange.org/?a=blobdiff_plain;f=fba%2Fcommands.py;h=08ff9ffeec2998bc25a73a101f15314fe7916a7d;hb=83e2848a952fb891a8c99ed5189748102b44f0ec;hp=faabb9e4c31e8336e55ce4810fb0adb63ae910d2;hpb=3ad5eb0b8994ddecadc7b7c05b2241c5d4cb0ae7;p=fba.git

diff --git a/fba/commands.py b/fba/commands.py
index faabb9e..08ff9ff 100644
--- a/fba/commands.py
+++ b/fba/commands.py
@@ -133,7 +133,7 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
         (config.get("connection_timeout"), config.get("read_timeout"))
     )
 
-    logger.debug("JSON API returned %d elements", len(fetched))
+    logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched))
     if "error_message" in fetched:
         logger.warning("API returned error_message='%s' - EXIT!", fetched["error_message"])
         return 101
@@ -148,7 +148,7 @@ def fetch_pixelfed_api(args: argparse.Namespace) -> int:
         if "domain" not in row:
             logger.warning("row='%s' does not contain element 'domain' - SKIPPED!", row)
             continue
-        elif row["domain"] is None or row["domain"] == "":
+        elif row["domain"] in [None, ""]:
             logger.debug("row[domain]='%s' is empty - SKIPPED!", row["domain"])
             continue
@@ -203,10 +203,10 @@ def fetch_bkali(args: argparse.Namespace) -> int:
     logger.debug("fetched[]='%s'", type(fetched))
     if "error_message" in fetched:
-        logger.warning("post_json_api() for 'gql.sources.bka.li' returned error message='%s", fetched["error_message"])
+        logger.warning("post_json_api() for 'gql.sources.bka.li' returned error message='%s' - EXIT!", fetched["error_message"])
         return 100
     elif isinstance(fetched["json"], dict) and "error" in fetched["json"] and "message" in fetched["json"]["error"]:
-        logger.warning("post_json_api() returned error: '%s", fetched["error"]["message"])
+        logger.warning("post_json_api() returned error: '%s' - EXIT!", fetched["json"]["error"]["message"])
         return 101
 
     rows = fetched["json"]
@@ -224,7 +224,7 @@ def fetch_bkali(args: argparse.Namespace) -> int:
         if "domain" not in entry:
             logger.warning("entry()=%d does not contain 'domain' - SKIPPED!", len(entry))
             continue
-        elif entry["domain"] is None or entry["domain"] == "":
+        elif entry["domain"] in [None, ""]:
             logger.debug("entry[domain]='%s' is empty - SKIPPED!", entry["domain"])
             continue
         elif not domain_helper.is_wanted(entry["domain"]):
@@ -254,7 +254,7 @@ def fetch_bkali(args: argparse.Namespace) -> int:
     try:
         logger.info("Fetching instances from domain='%s' ...", domain)
-        federation.fetch_instances(domain, 'tak.teleyal.blog', None, inspect.currentframe().f_code.co_name)
+        federation.fetch_instances(domain, "tak.teleyal.blog", None, inspect.currentframe().f_code.co_name)
     except network.exceptions as exception:
         logger.warning("Exception '%s' during fetching instances (fetch_bkali) from domain='%s'", type(exception), domain)
         instances.set_last_error(domain, exception)
@@ -311,7 +311,7 @@ def fetch_blocks(args: argparse.Namespace) -> int:
         if not domain_helper.is_wanted(blocker):
             logger.warning("blocker='%s' is not wanted - SKIPPED!", blocker)
             continue
-        elif instances.is_recent(blocker) and not args.force:
+        elif not args.force and instances.is_recent(blocker, "last_blocked"):
             logger.debug("blocker='%s' has been recently accessed - SKIPPED!", blocker)
             continue
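Several hunks in this commit collapse the two-step emptiness check into a single membership test. For reference, the two forms are equivalent; a minimal demonstration (plain Python, not part of the patch):

    # `value in [None, ""]` covers both the None and empty-string cases:
    for value in (None, "", "example.com"):
        explicit = value is None or value == ""
        compact = value in [None, ""]
        assert explicit == compact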
block["blocked"] in [None, ""]: logger.warning("block[blocked]='%s' is empty, blocker='%s'", block["blocked"], blocker) continue elif block["blocked"].endswith(".onion"): logger.debug("blocked='%s' is a TOR .onion domain - SKIPPED", block["blocked"]) continue - elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true": + elif block["blocked"].endswith(".i2p") and not config.get("allow_i2p_domain"): logger.debug("blocked='%s' is an I2P .onion domain - SKIPPED", block["blocked"]) continue elif block["blocked"].endswith(".arpa"): @@ -418,7 +418,7 @@ def fetch_blocks(args: argparse.Namespace) -> int: nodeinfo_url = row["nodeinfo_url"] logger.debug("Looking up instance by domain, blocked='%s'", block["blocked"]) - if block["blocked"] is None or block["blocked"] == "": + if block["blocked"] in [None, ""]: logger.debug("block[blocked]='%s' is empty - SKIPPED!", block["blocked"]) continue @@ -485,7 +485,7 @@ def fetch_observer(args: argparse.Namespace) -> int: types = list() if args.software is None: logger.info("Fetching software list ...") - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{source_domain}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -525,32 +525,50 @@ def fetch_observer(args: argparse.Namespace) -> int: logger.debug("args.software='%s' does not match software='%s' - SKIPPED!", args.software, software) continue - doc = None + items = list() try: logger.debug("Fetching table data for software='%s' ...", software) - raw = utils.fetch_url( - f"https://{source_domain}/app/views/tabledata.php?software={software}", - network.web_headers, - (config.get("connection_timeout"), config.get("read_timeout")) - ).text + raw = network.post_json_api( + f"api.{source_domain}", + "/", + json.dumps({ + "query": "{nodes(softwarename:\"" + software + "\"){domain}}" + }) + ) + logger.debug("raw[%s]()=%d", type(raw), len(raw)) + if "exception" in raw: + logger.warning("row[domain]='%s' has caused an exception: '%s' - raising again ...", row["domain"], type(raw["exception"])) + raise raw["exception"] + elif "error_message" in raw: + logger.warning("row[domain]='%s' has caused error message: '%s' - SKIPPED!", row["domain"], raw["error_message"]) + continue + elif not "data" in raw["json"]: + logger.warning("Cannot find key 'nodes' in raw[json]()=%d", len(raw["json"])) + continue + elif not "nodes" in raw["json"]["data"]: + logger.warning("Cannot find key 'nodes' in raw[json][data]()=%d", len(raw["json"]["data"])) + continue + + items = raw["json"]["data"]["nodes"] + logger.debug("items()=%d", len(items)) - doc = bs4.BeautifulSoup(raw, features="html.parser") - logger.debug("doc[]='%s'", type(doc)) except network.exceptions as exception: logger.warning("Cannot fetch software='%s' from source_domain='%s': '%s'", software, source_domain, type(exception)) continue - items = doc.findAll("a", {"class": "url"}) logger.info("Checking %d items,software='%s' ...", len(items), software) for item in items: logger.debug("item[]='%s'", type(item)) - domain = item.decode_contents() - logger.debug("domain[%s]='%s'", type(domain), domain) - domain = tidyup.domain(domain) if domain not in [None, ""] else None + if not "domain" in item: + logger.debug("item()=%d has not element 'domain'", len(item)) + continue + + logger.debug("item[domain]='%s' - BEFORE!", item["domain"]) + domain = tidyup.domain(item["domain"]) if item["domain"] not in [None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if 
domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain) continue @@ -565,7 +583,7 @@ def fetch_observer(args: argparse.Namespace) -> int: logger.debug("domain='%s' is already registered - SKIPPED!", domain) continue - logger.info("Fetching instances for domain='%s'", domain) + logger.info("Fetching instances for domain='%s',software='%s' ...", domain, software) federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name) logger.debug("Success! - EXIT!") @@ -591,7 +609,7 @@ def fetch_todon_wiki(args: argparse.Namespace) -> int: } logger.debug("Fetching domainblocks from source_domain='%s'", source_domain) - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{source_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -706,7 +724,7 @@ def fetch_cs(args: argparse.Namespace): sources.update(source_domain) logger.info("Fetching federation.md from source_domain='%s' ...", source_domain) - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{source_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -793,7 +811,7 @@ def fetch_fba_rss(args: argparse.Namespace) -> int: sources.update(domain) logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed) - response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) + response = network.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text)) if response.ok and response.status_code == 200 and len(response.text) > 0: @@ -807,7 +825,7 @@ def fetch_fba_rss(args: argparse.Namespace) -> int: domain = tidyup.domain(domain) if domain not in[None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain) continue @@ -872,7 +890,7 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int: domains = list() logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed) - response = utils.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) + response = network.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))) logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text)) if response.ok and response.status_code == 200 and len(response.text) > 0: @@ -884,15 +902,17 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int: logger.debug("entry[]='%s'", type(entry)) doc = bs4.BeautifulSoup(entry.content.value, "html.parser") logger.debug("doc[]='%s'", type(doc)) + elements = doc.findAll("a") - for element in doc.findAll("a"): - logger.debug("element[]='%s'", type(element)) + logger.debug("Checking %d element(s) ...", len(elements)) + for element in elements: + logger.debug("element[%s]='%s'", type(element), element) for href in element["href"].split(","): logger.debug("href[%s]='%s' - BEFORE!", type(href), href) domain = tidyup.domain(href) if href not in [None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: 
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain) continue @@ -935,59 +955,77 @@ def fetch_fbabot_atom(args: argparse.Namespace) -> int: def fetch_instances(args: argparse.Namespace) -> int: logger.debug("args[]='%s' - CALLED!", type(args)) - logger.debug("args.domain='%s' - checking ...", args.domain) - if not validators.domain(args.domain): - logger.warning("args.domain='%s' is not valid.", args.domain) - return 100 - elif blacklist.is_blacklisted(args.domain): - logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain) - return 101 - logger.debug("Invoking locking.acquire() ...") locking.acquire() - # Initialize values - domain = tidyup.domain(args.domain) - origin = software = None + # Init variables + rows = list() - # Fetch record - database.cursor.execute("SELECT origin, software FROM instances WHERE domain = ? LIMIT 1", [args.domain]) - row = database.cursor.fetchone() - if row is not None: - origin = row["origin"] - software = row["software"] + # Is domain or software set? + if args.domain not in [None, ""]: + logger.debug("args.domain='%s' - checking ...", args.domain) + if not validators.domain(args.domain): + logger.warning("args.domain='%s' is not valid.", args.domain) + return 100 + elif blacklist.is_blacklisted(args.domain): + logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain) + return 101 - if software_helper.is_relay(software): - logger.warning("args.domain='%s' is of software type '%s' which is not supported by this command. Please invoke fetch_relays instead.", args.domain, software) - return 102 + logger.debug("args.domain='%s' - BEFORE!", args.domain) + domain = tidyup.domain(args.domain) + logger.debug("domain='%s' - AFTER!", domain) - # Initial fetch - try: - logger.info("Fetching instances from args.domain='%s',origin='%s',software='%s' ...", domain, origin, software) - federation.fetch_instances(domain, origin, software, inspect.currentframe().f_code.co_name) - except network.exceptions as exception: - logger.warning("Exception '%s' during fetching instances (fetch_instances) from args.domain='%s'", type(exception), args.domain) - instances.set_last_error(args.domain, exception) - instances.update(args.domain) - return 100 + # Fetch record + database.cursor.execute("SELECT domain, origin, software FROM instances WHERE domain = ? LIMIT 1", [domain]) + rows = database.cursor.fetchall() + elif args.software not in [None, ""]: + logger.debug("args.software='%s' - BEFORE!", args.software) + software = software_helper.alias(args.software) + logger.debug("software='%s' - AFTER!", software) - if args.single: - logger.debug("Not fetching more instances - EXIT!") - return 0 + # Fetch records + database.cursor.execute("SELECT domain, origin, software FROM instances WHERE software = ? ORDER BY last_updated ASC", [software]) + rows = database.cursor.fetchall() + + logger.info("Checking %d entries ...", len(rows)) + for row in rows: + logger.debug("row[domain]='%s',row[origin]='%s',row[software]='%s'", row["domain"], row["origin"], row["software"]) + if row["software"] is None and instances.is_registered(row["domain"]) : + logger.warning("row[domain]='%s' has no software detected. 
@@ -935,59 +955,77 @@ def fetch_instances(args: argparse.Namespace) -> int:
 def fetch_instances(args: argparse.Namespace) -> int:
     logger.debug("args[]='%s' - CALLED!", type(args))
 
-    logger.debug("args.domain='%s' - checking ...", args.domain)
-    if not validators.domain(args.domain):
-        logger.warning("args.domain='%s' is not valid.", args.domain)
-        return 100
-    elif blacklist.is_blacklisted(args.domain):
-        logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain)
-        return 101
-
     logger.debug("Invoking locking.acquire() ...")
     locking.acquire()
 
-    # Initialize values
-    domain = tidyup.domain(args.domain)
-    origin = software = None
+    # Init variables
+    rows = list()
 
-    # Fetch record
-    database.cursor.execute("SELECT origin, software FROM instances WHERE domain = ? LIMIT 1", [args.domain])
-    row = database.cursor.fetchone()
-    if row is not None:
-        origin = row["origin"]
-        software = row["software"]
+    # Is domain or software set?
+    if args.domain not in [None, ""]:
+        logger.debug("args.domain='%s' - checking ...", args.domain)
+        if not validators.domain(args.domain):
+            logger.warning("args.domain='%s' is not valid.", args.domain)
+            return 100
+        elif blacklist.is_blacklisted(args.domain):
+            logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain)
+            return 101
 
-    if software_helper.is_relay(software):
-        logger.warning("args.domain='%s' is of software type '%s' which is not supported by this command. Please invoke fetch_relays instead.", args.domain, software)
-        return 102
+        logger.debug("args.domain='%s' - BEFORE!", args.domain)
+        domain = tidyup.domain(args.domain)
+        logger.debug("domain='%s' - AFTER!", domain)
 
-    # Initial fetch
-    try:
-        logger.info("Fetching instances from args.domain='%s',origin='%s',software='%s' ...", domain, origin, software)
-        federation.fetch_instances(domain, origin, software, inspect.currentframe().f_code.co_name)
-    except network.exceptions as exception:
-        logger.warning("Exception '%s' during fetching instances (fetch_instances) from args.domain='%s'", type(exception), args.domain)
-        instances.set_last_error(args.domain, exception)
-        instances.update(args.domain)
-        return 100
+        # Fetch record
+        database.cursor.execute("SELECT domain, origin, software FROM instances WHERE domain = ? LIMIT 1", [domain])
+        rows = database.cursor.fetchall()
+    elif args.software not in [None, ""]:
+        logger.debug("args.software='%s' - BEFORE!", args.software)
+        software = software_helper.alias(args.software)
+        logger.debug("software='%s' - AFTER!", software)
 
-    if args.single:
-        logger.debug("Not fetching more instances - EXIT!")
-        return 0
+        # Fetch records
+        database.cursor.execute("SELECT domain, origin, software FROM instances WHERE software = ? ORDER BY last_updated ASC", [software])
+        rows = database.cursor.fetchall()
+
+    logger.info("Checking %d entries ...", len(rows))
+    for row in rows:
+        logger.debug("row[domain]='%s',row[origin]='%s',row[software]='%s'", row["domain"], row["origin"], row["software"])
+        if row["software"] is None and instances.is_registered(row["domain"]):
+            logger.warning("row[domain]='%s' has no software detected. You can try to run ./fba.py update_nodeinfo --domain=%s --force to get it updated - SKIPPED!", row["domain"], row["domain"])
+            continue
+        elif software_helper.is_relay(row["software"]) and instances.is_registered(row["domain"]):
+            logger.warning("row[domain]='%s' is of software type '%s' which is not supported by this command. Please invoke fetch_relays instead - SKIPPED!", row["domain"], row["software"])
+            continue
+        elif not args.force and args.software not in [None, ""] and instances.is_recent(row["domain"]):
+            logger.debug("row[domain]='%s' has been recently crawled - SKIPPED!", row["domain"])
+            continue
+
+        # Initial fetch
+        try:
+            logger.info("Fetching instances from row[domain]='%s',row[origin]='%s',row[software]='%s' ...", row["domain"], row["origin"], row["software"])
+            federation.fetch_instances(row["domain"], row["origin"], row["software"], inspect.currentframe().f_code.co_name)
+        except network.exceptions as exception:
+            logger.warning("Exception '%s' during fetching instances (fetch_instances) from row[domain]='%s'", type(exception), row["domain"])
+            instances.set_last_error(row["domain"], exception)
+            instances.update(row["domain"])
+            continue
+
+        if args.single:
+            logger.debug("Not fetching more instances - BREAK!")
+            break
 
     # Loop through some instances
     database.cursor.execute(
-        "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_instance")]
+        "SELECT domain, origin, software \
+FROM instances \
+WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb', 'smithereen', 'vebinet') \
+AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) \
+ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_instance")]
     )
 
     rows = database.cursor.fetchall()
     logger.info("Checking %d entries ...", len(rows))
     for row in rows:
-        logger.debug("row[domain]='%s'", row["domain"])
-        if row["domain"] == "":
-            logger.debug("row[domain] is empty - SKIPPED!")
-            continue
-
         logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
         domain = row["domain"].encode("idna").decode("utf-8")
         logger.debug("domain='%s' - AFTER!", domain)
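The loop above normalizes internationalized hostnames to their punycode form via Python's built-in idna codec before fetching. For example:

    # Unicode labels are encoded per IDNA, ASCII labels pass through:
    domain = "münchen.example".encode("idna").decode("utf-8")
    print(domain)  # xn--mnchen-3ya.example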
@@ -997,8 +1035,8 @@ def fetch_instances(args: argparse.Namespace) -> int:
             continue
 
         try:
-            logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", domain, row["origin"], row["software"], row["nodeinfo_url"])
-            federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
+            logger.info("Fetching instances for domain='%s',origin='%s',software='%s' ...", domain, row["origin"], row["software"])
+            federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name)
         except network.exceptions as exception:
             logger.warning("Exception '%s' during fetching instances (fetch_instances) from domain='%s'", type(exception), domain)
             instances.set_last_error(domain, exception)
@@ -1069,7 +1107,7 @@ def fetch_txt(args: argparse.Namespace) -> int:
     logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
     for row in blocklists.txt_files:
         logger.debug("Fetching row[url]='%s' ...", row["url"])
-        response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+        response = network.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
 
         logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
         if response.ok and response.status_code == 200 and response.text != "":
@@ -1080,25 +1118,21 @@ def fetch_txt(args: argparse.Namespace) -> int:
             for domain in domains:
                 logger.debug("domain='%s' - BEFORE!", domain)
                 domain = tidyup.domain(domain) if domain not in [None, ""] else None
-                logger.debug("domain='%s' - AFTER!", domain)
-                if domain is None or domain == "":
+
+                if domain in [None, ""]:
                     logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
                     continue
                 elif not domain_helper.is_wanted(domain):
                     logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
                     continue
-                elif instances.is_recent(domain):
-                    logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
+                elif not args.force and instances.is_registered(domain):
+                    logger.debug("domain='%s' is already registered - SKIPPED!", domain)
                     continue
 
-                logger.debug("Processing domain='%s',row[blocker]='%s'", domain, row["blocker"])
-                processed = processing.instance(domain, row["blocker"], inspect.currentframe().f_code.co_name)
-
+                logger.debug("Processing domain='%s',row[blocker]='%s' ...", domain, row["blocker"])
+                processed = processing.instance(domain, row["blocker"], inspect.currentframe().f_code.co_name, force=args.force)
                 logger.debug("processed='%s'", processed)
-                if not processed:
-                    logger.debug("domain='%s' was not generically processed - SKIPPED!", domain)
-                    continue
 
     logger.debug("Success! - EXIT!")
     return 0
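fetch_txt() downloads plain-text blocklists (one domain per line) and runs each entry through tidyup and processing. A compact sketch of the download-and-split step with plain requests; the URL is a placeholder for the entries in blocklists.txt_files:

    import requests

    url = "https://example.org/blocklist.txt"  # placeholder
    response = requests.get(url, timeout=(5, 10))

    domains = []
    if response.ok and response.status_code == 200 and response.text != "":
        # One domain per line, blanks skipped - mirrors the guard above.
        domains = [line.strip() for line in response.text.splitlines() if line.strip()]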
- EXIT!") return 0 @@ -1118,7 +1152,7 @@ def fetch_fedipact(args: argparse.Namespace) -> int: sources.update(source_domain) logger.info("Fetching / from source_domain='%s' ...", source_domain) - response = utils.fetch_url( + response = network.fetch_url( f"https://{source_domain}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -1138,7 +1172,7 @@ def fetch_fedipact(args: argparse.Namespace) -> int: domain = tidyup.domain(row.contents[0]) if row.contents[0] not in [None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain) continue @@ -1177,7 +1211,7 @@ def fetch_joinmobilizon(args: argparse.Namespace) -> int: sources.update(source_domain) logger.info("Fetching instances from source_domain='%s' ...", source_domain) - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{source_domain}/api/v1/instances", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -1225,7 +1259,7 @@ def fetch_joinmisskey(args: argparse.Namespace) -> int: sources.update(source_domain) logger.info("Fetching instances.json from source_domain='%s' ...", source_domain) - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{source_domain}/instances.json", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -1265,17 +1299,23 @@ def recheck_obfuscation(args: argparse.Namespace) -> int: locking.acquire() if isinstance(args.domain, str) and args.domain != "" and domain_helper.is_wanted(args.domain): + logger.debug("Fetching record for args.domain='%s' ...", args.domain) database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND domain = ?", [args.domain]) elif isinstance(args.software, str) and args.software != "" and validators.domain(args.software) == args.software: + logger.debug("Fetching records for args.software='%s' ...", args.software) database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND software = ?", [args.software]) else: + logger.debug("Fetching records where domains have obfuscated block entries ...") database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 OR has_obfuscation IS NULL") rows = database.cursor.fetchall() logger.info("Checking %d domains ...", len(rows)) for row in rows: logger.debug("Fetching peers from domain='%s',software='%s',nodeinfo_url='%s' ...", row["domain"], row["software"], row["nodeinfo_url"]) - if (args.force is None or not args.force) and args.domain is None and args.software is None and instances.is_recent(row["domain"], "last_blocked"): + if blacklist.is_blacklisted(row["domain"]): + logger.debug("row[domain]='%s' is blacklisted - SKIPPED!", row["domain"]) + continue + elif (args.force is None or not args.force) and args.domain is None and args.software is None and instances.is_recent(row["domain"], "last_blocked"): logger.debug("row[domain]='%s' has been recently checked, args.force[]='%s' - SKIPPED!", row["domain"], type(args.force)) continue @@ -1284,6 +1324,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int: logger.debug("blocking()=%d", len(blocking)) if len(blocking) == 0: + logger.debug("Empty blocking list, trying individual fetch_blocks() for row[software]='%s' ...", row["software"]) if 
row["software"] == "pleroma": logger.debug("domain='%s',software='%s'", row["domain"], row["software"]) blocking = pleroma.fetch_blocks(row["domain"]) @@ -1304,7 +1345,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int: # c.s isn't part of oliphant's "hidden" blocklists logger.debug("row[domain]='%s'", row["domain"]) - if row["domain"] != "chaos.social" and not software_helper.is_relay(row["software"]) and not blocklists.has(row["domain"]): + if row["domain"] != "chaos.social" and row["software"] is not None and not software_helper.is_relay(row["software"]) and not blocklists.has(row["domain"]): logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking)) instances.set_last_blocked(row["domain"]) instances.set_total_blocks(row["domain"], blocking) @@ -1323,7 +1364,7 @@ def recheck_obfuscation(args: argparse.Namespace) -> int: elif block["blocked"].endswith(".onion"): logger.debug("blocked='%s' is a TOR onion domain name - SKIPPED!", block["blocked"]) continue - elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true": + elif block["blocked"].endswith(".i2p") and not config.get("allow_i2p_domain"): logger.debug("blocked='%s' is an I2P onion domain name - SKIPPED!", block["blocked"]) continue elif block["blocked"].endswith(".arpa"): @@ -1440,7 +1481,7 @@ def fetch_fedilist(args: argparse.Namespace) -> int: domain = tidyup.domain(row["hostname"]) if row["hostname"] not in [None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain(): row[hostname]='%s' - SKIPPED!", domain, row["hostname"]) continue @@ -1482,12 +1523,18 @@ def update_nodeinfo(args: argparse.Namespace) -> int: elif args.no_software: logger.info("Fetching domains with no software type detected ...") database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL ORDER BY last_updated ASC") + elif args.with_software: + logger.info("Fetching domains with any software type detected ...") + database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NOT NULL ORDER BY last_updated ASC") elif args.no_auto: logger.info("Fetching domains with other detection mode than AUTO_DISOVERY being set ...") database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode IS NOT NULL AND detection_mode != 'AUTO_DISCOVERY' ORDER BY last_updated ASC") elif args.no_detection: logger.info("Fetching domains with no detection mode being set ...") database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode IS NULL ORDER BY last_updated ASC") + elif args.same: + logger.info("Fetching domains with domain name and software being the same ...") + database.cursor.execute("SELECT domain, software FROM instances WHERE domain=software ORDER BY last_updated ASC") else: logger.info("Fetching domains for recently updated ...") database.cursor.execute("SELECT domain, software FROM instances ORDER BY last_updated ASC") @@ -1498,7 +1545,22 @@ def update_nodeinfo(args: argparse.Namespace) -> int: cnt = 0 for row in domains: logger.debug("row[]='%s'", type(row)) - if not args.force and instances.is_recent(row["domain"], "last_nodeinfo"): + if row["domain"].endswith(".i2p") and not config.get("allow_i2p_domain"): + logger.debug("row[domain]='%s' is an I2P address - SKIPPED", row["domain"]) + continue + elif row["domain"].endswith(".onion"): + logger.debug("row[domain]='%s' is a TOR .onion 
domain - SKIPPED", row["domain"]) + continue + elif row["domain"].endswith(".arpa"): + logger.debug("row[domain]='%s' is a reverse IP address - SKIPPED", row["domain"]) + continue + elif row["domain"].endswith(".tld"): + logger.debug("row[domain]='%s' is a fake domain - SKIPPED", row["domain"]) + continue + elif blacklist.is_blacklisted(row["domain"]): + logger.debug("row[domain]='%s' is blacklisted - SKIPPED!", row["domain"]) + continue + elif not args.force and instances.is_recent(row["domain"], "last_nodeinfo"): logger.debug("row[domain]='%s' has been recently checked - SKIPPED!", row["domain"]) continue @@ -1559,7 +1621,7 @@ def fetch_instances_social(args: argparse.Namespace) -> int: headers=headers, timeout=(config.get("connection_timeout"), config.get("read_timeout")) ) - logger.debug("fetched[]='%s'", type(fetched)) + logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched)) if "error_message" in fetched: logger.warning("Error during fetching API result: '%s' - EXIT!", fetched["error_message"]) @@ -1604,7 +1666,7 @@ def fetch_instances_social(args: argparse.Namespace) -> int: logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain) continue - logger.info("Fetching instances from domain='%s'", domain) + logger.info("Fetching instances from domain='%s' ...", domain) federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name) logger.debug("Success! - EXIT!") @@ -1632,7 +1694,7 @@ def fetch_relaylist(args: argparse.Namespace) -> int: {}, (config.get("connection_timeout"), config.get("read_timeout")) ) - logger.debug("fetched[]='%s'", type(fetched)) + logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched)) if "error_message" in fetched: logger.warning("Error during fetching API result: '%s' - EXIT!", fetched["error_message"]) @@ -1686,24 +1748,31 @@ def fetch_relays(args: argparse.Namespace) -> int: locking.acquire() if args.domain is not None and args.domain != "": + logger.debug("Fetching instances record for args.domain='%s' ...", args.domain) database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND domain = ? LIMIT 1", [args.domain]) elif args.software is not None and args.software != "": - database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND software = ?", [args.software]) + logger.debug("Fetching instances records for args.software='%s' ...", args.software) + database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND nodeinfo_url IS NOT NULL AND software = ? 
ORDER BY last_updated DESC", [args.software]) else: - database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay')") + logger.debug("Fetch all relay instances ...") + database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND nodeinfo_url IS NOT NULL ORDER BY last_updated DESC") domains = list() rows = database.cursor.fetchall() logger.info("Checking %d relays ...", len(rows)) for row in rows: - logger.debug("row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"]) - peers = list() + logger.debug("row[domain]='%s',row[software]='%s'", row["domain"], row["software"]) if not args.force and instances.is_recent(row["domain"]): logger.debug("row[domain]='%s' has been recently fetched - SKIPPED!", row["domain"]) continue + elif row["nodeinfo_url"] is None: + logger.warning("row[domain]='%s' has empty nodeinfo_url but this is required - SKIPPED!", row["domain"]) + continue + peers = list() try: + logger.debug("row[domain]='%s',row[software]='%s' - checking ....", row["domain"], row["software"]) if row["software"] == "pub-relay": logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"]) raw = network.fetch_api_url( @@ -1732,7 +1801,7 @@ def fetch_relays(args: argparse.Namespace) -> int: continue else: logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"]) - raw = utils.fetch_url( + raw = network.fetch_url( f"https://{row['domain']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")) @@ -1780,7 +1849,7 @@ def fetch_relays(args: argparse.Namespace) -> int: domain = tidyup.domain(domain) if domain not in[None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"]) continue elif domain not in peers: @@ -1822,7 +1891,7 @@ def fetch_relays(args: argparse.Namespace) -> int: domain = tidyup.domain(domain) if domain not in[None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"]) continue elif domain not in peers: @@ -1846,7 +1915,7 @@ def fetch_relays(args: argparse.Namespace) -> int: domain = tidyup.domain(domain) if domain not in[None, ""] else None logger.debug("domain='%s' - AFTER!", domain) - if domain is None or domain == "": + if domain in [None, ""]: logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"]) continue elif domain not in peers: