(config.get("connection_timeout"), config.get("read_timeout"))
)
- logger.debug("JSON API returned %d elements", len(fetched))
+ logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched))
if "error_message" in fetched:
logger.warning("API returned error_message='%s' - EXIT!", fetched["error_message"])
return 101
if "domain" not in row:
logger.warning("row='%s' does not contain element 'domain' - SKIPPED!", row)
continue
- elif row["domain"] == "":
- logger.debug("row[domain] is empty - SKIPPED!")
+ elif row["domain"] in [None, ""]:
+ logger.debug("row[domain]='%s' is empty - SKIPPED!", row["domain"])
continue
logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
if "domain" not in entry:
logger.warning("entry()=%d does not contain 'domain' - SKIPPED!", len(entry))
continue
- elif entry["domain"] == "":
- logger.debug("entry[domain] is empty - SKIPPED!")
+ elif entry["domain"] in [None, ""]:
+ logger.debug("entry[domain]='%s' is empty - SKIPPED!", entry["domain"])
continue
elif not domain_helper.is_wanted(entry["domain"]):
logger.debug("entry[domain]='%s' is not wanted - SKIPPED!", entry["domain"])
try:
logger.info("Fetching instances from domain='%s' ...", domain)
- federation.fetch_instances(domain, 'tak.teleyal.blog', None, inspect.currentframe().f_code.co_name)
+ federation.fetch_instances(domain, "tak.teleyal.blog", None, inspect.currentframe().f_code.co_name)
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching instances (fetch_bkali) from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)
database.cursor.execute(
"SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC", [args.software]
)
- elif args.force:
- # Re-check all
- logger.debug("Re-checking all instances ...")
+ elif args.only_none:
+ # Check only entries with total_blocked=None
database.cursor.execute(
- "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC"
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND total_blocks IS NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC"
)
else:
# Re-check after "timeout" (aka. minimum interval)
database.cursor.execute(
- "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND (last_blocked IS NULL OR last_blocked < ?) AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_block")]
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_response_time ASC, last_updated ASC"
)
rows = database.cursor.fetchall()
if not domain_helper.is_wanted(blocker):
logger.warning("blocker='%s' is not wanted - SKIPPED!", blocker)
continue
+ elif not args.force and instances.is_recent(blocker, "last_blocked"):
+ logger.debug("blocker='%s' has been recently accessed - SKIPPED!", blocker)
+ continue
logger.debug("Setting last_blocked,has_obfuscation=false for blocker='%s' ...", blocker)
instances.set_last_blocked(blocker)
# c.s isn't part of oliphant's "hidden" blocklists
if blocker == "chaos.social" or software_helper.is_relay(software) or blocklists.has(blocker):
- logger.debug("Skipping blocker='%s', run ./fba.py fetch_cs or fetch_oliphant instead!", blocker)
+ logger.debug("Skipping blocker='%s', run ./fba.py fetch_cs, fetch_oliphant, fetch_csv instead!", blocker)
continue
logger.debug("Invoking federation.fetch_blocks(%s) ...", blocker)
logger.debug("blocker='%s',software='%s' - fetching blocklist ...", blocker, software)
if software == "pleroma":
blocking = pleroma.fetch_blocks(blocker)
- logger.info("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
elif software == "mastodon":
blocking = mastodon.fetch_blocks(blocker)
- logger.info("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
elif software == "lemmy":
blocking = lemmy.fetch_blocks(blocker)
- logger.info("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
elif software == "friendica":
blocking = friendica.fetch_blocks(blocker)
- logger.info("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
elif software == "misskey":
blocking = misskey.fetch_blocks(blocker)
- logger.info("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
else:
logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)
instances.set_total_blocks(blocker, blocking)
blockdict = list()
+ deobfuscated = obfuscated = 0
logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
for block in blocking:
block["reason"] = tidyup.reason(block["reason"]) if block["reason"] is not None and block["reason"] != "" else None
logger.debug("blocked='%s',reason='%s' - AFTER!", block["blocked"], block["reason"])
- if block["blocked"] == "":
- logger.warning("blocked is empty, blocker='%s'", blocker)
+ if block["blocked"] in [None, ""]:
+ logger.warning("block[blocked]='%s' is empty, blocker='%s'", block["blocked"], blocker)
continue
elif block["blocked"].endswith(".onion"):
logger.debug("blocked='%s' is a TOR .onion domain - SKIPPED", block["blocked"])
continue
+ elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true":
+ logger.debug("blocked='%s' is an I2P .onion domain - SKIPPED", block["blocked"])
+ continue
elif block["blocked"].endswith(".arpa"):
logger.debug("blocked='%s' is a reverse IP address - SKIPPED", block["blocked"])
continue
continue
elif block["blocked"].find("*") >= 0:
logger.debug("blocker='%s' uses obfuscated domains", blocker)
+ instances.set_has_obfuscation(blocker, True)
+ obfuscated = obfuscated + 1
# Some friendica servers also obscure domains without hash
- row = instances.deobfuscate("*", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("*", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
logger.warning("Cannot deobfuscate blocked='%s',blocker='%s',software='%s' - SKIPPED!", block["blocked"], blocker, software)
- instances.set_has_obfuscation(blocker, True)
continue
+ deobfuscated = deobfuscated + 1
block["blocked"] = row["domain"]
origin = row["origin"]
nodeinfo_url = row["nodeinfo_url"]
elif block["blocked"].find("?") >= 0:
logger.debug("blocker='%s' uses obfuscated domains", blocker)
+ instances.set_has_obfuscation(blocker, True)
+ obfuscated = obfuscated + 1
# Some obscure them with question marks, not sure if that's dependent on version or not
- row = instances.deobfuscate("?", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("?", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
logger.warning("Cannot deobfuscate blocked='%s',blocker='%s',software='%s' - SKIPPED!", block["blocked"], blocker, software)
- instances.set_has_obfuscation(blocker, True)
continue
+ deobfuscated = deobfuscated + 1
block["blocked"] = row["domain"]
origin = row["origin"]
nodeinfo_url = row["nodeinfo_url"]
- logger.debug("Looking up instance by domainm, blocked='%s'", block["blocked"])
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
+ logger.debug("Looking up instance by domain, blocked='%s'", block["blocked"])
+ if block["blocked"] in [None, ""]:
+ logger.debug("block[blocked]='%s' is empty - SKIPPED!", block["blocked"])
continue
logger.debug("block[blocked]='%s' - BEFORE!", block["blocked"])
logger.debug("Invoking cookies.clear(%s) ...", block["blocked"])
cookies.clear(block["blocked"])
- logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
- if instances.has_pending(blocker):
- logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update(blocker)
+ logger.info("blocker='%s' has %d obfuscated domain(s) and %d of them could be deobfuscated.", blocker, obfuscated, deobfuscated)
+ instances.set_obfuscated_blocks(blocker, obfuscated)
+
+ logger.debug("Flushing updates for blocker='%s' ...", blocker)
+ instances.update(blocker)
logger.debug("Invoking commit() ...")
database.connection.commit()
logger.info("Fetching %d different table data ...", len(types))
for software in types:
- logger.debug("software='%s' - BEFORE!", software)
+ logger.debug("software='%s'", software)
+
if args.software is not None and args.software != software:
logger.debug("args.software='%s' does not match software='%s' - SKIPPED!", args.software, software)
continue
for item in items:
logger.debug("item[]='%s'", type(item))
domain = item.decode_contents()
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ logger.debug("domain[%s]='%s'", type(domain), domain)
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
continue
logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
- software = software_helper.alias(software)
logger.info("Fetching instances for domain='%s'", domain)
federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
for item in rss.items:
logger.debug("item[%s]='%s'", type(item), item)
domain = item.link.split("=")[1]
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
continue
logger.debug("entry[]='%s'", type(entry))
doc = bs4.BeautifulSoup(entry.content.value, "html.parser")
logger.debug("doc[]='%s'", type(doc))
+
for element in doc.findAll("a"):
logger.debug("element[]='%s'", type(element))
for href in element["href"].split(","):
logger.debug("href[%s]='%s' - BEFORE!", type(href), href)
- domain = tidyup.domain(href) if href != None and href != "" else None
+ domain = tidyup.domain(href) if href not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
continue
origin = row["origin"]
software = row["software"]
- if software_helper.is_relay(software):
- logger.warning("args.domain='%s' is of software type '%s' which is not supported by this command. Please invoke fetch_relays instead.", args.domain, software)
+ if software is None:
+ logger.warning("args.domain='%s' has no software detected. You can try to run ./fba.py update_nodeinfo --domain=%s --force to get it updated.", args.domain, args.domain)
return 102
+ elif software_helper.is_relay(software):
+ logger.warning("args.domain='%s' is of software type '%s' which is not supported by this command. Please invoke fetch_relays instead.", args.domain, software)
+ return 103
# Initial fetch
try:
logger.warning("Exception '%s' during fetching instances (fetch_instances) from args.domain='%s'", type(exception), args.domain)
instances.set_last_error(args.domain, exception)
instances.update(args.domain)
- return 100
+ return 104
if args.single:
logger.debug("Not fetching more instances - EXIT!")
# Loop through some instances
database.cursor.execute(
- "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_instance")]
+ "SELECT domain, origin, software FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY total_peers DESC, last_response_time ASC, last_updated ASC", [time.time() - config.get("recheck_instance")]
)
rows = database.cursor.fetchall()
continue
try:
- logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", domain, row["origin"], row["software"], row["nodeinfo_url"])
- federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
+ logger.info("Fetching instances for domain='%s',origin='%s',software='%s' ...", domain, row["origin"], row["software"])
+ federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name)
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching instances (fetch_instances) from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)
logger.debug("Downloading %d files ...", len(blocklists.oliphant_blocklists))
for block in blocklists.oliphant_blocklists:
# Is domain given and not equal blocker?
+ logger.debug("block[blocker]='%s',block[csv_url]='%s'", block["blocker"], block["csv_url"])
if isinstance(args.domain, str) and args.domain != block["blocker"]:
logger.debug("Skipping blocker='%s', not matching args.domain='%s'", block["blocker"], args.domain)
continue
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
- # Static URLs
- urls = ({
- "blocker": "seirdy.one",
- "url" : "https://seirdy.one/pb/bsl.txt",
- },)
-
- logger.info("Checking %d text file(s) ...", len(urls))
- for row in urls:
+ logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
+ for row in blocklists.txt_files:
logger.debug("Fetching row[url]='%s' ...", row["url"])
response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.info("Processing %d domains ...", len(domains))
for domain in domains:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
continue
elif not domain_helper.is_wanted(domain):
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row.contents[0]) if row.contents[0] != None and row.contents[0] != "" else None
+ domain = tidyup.domain(row.contents[0]) if row.contents[0] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
continue
logger.debug("Success! - EXIT!")
return 0
-def fetch_joinfediverse(args: argparse.Namespace) -> int:
- logger.debug("args[]='%s' - CALLED!", type(args))
-
- logger.debug("Invoking locking.acquire() ...")
- locking.acquire()
-
- source_domain = "joinfediverse.wiki"
- if sources.is_recent(source_domain):
- logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 1
- else:
- logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
- sources.update(source_domain)
-
- logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
- f"https://{source_domain}/FediBlock",
- network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
- ).text
- logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
-
- doc = bs4.BeautifulSoup(raw, "html.parser")
- logger.debug("doc[]='%s'", type(doc))
-
- tables = doc.findAll("table", {"class": "wikitable"})
-
- logger.info("Analyzing %d table(s) ...", len(tables))
- blocklist = list()
- for table in tables:
- logger.debug("table[]='%s'", type(table))
-
- rows = table.findAll("tr")
- logger.info("Checking %d row(s) ...", len(rows))
- block_headers = dict()
- for row in rows:
- logger.debug("row[%s]='%s'", type(row), row)
-
- headers = row.findAll("th")
- logger.debug("Found headers()=%d header(s)", len(headers))
- if len(headers) > 1:
- block_headers = dict()
- cnt = 0
- for header in headers:
- cnt = cnt + 1
- logger.debug("header[]='%s',cnt=%d", type(header), cnt)
- text = header.contents[0]
-
- logger.debug("text[]='%s'", type(text))
- if not isinstance(text, str):
- logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
- continue
- elif validators.domain(text.strip()):
- logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
- continue
-
- text = tidyup.domain(text.strip())
- logger.debug("text='%s' - AFTER!", text)
- if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
- logger.debug("Found header: '%s'=%d", text, cnt)
- block_headers[cnt] = text
-
- elif len(block_headers) == 0:
- logger.debug("row is not scrapable - SKIPPED!")
- continue
- elif len(block_headers) > 0:
- logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
- cnt = 0
- block = dict()
-
- for element in row.find_all(["th", "td"]):
- cnt = cnt + 1
- logger.debug("element[]='%s',cnt=%d", type(element), cnt)
- if cnt in block_headers:
- logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
-
- text = element.text.strip()
- key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
-
- logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
- if key in ["domain", "instance"]:
- block[key] = text
- elif key == "reason":
- block[key] = tidyup.reason(text)
- elif key == "subdomain(s)":
- block[key] = list()
- if text != "":
- block[key] = text.split("/")
- else:
- logger.debug("key='%s'", key)
- block[key] = text
-
- logger.debug("block()=%d ...", len(block))
- if len(block) > 0:
- logger.debug("Appending block()=%d ...", len(block))
- blocklist.append(block)
-
- logger.debug("blocklist()=%d", len(blocklist))
-
- database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
- domains = database.cursor.fetchall()
-
- logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
- blocking = list()
- for block in blocklist:
- logger.debug("block='%s'", block)
- if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
- origin = block["blocked"]
- logger.debug("origin='%s'", origin)
- for subdomain in block["subdomain(s)"]:
- block["blocked"] = subdomain + "." + origin
- logger.debug("block[blocked]='%s'", block["blocked"])
- blocking.append(block)
- else:
- blocking.append(block)
-
- logger.debug("blocking()=%d", blocking)
- for block in blocking:
- logger.debug("block[]='%s'", type(block))
- if "blocked" not in block:
- raise KeyError(f"block()={len(block)} does not have element 'blocked'")
-
- block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
- logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
-
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
- elif instances.is_recent(block["blocked"]):
- logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("Proccessing blocked='%s' ...", block["blocked"])
- processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
-
- blockdict = list()
- for blocker in domains:
- blocker = blocker[0]
- logger.debug("blocker[%s]='%s'", type(blocker), blocker)
- instances.set_last_blocked(blocker)
-
- for block in blocking:
- logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
- block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
-
- logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
- if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
- logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
- blockdict.append({
- "blocked": block["blocked"],
- "reason" : block["reason"],
- })
-
- if instances.has_pending(blocker):
- logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update(blocker)
-
- logger.debug("Invoking commit() ...")
- database.connection.commit()
-
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
- logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", blocker, len(blockdict))
- network.send_bot_post(blocker, blockdict)
-
- logger.debug("Success! - EXIT!")
- return 0
-
def recheck_obfuscation(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
locking.acquire()
if isinstance(args.domain, str) and args.domain != "" and domain_helper.is_wanted(args.domain):
- database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND domain = ?", [args.domain])
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND domain = ?", [args.domain])
elif isinstance(args.software, str) and args.software != "" and validators.domain(args.software) == args.software:
- database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND software = ?", [args.software])
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE (has_obfuscation = 1 OR has_obfuscation IS NULL) AND software = ?", [args.software])
else:
- database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1")
+ database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 OR has_obfuscation IS NULL")
rows = database.cursor.fetchall()
logger.info("Checking %d domains ...", len(rows))
if (args.force is None or not args.force) and args.domain is None and args.software is None and instances.is_recent(row["domain"], "last_blocked"):
logger.debug("row[domain]='%s' has been recently checked, args.force[]='%s' - SKIPPED!", row["domain"], type(args.force))
continue
+ elif blacklist.is_blacklisted(row["domain"]):
+ logger.warning("row[domain]='%s' is blacklisted - SKIPPED!", row["domain"])
+ continue
logger.debug("Invoking federation.fetch_blocks(%s) ...", row["domain"])
blocking = federation.fetch_blocks(row["domain"])
logger.debug("blocking()=%d", len(blocking))
if len(blocking) == 0:
+ logger.debug("Empty blocking list, trying individual fetch_blocks() for row[software]='%s' ...", row["software"])
if row["software"] == "pleroma":
logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
blocking = pleroma.fetch_blocks(row["domain"])
# c.s isn't part of oliphant's "hidden" blocklists
logger.debug("row[domain]='%s'", row["domain"])
- if row["domain"] != "chaos.social" and not software_helper.is_relay(software) and not blocklists.has(row["domain"]):
+ if row["domain"] != "chaos.social" and row["software"] is not None and not software_helper.is_relay(row["software"]) and not blocklists.has(row["domain"]):
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
instances.set_last_blocked(row["domain"])
instances.set_total_blocks(row["domain"], blocking)
if block["blocked"] == "":
logger.debug("block[blocked] is empty - SKIPPED!")
continue
+ elif block["blocked"].endswith(".onion"):
+ logger.debug("blocked='%s' is a TOR onion domain name - SKIPPED!", block["blocked"])
+ continue
+ elif block["blocked"].endswith(".i2p") and config.get("allow_i2p_domain") == "true":
+ logger.debug("blocked='%s' is an I2P onion domain name - SKIPPED!", block["blocked"])
+ continue
elif block["blocked"].endswith(".arpa"):
logger.debug("blocked='%s' is a reversed IP address - SKIPPED!", block["blocked"])
continue
elif block["blocked"].endswith(".tld"):
logger.debug("blocked='%s' is a fake domain name - SKIPPED!", block["blocked"])
continue
- elif block["blocked"].endswith(".onion"):
- logger.debug("blocked='%s' is a TOR onion domain name - SKIPPED!", block["blocked"])
- continue
elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
logger.debug("block='%s' is obfuscated.", block["blocked"])
obfuscated = obfuscated + 1
- blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+ blocked = utils.deobfuscate(block["blocked"], row["domain"], block["digest"] if "digest" in block else None)
elif not domain_helper.is_wanted(block["blocked"]):
logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
"reason" : block["reason"],
})
- logger.debug("Settings obfuscated=%d for row[domain]='%s' ...", obfuscated, row["domain"])
+ logger.debug("Setting obfuscated=%d for row[domain]='%s' ...", obfuscated, row["domain"])
+ instances.set_has_obfuscation(row["domain"], (obfuscated > 0))
instances.set_obfuscated_blocks(row["domain"], obfuscated)
logger.info("domain='%s' has %d obfuscated domain(s)", row["domain"], obfuscated)
continue
logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
- domain = tidyup.domain(row["hostname"]) if row["hostname"] != None and row["hostname"] != "" else None
+ domain = tidyup.domain(row["hostname"]) if row["hostname"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain(): row[hostname]='%s' - SKIPPED!", domain, row["hostname"])
continue
if args.domain is not None and args.domain != "":
logger.debug("Fetching args.domain='%s'", args.domain)
- database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ?", [args.domain])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ? LIMIT 1", [args.domain])
elif args.software is not None and args.software != "":
logger.info("Fetching domains for args.software='%s'", args.software)
- database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.software.lower(), time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? ORDER BY last_updated ASC", [args.software])
elif args.mode is not None and args.mode != "":
logger.info("Fetching domains for args.mode='%s'", args.mode.upper())
- database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.mode.upper(), time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? ORDER BY last_updated ASC", [args.mode])
elif args.no_software:
logger.info("Fetching domains with no software type detected ...")
- database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL ORDER BY last_updated ASC")
+ elif args.with_software:
+ logger.info("Fetching domains with any software type detected ...")
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NOT NULL ORDER BY last_updated ASC")
+ elif args.no_auto:
+ logger.info("Fetching domains with other detection mode than AUTO_DISOVERY being set ...")
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode IS NOT NULL AND detection_mode != 'AUTO_DISCOVERY' ORDER BY last_updated ASC")
+ elif args.no_detection:
+ logger.info("Fetching domains with no detection mode being set ...")
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode IS NULL ORDER BY last_updated ASC")
else:
logger.info("Fetching domains for recently updated ...")
- database.cursor.execute("SELECT domain, software FROM instances WHERE last_nodeinfo < ? OR last_nodeinfo IS NULL", [time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances ORDER BY last_updated ASC")
domains = database.cursor.fetchall()
if not args.force and instances.is_recent(row["domain"], "last_nodeinfo"):
logger.debug("row[domain]='%s' has been recently checked - SKIPPED!", row["domain"])
continue
+ elif blacklist.is_blacklisted(row["domain"]):
+ logger.debug("row[domain]='%s' is blacklisted - SKIPPED!", row["domain"])
+ continue
try:
logger.info("Checking nodeinfo for row[domain]='%s',row[software]='%s' (%s%%) ...", row["domain"], row["software"], "{:5.1f}".format(cnt / len(domains) * 100))
fetched = network.get_json_api(
source_domain,
"/api/1.0/instances/list?count=0&sort_by=name",
- headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ headers=headers,
+ timeout=(config.get("connection_timeout"), config.get("read_timeout"))
)
- logger.debug("fetched[]='%s'", type(fetched))
+ logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched))
if "error_message" in fetched:
logger.warning("Error during fetching API result: '%s' - EXIT!", fetched["error_message"])
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row["name"]) if row["name"] != None and row["name"] != "" else None
+ domain = tidyup.domain(row["name"]) if row["name"] not in [None, ""] else None
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if domain is None and domain == "":
+ logger.debug("domain='%s' is empty after tidyup.domain() - SKIPPED!", domain)
+ continue
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
+ elif domain in domains:
+ logger.debug("domain='%s' is already added - SKIPPED!", domain)
+ continue
+ elif instances.is_registered(domain):
+ logger.debug("domain='%s' is already registered - SKIPPED!", domain)
+ continue
+ elif instances.is_recent(domain):
+ logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
+ continue
+
+ logger.info("Fetching instances from domain='%s'", domain)
+ federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
+
+ logger.debug("Success! - EXIT!")
+ return 0
+
+def fetch_relaylist(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "api.relaylist.com"
+
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+ return 1
+ else:
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
+
+ logger.info("Fetching list from source_domain='%s' ...", source_domain)
+ fetched = network.get_json_api(
+ source_domain,
+ "/relays",
+ headers={},
+ timeout=(config.get("connection_timeout"), config.get("read_timeout"))
+ )
+ logger.debug("fetched(%d)[]='%s'", len(fetched), type(fetched))
+
+ if "error_message" in fetched:
+ logger.warning("Error during fetching API result: '%s' - EXIT!", fetched["error_message"])
+ return 2
+ elif "exception" in fetched:
+ logger.warning("Exception '%s' during fetching API result - EXIT!", type(fetched["exception"]))
+ return 3
+ elif "json" not in fetched:
+ logger.warning("fetched has no element 'json' - EXIT!")
+ return 4
+
+ domains = list()
+
+ logger.info("Checking %d row(s) ...", len(fetched["json"]))
+ for row in fetched["json"]:
+ logger.debug("row[]='%s'", type(row))
+ domain = urlparse(row["url"]).netloc.lower().split(":")[0]
logger.debug("domain='%s' - AFTER!", domain)
if domain is None and domain == "":
logger.info("Checking %d relays ...", len(rows))
for row in rows:
logger.debug("row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
- peers = list()
if not args.force and instances.is_recent(row["domain"]):
logger.debug("row[domain]='%s' has been recently fetched - SKIPPED!", row["domain"])
continue
+ peers = list()
try:
if row["software"] == "pub-relay":
logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
instances.set_last_instance_fetch(row["domain"])
instances.update(row["domain"])
continue
- elif not "json" in raw:
+ elif "json" not in raw:
logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
continue
elif not "metadata" in raw["json"]:
continue
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"])
continue
elif domain not in peers:
logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
peers.append(domain)
+ logger.debug("domains()=%d,domain='%s'", len(domains), domain)
if dict_helper.has_key(domains, "domain", domain):
logger.debug("domain='%s' already added", domain)
continue
link = tag.find("a")
logger.debug("link[%s]='%s'", type(link), link)
- if link is None:
- logger.warning("tag='%s' has no a-tag - SKIPPED!", tag)
- continue
- elif "href" not in link:
- logger.warning("link()=%d has no key 'href' - SKIPPED!", len(link))
+ if not isinstance(link, bs4.element.Tag):
+ logger.warning("tag[%s]='%s' is not type of 'bs4.element.Tag' - SKIPPED!", type(tag), tag)
continue
- components = urlparse(link["href"])
+ components = urlparse(link.get("href"))
+ logger.debug("components(%d)='%s'", len(components), components)
domain = components.netloc.lower().split(":")[0]
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"])
continue
elif domain not in peers:
logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
peers.append(domain)
+ logger.debug("domains()=%d,domain='%s'", len(domains), domain)
if dict_helper.has_key(domains, "domain", domain):
logger.debug("domain='%s' already added", domain)
continue
logger.debug("Checking %d peer(s) row[domain]='%s' ...", len(raw["json"]["metadata"]["peers"]), row["domain"])
for domain in raw["json"]["metadata"]["peers"]:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
- if domain is None or domain == "":
+ if domain in [None, ""]:
logger.debug("domain='%s' is empty after tidyup.domain() from origin='%s' - SKIPPED!", domain, row["domain"])
continue
elif domain not in peers:
logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
peers.append(domain)
+ logger.debug("domains()=%d,domain='%s'", len(domains), domain)
if dict_helper.has_key(domains, "domain", domain):
logger.debug("domain='%s' already added", domain)
continue
- logger.debug("Appending domain='%s',origin='%s',software='%s'", domain, row["domain"], row["software"])
+ logger.debug("Appending domain='%s',origin='%s',software='%s' ...", domain, row["domain"], row["software"])
domains.append({
"domain": domain,
"origin": row["domain"],