if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
source_domain = "gql.api.bka.li"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
# c.s isn't part of oliphant's "hidden" blocklists
if blocker == "chaos.social" or software_helper.is_relay(software) or blocklists.has(blocker):
- logger.debug("Skipping blocker='%s', run ./fba.py fetch_cs or fetch_oliphant instead!", blocker)
+ logger.debug("Skipping blocker='%s', run ./fba.py fetch_cs, fetch_oliphant, fetch_csv instead!", blocker)
continue
logger.debug("Invoking federation.fetch_blocks(%s) ...", blocker)
instances.set_total_blocks(blocker, blocking)
blockdict = list()
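+ # Counters: total obfuscated block entries seen vs. how many of them could be deobfuscated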
+ deobfuscated = obfuscated = 0
logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
for block in blocking:
continue
elif block["blocked"].find("*") >= 0:
logger.debug("blocker='%s' uses obfuscated domains", blocker)
+ instances.set_has_obfuscation(blocker, True)
+ obfuscated = obfuscated + 1
# Some friendica servers also obscure domains without hash
- row = instances.deobfuscate("*", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("*", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
logger.warning("Cannot deobfuscate blocked='%s',blocker='%s',software='%s' - SKIPPED!", block["blocked"], blocker, software)
- instances.set_has_obfuscation(blocker, True)
continue
+ deobfuscated = deobfuscated + 1
block["blocked"] = row["domain"]
origin = row["origin"]
nodeinfo_url = row["nodeinfo_url"]
elif block["blocked"].find("?") >= 0:
logger.debug("blocker='%s' uses obfuscated domains", blocker)
+ instances.set_has_obfuscation(blocker, True)
+ obfuscated = obfuscated + 1
# Some obscure them with question marks, not sure if that's dependent on version or not
- row = instances.deobfuscate("?", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("?", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
logger.warning("Cannot deobfuscate blocked='%s',blocker='%s',software='%s' - SKIPPED!", block["blocked"], blocker, software)
- instances.set_has_obfuscation(blocker, True)
continue
+ deobfuscated = deobfuscated + 1
block["blocked"] = row["domain"]
origin = row["origin"]
nodeinfo_url = row["nodeinfo_url"]
logger.debug("Invoking cookies.clear(%s) ...", block["blocked"])
cookies.clear(block["blocked"])
+ logger.info("blocker='%s' has %d obfuscated domain(s) and %d of them could be deobfuscated.", blocker, obfuscated, deobfuscated)
+
logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
if instances.has_pending(blocker):
logger.debug("Flushing updates for blocker='%s' ...", blocker)
source_domain = "fediverse.observer"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
for item in items:
logger.debug("item[]='%s'", type(item))
domain = item.decode_contents()
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
source_domain = "wiki.todon.eu"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
source_domain = "raw.githubusercontent.com"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
for item in rss.items:
logger.debug("item[%s]='%s'", type(item), item)
domain = item.link.split("=")[1]
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
logger.debug("element[]='%s'", type(element))
for href in element["href"].split(","):
logger.debug("href[%s]='%s' - BEFORE!", type(href), href)
- domain = tidyup.domain(href) if href != None and href != "" else None
+ domain = tidyup.domain(href) if href not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.debug("Success - EXIT!")
return 0
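+# Fetch the CSV-based blocklists registered in blocklists.csv_files and hand each one
+# over to processing.csv_block(); restricted to a single blocker when args.domain is set.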
+def fetch_csv(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ logger.info("Checking %d CSV files ...", len(blocklists.csv_files))
+ for block in blocklists.csv_files:
+ logger.debug("block[blocker]='%s',block[csv_url]='%s'", block["blocker"], block["csv_url"])
+
+ # Is a domain given and does it differ from this blocker?
+ if isinstance(args.domain, str) and args.domain != block["blocker"]:
+ logger.debug("Skipping blocker='%s', not matching args.domain='%s'", block["blocker"], args.domain)
+ continue
+
+ logger.debug("Invoking processing.csv_block(%s, %s, fetch_csv) ...", block["blocker"], block["csv_url"])
+ processing.csv_block(block["blocker"], block["csv_url"], inspect.currentframe().f_code.co_name)
+
+ logger.debug("Success - EXIT!")
+ return 0
+
def fetch_oliphant(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
source_domain = "codeberg.org"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
# Base URL
base_url = f"https://{source_domain}/oliphant/blocklists/raw/branch/main/blocklists"
- domains = list()
-
logger.debug("Downloading %d files ...", len(blocklists.oliphant_blocklists))
for block in blocklists.oliphant_blocklists:
# Is domain given and not equal blocker?
if isinstance(args.domain, str) and args.domain != block["blocker"]:
logger.debug("Skipping blocker='%s', not matching args.domain='%s'", block["blocker"], args.domain)
continue
- elif args.domain in domains:
- logger.debug("args.domain='%s' already handled - SKIPPED!", args.domain)
- continue
-
- instances.set_last_blocked(block["blocker"])
-
- # Fetch this URL
- logger.info("Fetching csv_url='%s' for blocker='%s' ...", block["csv_url"], block["blocker"])
- response = utils.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
-
- logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
- if not response.ok or response.status_code > 200 or response.content == "":
- logger.warning("Could not fetch csv_url='%s' for blocker='%s' - SKIPPED!", block["csv_url"], block["blocker"])
- continue
- logger.debug("Fetched %d Bytes, parsing CSV ...", len(response.content))
- reader = csv.DictReader(response.content.decode("utf-8").splitlines(), dialect="unix")
-
- blockdict = list()
+ url = f"{base_url}/{block['csv_url']}"
- cnt = 0
- for row in reader:
- logger.debug("row[%s]='%s'", type(row), row)
- domain = severity = None
- reject_media = reject_reports = False
-
- if "#domain" in row:
- domain = row["#domain"]
- elif "domain" in row:
- domain = row["domain"]
- else:
- logger.debug("row='%s' does not contain domain column", row)
- continue
-
- if "#severity" in row:
- severity = blocks.alias_block_level(row["#severity"])
- elif "severity" in row:
- severity = blocks.alias_block_level(row["severity"])
- else:
- logger.debug("row='%s' does not contain severity column", row)
- continue
-
- if "#reject_media" in row and row["#reject_media"].lower() == "true":
- reject_media = True
- elif "reject_media" in row and row["reject_media"].lower() == "true":
- reject_media = True
-
- if "#reject_reports" in row and row["#reject_reports"].lower() == "true":
- reject_reports = True
- elif "reject_reports" in row and row["reject_reports"].lower() == "true":
- reject_reports = True
-
- cnt = cnt + 1
- logger.debug("domain='%s',severity='%s',reject_media='%s',reject_reports='%s'", domain, severity, reject_media, reject_reports)
- if domain is None or domain == "":
- logger.debug("domain='%s' is empty - SKIPPED!", domain)
- continue
- elif domain.endswith(".onion"):
- logger.debug("domain='%s' is a TOR .onion domain - SKIPPED", domain)
- continue
- elif domain.endswith(".arpa"):
- logger.debug("domain='%s' is a reverse IP address - SKIPPED", domain)
- continue
- elif domain.endswith(".tld"):
- logger.debug("domain='%s' is a fake domain - SKIPPED", domain)
- continue
- elif domain.find("*") >= 0 or domain.find("?") >= 0:
- logger.debug("domain='%s' is obfuscated - Invoking utils.deobfuscate(%s, %s) ...", domain, domain, block["blocker"])
- domain = utils.deobfuscate(domain, block["blocker"])
- logger.debug("domain='%s' - AFTER!", domain)
-
- if not validators.domain(domain):
- logger.debug("domain='%s' is not a valid domain - SKIPPED!")
- continue
- elif blacklist.is_blacklisted(domain):
- logger.warning("domain='%s' is blacklisted - SKIPPED!", domain)
- continue
- elif blocks.is_instance_blocked(block["blocker"], domain, severity):
- logger.debug("block[blocker]='%s' has already blocked domain='%s' with severity='%s' - SKIPPED!", block["blocker"], domain, severity)
- continue
-
- logger.debug("Marking domain='%s' as handled", domain)
- domains.append(domain)
-
- logger.debug("Processing domain='%s' ...", domain)
- processed = processing.instance(domain, block["blocker"], inspect.currentframe().f_code.co_name)
- logger.debug("processed='%s'", processed)
-
- if processing.block(block["blocker"], domain, None, severity) and config.get("bot_enabled"):
- logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", domain, block["block_level"], block["blocker"])
- blockdict.append({
- "blocked": domain,
- "reason" : block["reason"],
- })
-
- if reject_media:
- processing.block(block["blocker"], domain, None, "reject_media")
- if reject_reports:
- processing.block(block["blocker"], domain, None, "reject_reports")
-
- logger.debug("block[blocker]='%s'", block["blocker"])
- if not blocklists.has(block["blocker"]):
- logger.debug("Invoking instances.set_total_blocks(%s, domains()=%d) ...", block["blocker"], len(domains))
- instances.set_total_blocks(block["blocker"], domains)
-
- logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
- if instances.has_pending(block["blocker"]):
- logger.debug("Flushing updates for block[blocker]='%s' ...", block["blocker"])
- instances.update(block["blocker"])
-
- logger.debug("Invoking commit() ...")
- database.connection.commit()
-
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
- logger.info("Sending bot POST for blocker='%s',blockdict()=%d ...", block["blocker"], len(blockdict))
- network.send_bot_post(block["blocker"], blockdict)
+ logger.debug("Invoking processing.csv_block(%s, %s, fetch_oliphant) ...", block["blocker"], url)
+ processing.csv_block(block["blocker"], url, inspect.currentframe().f_code.co_name)
logger.debug("Success! - EXIT!")
return 0
logger.info("Processing %d domains ...", len(domains))
for domain in domains:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
source_domain = "fedipact.online"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row.contents[0]) if row.contents[0] != None and row.contents[0] != "" else None
+ domain = tidyup.domain(row.contents[0]) if row.contents[0] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
source_domain = "instances.joinmobilizon.org"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
source_domain = "instanceapp.misskey.page"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
logger.debug("Success! - EXIT!")
return 0
-def fetch_joinfediverse(args: argparse.Namespace) -> int:
- logger.debug("args[]='%s' - CALLED!", type(args))
-
- logger.debug("Invoking locking.acquire() ...")
- locking.acquire()
-
- source_domain = "joinfediverse.wiki"
- if sources.is_recent(source_domain):
- logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
- else:
- logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
- sources.update(source_domain)
-
- logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
- f"https://{source_domain}/FediBlock",
- network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
- ).text
- logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
-
- doc = bs4.BeautifulSoup(raw, "html.parser")
- logger.debug("doc[]='%s'", type(doc))
-
- tables = doc.findAll("table", {"class": "wikitable"})
-
- logger.info("Analyzing %d table(s) ...", len(tables))
- blocklist = list()
- for table in tables:
- logger.debug("table[]='%s'", type(table))
-
- rows = table.findAll("tr")
- logger.info("Checking %d row(s) ...", len(rows))
- block_headers = dict()
- for row in rows:
- logger.debug("row[%s]='%s'", type(row), row)
-
- headers = row.findAll("th")
- logger.debug("Found headers()=%d header(s)", len(headers))
- if len(headers) > 1:
- block_headers = dict()
- cnt = 0
- for header in headers:
- cnt = cnt + 1
- logger.debug("header[]='%s',cnt=%d", type(header), cnt)
- text = header.contents[0]
-
- logger.debug("text[]='%s'", type(text))
- if not isinstance(text, str):
- logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
- continue
- elif validators.domain(text.strip()):
- logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
- continue
-
- text = tidyup.domain(text.strip())
- logger.debug("text='%s' - AFTER!", text)
- if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
- logger.debug("Found header: '%s'=%d", text, cnt)
- block_headers[cnt] = text
-
- elif len(block_headers) == 0:
- logger.debug("row is not scrapable - SKIPPED!")
- continue
- elif len(block_headers) > 0:
- logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
- cnt = 0
- block = dict()
-
- for element in row.find_all(["th", "td"]):
- cnt = cnt + 1
- logger.debug("element[]='%s',cnt=%d", type(element), cnt)
- if cnt in block_headers:
- logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
-
- text = element.text.strip()
- key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
-
- logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
- if key in ["domain", "instance"]:
- block[key] = text
- elif key == "reason":
- block[key] = tidyup.reason(text)
- elif key == "subdomain(s)":
- block[key] = list()
- if text != "":
- block[key] = text.split("/")
- else:
- logger.debug("key='%s'", key)
- block[key] = text
-
- logger.debug("block()=%d ...", len(block))
- if len(block) > 0:
- logger.debug("Appending block()=%d ...", len(block))
- blocklist.append(block)
-
- logger.debug("blocklist()=%d", len(blocklist))
-
- database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
- domains = database.cursor.fetchall()
-
- logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
- blocking = list()
- for block in blocklist:
- logger.debug("block='%s'", block)
- if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
- origin = block["blocked"]
- logger.debug("origin='%s'", origin)
- for subdomain in block["subdomain(s)"]:
- block["blocked"] = subdomain + "." + origin
- logger.debug("block[blocked]='%s'", block["blocked"])
- blocking.append(block)
- else:
- blocking.append(block)
-
- logger.debug("blocking()=%d", blocking)
- for block in blocking:
- logger.debug("block[]='%s'", type(block))
- if "blocked" not in block:
- raise KeyError(f"block()={len(block)} does not have element 'blocked'")
-
- block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
- logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
-
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
- elif instances.is_recent(block["blocked"]):
- logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("Proccessing blocked='%s' ...", block["blocked"])
- processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
-
- blockdict = list()
- for blocker in domains:
- blocker = blocker[0]
- logger.debug("blocker[%s]='%s'", type(blocker), blocker)
- instances.set_last_blocked(blocker)
-
- for block in blocking:
- logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
- block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
-
- logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
- if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
- logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
- blockdict.append({
- "blocked": block["blocked"],
- "reason" : block["reason"],
- })
-
- if instances.has_pending(blocker):
- logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update(blocker)
-
- logger.debug("Invoking commit() ...")
- database.connection.commit()
-
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
- logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", blocker, len(blockdict))
- network.send_bot_post(blocker, blockdict)
-
- logger.debug("Success! - EXIT!")
- return 0
-
def recheck_obfuscation(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
# c.s isn't part of oliphant's "hidden" blocklists
logger.debug("row[domain]='%s'", row["domain"])
- if row["domain"] != "chaos.social" and not software_helper.is_relay(software) and not blocklists.has(row["domain"]):
+ if row["domain"] != "chaos.social" and not software_helper.is_relay(row["software"]) and not blocklists.has(row["domain"]):
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
instances.set_last_blocked(row["domain"])
instances.set_total_blocks(row["domain"], blocking)
elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
logger.debug("block='%s' is obfuscated.", block["blocked"])
obfuscated = obfuscated + 1
- blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+ blocked = utils.deobfuscate(block["blocked"], row["domain"], block["digest"] if "digest" in block else None)
elif not domain_helper.is_wanted(block["blocked"]):
logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
instances.set_obfuscated_blocks(row["domain"], obfuscated)
logger.info("domain='%s' has %d obfuscated domain(s)", row["domain"], obfuscated)
- if obfuscated == 0 and len(blocking) > 0:
- logger.info("Block list from domain='%s' has been fully deobfuscated.", row["domain"])
- instances.set_has_obfuscation(row["domain"], False)
-
if instances.has_pending(row["domain"]):
logger.debug("Flushing updates for blocker='%s' ...", row["domain"])
instances.update(row["domain"])
source_domain = "demo.fedilist.com"
if sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 1
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
continue
logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
- domain = tidyup.domain(row["hostname"]) if row["hostname"] != None and row["hostname"] != "" else None
+ domain = tidyup.domain(row["hostname"]) if row["hostname"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
if args.domain is not None and args.domain != "":
logger.debug("Fetching args.domain='%s'", args.domain)
- database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ?", [args.domain])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ? LIMIT 1", [args.domain])
elif args.software is not None and args.software != "":
logger.info("Fetching domains for args.software='%s'", args.software)
- database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.software.lower(), time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? ORDER BY last_updated ASC")
elif args.mode is not None and args.mode != "":
logger.info("Fetching domains for args.mode='%s'", args.mode.upper())
- database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.mode.upper(), time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? ORDER BY last_updated ASC")
elif args.no_software:
logger.info("Fetching domains with no software type detected ...")
- database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL ORDER BY last_updated ASC")
+ elif args.no_auto:
+ logger.info("Fetching domains with other detection mode than AUTO_DISOVERY being set ...")
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode IS NOT NULL AND detection_mode != 'AUTO_DISCOVERY' ORDER BY last_updated ASC")
else:
logger.info("Fetching domains for recently updated ...")
- database.cursor.execute("SELECT domain, software FROM instances WHERE last_nodeinfo < ? OR last_nodeinfo IS NULL", [time.time() - config.get("recheck_nodeinfo")])
+ database.cursor.execute("SELECT domain, software FROM instances ORDER BY last_updated ASC")
domains = database.cursor.fetchall()
return 1
elif sources.is_recent(source_domain):
logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 0
+ return 2
else:
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row["name"]) if row["name"] != None and row["name"] != "" else None
+ domain = tidyup.domain(row["name"]) if row["name"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None and domain == "":
instances.set_last_instance_fetch(row["domain"])
instances.update(row["domain"])
continue
- elif not "json" in raw:
+ elif "json" not in raw:
logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
continue
elif not "metadata" in raw["json"]:
continue
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
link = tag.find("a")
logger.debug("link[%s]='%s'", type(link), link)
- if link is None:
- logger.warning("tag='%s' has no a-tag - SKIPPED!", tag)
- continue
- elif "href" not in link:
- logger.warning("link()=%d has no key 'href' - SKIPPED!", len(link))
+ if not isinstance(link, bs4.element.Tag):
+ logger.warning("tag[%s]='%s' is not type of 'bs4.element.Tag' - SKIPPED!", type(tag), tag)
continue
- components = urlparse(link["href"])
+ components = urlparse(link.get("href"))
+ logger.debug("components(%d)='%s'", len(components), components)
domain = components.netloc.lower().split(":")[0]
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.debug("Checking %d peer(s) row[domain]='%s' ...", len(raw["json"]["metadata"]["peers"]), row["domain"])
for domain in raw["json"]["metadata"]["peers"]:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":