from fba.networks import misskey
from fba.networks import pleroma
+# Locally "cached" values to speedup code and keep massive debug log shorter
+_timeout = (config.get("connection_timeout"), config.get("read_timeout"))
+_bot_enabled = config.get("bot_enabled")
+
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)
def fetch_blocks(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- if args.domain is not None and args.domain != "":
+ if args.domain not in [None, ""]:
logger.debug("args.domain='%s' - checking ...", args.domain)
if not validators.domain(args.domain, rfc_2782=True):
logger.warning("args.domain='%s' is not valid.", args.domain)
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
- if args.domain is not None and args.domain != "":
+ if args.domain not in [None, ""]:
# Re-check single domain
logger.debug("Querying database for args.domain='%s' ...", args.domain)
database.cursor.execute("SELECT domain, software, origin, nodeinfo_url FROM instances WHERE domain = ? LIMIT 1", [args.domain])
- elif args.software is not None and args.software != "":
+ elif args.software not in [None, ""]:
# Re-check single software
logger.debug("Querying database for args.software='%s' ...", args.software)
database.cursor.execute("SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? ORDER BY last_blocked ASC, total_blocks DESC", [args.software])
block["block_level"] = blocks.alias_block_level(block["block_level"])
logger.debug("block[block_level]='%s' - AFTER!", block["block_level"])
- if processing.block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] in ["rejected", "suspended"] and config.get("bot_enabled"):
+ if processing.block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] in ["rejected", "suspended"] and _bot_enabled:
logger.debug("Appending block[blocked]'%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
blockdict.append({
"blocked": block["blocked"],
logger.debug("Invoking cookies.clear(%s) ...", blocker)
cookies.clear(blocker)
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d'", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
+ logger.debug("_bot_enabled='%s',blockdict()=%d", _bot_enabled, len(blockdict))
+ if _bot_enabled and len(blockdict) > 0:
logger.info("Sending bot POST for blocker='%s',blockdict()=%d ...", blocker, len(blockdict))
network.send_bot_post(blocker, blockdict)
raw = network.fetch_url(
f"https://{source_domain}",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))
raw = network.fetch_url(
f"https://{source_domain}/todon/domainblocks",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
blockdict = list()
for block_level in blocklist:
+ logger.debug("block_level='%s'", block_level)
blockers = blocklist[block_level]
- logger.debug("block_level='%s',blockers()=%d'", block_level, len(blockers))
+ logger.debug("Checking %d blocker entries for block_level='%s' ...", len(blockers), block_level)
for blocked in blockers:
logger.debug("blocked='%s'", blocked)
if not domain_helper.is_wanted(blocked):
logger.warning("blocked='%s' is not wanted - SKIPPED!", blocked)
continue
- elif not domain_helper.is_wanted(blocker):
- logger.warning("blocker='%s' is not wanted - SKIPPED!", blocker)
- continue
elif blocks.is_instance_blocked(blocker, blocked, block_level):
logger.debug("blocked='%s',block_level='%s' is already blocked - SKIPPED!", blocked, block_level)
continue
logger.info("Adding new block: blocked='%s',block_level='%s'", blocked, block_level)
- if processing.block(blocker, blocked, None, block_level) and block_level in ["suspended", "rejected"] and config.get("bot_enabled"):
+ if processing.block(blocker, blocked, None, block_level) and block_level in ["suspended", "rejected"] and _bot_enabled:
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", blocked, block_level, blocker)
blockdict.append({
"blocked": blocked,
logger.debug("Invoking commit() ...")
database.connection.commit()
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
+ logger.debug("_bot_enabled='%s',blockdict()=%d", _bot_enabled, len(blockdict))
+ if _bot_enabled and len(blockdict) > 0:
logger.info("Sending bot POST for blocker='%s',blockdict()=%d ...", blocker, len(blockdict))
network.send_bot_post(blocker, blockdict)
raw = network.fetch_url(
f"https://{source_domain}/federation",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
logger.warning("Exception '%s' during fetching instances (fetch_cs) from row[domain]='%s'", type(exception), row["domain"])
instances.set_last_error(row["domain"], exception)
- if processing.block(blocker, row["domain"], row["reason"], block_level) and block_level in ["suspended", "rejected"] and config.get("bot_enabled"):
+ if processing.block(blocker, row["domain"], row["reason"], block_level) and block_level in ["suspended", "rejected"] and _bot_enabled:
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", row["domain"], block_level, blocker)
blockdict.append({
"blocked": row["domain"],
logger.debug("Invoking commit() ...")
database.connection.commit()
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
+ logger.debug("_bot_enabled='%s',blockdict()=%d", _bot_enabled, len(blockdict))
+ if _bot_enabled and len(blockdict) > 0:
logger.info("Sending bot POST for blocker='%s',blockdict()=%d ...", blocker, len(blockdict))
network.send_bot_post(blocker, blockdict)
sources.update(domain)
logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
- response = network.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_url(args.feed, network.web_headers, timeout=_timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
domains = list()
logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
- response = network.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_url(feed, network.web_headers, timeout=_timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
for row in blocklists.txt_files:
logger.debug("Fetching row[url]='%s' ...", row["url"])
- response = network.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_url(row["url"], network.web_headers, timeout=_timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and response.text != "":
response = network.fetch_url(
f"https://{source_domain}",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
raw = network.fetch_url(
f"https://{source_domain}/api/v1/instances",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
raw = network.fetch_url(
f"https://{source_domain}/instances.json",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
block["block_level"] = blocks.alias_block_level(block["block_level"])
logger.debug("block[block_level]='%s' - AFTER!", block["block_level"])
- if processing.block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] in ["suspended", "rejected"] and config.get("bot_enabled"):
+ if processing.block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] in ["suspended", "rejected"] and _bot_enabled:
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", blocked, block["block_level"], row["domain"])
blockdict.append({
"blocked": blocked,
logger.debug("Invoking commit() ...")
database.connection.commit()
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
+ logger.debug("_bot_enabled='%s',blockdict()=%d", _bot_enabled, len(blockdict))
+ if _bot_enabled and len(blockdict) > 0:
logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", row["domain"], len(blockdict))
network.send_bot_post(row["domain"], blockdict)
sources.update(source_domain)
url = f"http://{source_domain}/instance/csv?onion=not"
- if args.software is not None and args.software != "":
+ if args.software not in [None, ""]:
logger.debug("args.software='%s'", args.software)
url = f"http://{source_domain}/instance/csv?software={args.software}&onion=not"
response = reqto.get(
url,
headers=network.web_headers,
- timeout=(config.get("connection_timeout"), config.get("read_timeout")),
+ timeout=_timeout,
allow_redirects=False
)
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
- if args.domain is not None and args.domain != "":
+ if args.domain not in [None, ""]:
logger.debug("Fetching args.domain='%s'", args.domain)
database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ? LIMIT 1", [args.domain])
- elif args.software is not None and args.software != "":
+ elif args.software not in [None, ""]:
logger.info("Fetching domains for args.software='%s'", args.software)
database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? ORDER BY last_updated ASC", [args.software])
- elif args.mode is not None and args.mode != "":
+ elif args.mode not in [None, ""]:
logger.info("Fetching domains for args.mode='%s'", args.mode.upper())
database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? ORDER BY last_updated ASC", [args.mode])
elif args.no_software:
logger.info("Fetching list from source_domain='%s' ...", source_domain)
rows = network.fetch_json_rows(
source_domain,
- "/api/1.0/instances/list?count=0&sort_by=name",
+ "/api/1.0/instances/list?count=0&sort_by=name",
{
"Authorization": f"Bearer {config.get('instances_social_api_key')}",
},
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
- logger.debug("row[]='%s'", type(row))
+ logger.debug("row[]='%s' - BEFORE!", type(row))
domain = tidyup.domain(row["name"]) if row["name"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
- logger.debug("row[]='%s'", type(row))
+ logger.debug("row[%s]='%s' - BEFORE!", type(row), row)
domain = urlparse(row["url"]).netloc.lower().split(":")[0]
logger.debug("domain='%s' - AFTER!", domain)
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
- if args.domain is not None and args.domain != "":
+ if args.domain not in [None, ""]:
logger.debug("Fetching instances record for args.domain='%s' ...", args.domain)
database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND domain = ? LIMIT 1", [args.domain])
- elif args.software is not None and args.software != "":
+ elif args.software not in [None, ""]:
logger.debug("Fetching instances records for args.software='%s' ...", args.software)
database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay', 'pub-relay') AND nodeinfo_url IS NOT NULL AND software = ? ORDER BY last_updated DESC", [args.software])
else:
logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
raw = network.fetch_api_url(
row["nodeinfo_url"],
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
)
logger.debug("raw[%s]()=%d", type(raw), len(raw))
raw = network.fetch_url(
f"https://{row['domain']}",
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=_timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))