import logging
import time
+from urllib.parse import urlparse
+
import argparse
import atoma
import bs4
from fba.helpers import config
from fba.helpers import cookies
from fba.helpers import locking
+from fba.helpers import processing
from fba.helpers import software as software_helper
from fba.helpers import tidyup
from fba.http import federation
from fba.http import network
-from fba.models import apis
from fba.models import blocks
from fba.models import instances
+from fba.models import sources
from fba.networks import friendica
from fba.networks import lemmy
def fetch_pixelfed_api(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- # No CSRF by default, you don't have to add network.api_headers by yourself here
+ # No CSRF by default, you don't have to add network.source_headers yourself here
headers = tuple()
- api_domain = "pixelfed.org"
+ source_domain = "pixelfed.org"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
try:
- logger.debug("Checking CSRF from api_domain='%s' ...", api_domain)
- headers = csrf.determine(api_domain, dict())
+ logger.debug("Checking CSRF from source_domain='%s' ...", source_domain)
+ headers = csrf.determine(source_domain, dict())
except network.exceptions as exception:
logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
return list()
try:
logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
fetched = network.get_json_api(
- api_domain,
+ source_domain,
"/api/v1/servers/all.json?scope=All&country=all&language=all",
headers,
(config.get("connection_timeout"), config.get("read_timeout"))
elif row["domain"] == "":
logger.debug("row[domain] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(row["domain"]):
- logger.warning("row[domain]='%s' is not wanted - SKIPPED!", row["domain"])
+
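+ # Normalize internationalized domain names to their punycode ("xn--") form before any further checks.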
+ logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
+ domain = row["domain"].encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
+ logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
- elif instances.is_registered(row["domain"]):
- logger.debug("row[domain]='%s' is already registered - SKIPPED!", row["domain"])
+ elif instances.is_registered(domain):
+ logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
- elif instances.is_recent(row["domain"]):
- logger.debug("row[domain]='%s' has been recently crawled - SKIPPED!", row["domain"])
+ elif instances.is_recent(domain):
+ logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
continue
- logger.debug("Fetching instances from row[domain]='%s' ...", row["domain"])
- federation.fetch_instances(row["domain"], None, None, inspect.currentframe().f_code.co_name)
+ logger.debug("Fetching instances from domain='%s' ...", domain)
+ federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
except network.exceptions as exception:
logger.warning("Cannot fetch graphql,exception[%s]:'%s' - EXIT!", type(exception), str(exception))
def fetch_bkali(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "gql.apis.bka.li"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "gql.api.bka.li"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
domains = list()
try:
- logger.info("Fetching domainlist from api_domain='%s' ...", api_domain)
+ logger.info("Fetching domainlist from source_domain='%s' ...", source_domain)
fetched = network.post_json_api(
- api_domain,
+ source_domain,
"/v1/graphql",
json.dumps({
"query": "query domainlist {nodeinfo(order_by: {domain: asc}) {domain}}"
logger.debug("fetched[]='%s'", type(fetched))
if "error_message" in fetched:
- logger.warning("post_json_api() for 'gql.apis.bka.li' returned error message='%s", fetched["error_message"])
+ logger.warning("post_json_api() for 'gql.sources.bka.li' returned error message='%s", fetched["error_message"])
return 100
elif isinstance(fetched["json"], dict) and "error" in fetched["json"] and "message" in fetched["json"]["error"]:
logger.warning("post_json_api() returned error: '%s", fetched["error"]["message"])
logger.debug("domains()=%d", len(domains))
if len(domains) > 0:
- locking.acquire()
-
logger.info("Adding %d new instances ...", len(domains))
for domain in domains:
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
try:
logger.info("Fetching instances from domain='%s' ...", domain)
federation.fetch_instances(domain, 'tak.teleyal.blog', None, inspect.currentframe().f_code.co_name)
logger.warning("args.domain='%s' is not registered, please run ./utils.py fetch_instances '%s' first.", args.domain, args.domain)
return 102
+ logger.debug("Invoking locking.acquire() ...")
locking.acquire()
if args.domain is not None and args.domain != "":
instances.set_has_obfuscation(blocker, False)
blocking = list()
- blockdict = list()
if software == "pleroma":
logger.info("blocker='%s',software='%s'", blocker, software)
blocking = pleroma.fetch_blocks(blocker, nodeinfo_url)
else:
logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)
- logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
- instances.set_total_blocks(blocker, blocking)
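+ # chaos.social's block totals are set by fetch_cs(), so don't overwrite them here.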
+ logger.debug("blocker='%s'", blocker)
+ if blocker != "chaos.social":
+ logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
+ instances.set_total_blocks(blocker, blocking)
logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
blockdict = list()
if block["blocked"] == "":
logger.debug("block[blocked] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(block["blocked"]):
+
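+ # Strip leading dots and convert the blocked domain to punycode before validating it.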
+ logger.debug("block[blocked]='%s' - BEFORE!", block["blocked"])
+ block["blocked"] = block["blocked"].lstrip(".").encode("idna").decode("utf-8")
+ logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
+ if not utils.is_domain_wanted(block["blocked"]):
logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
elif block["block_level"] in ["accept", "accepted"]:
logger.debug("Hash wasn't found, adding: blocked='%s',blocker='%s'", block["blocked"], blocker)
federation.fetch_instances(block["blocked"], blocker, None, inspect.currentframe().f_code.co_name)
- block["block_level"] = utils.alias_block_level(block["block_level"])
+ block["block_level"] = blocks.alias_block_level(block["block_level"])
- if utils.process_block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
+ if processing.block(blocker, block["blocked"], block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
blockdict.append({
"blocked": block["blocked"],
def fetch_observer(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "fediverse.observer"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "fediverse.observer"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
-
- # Acquire lock
- locking.acquire()
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
types = list()
if args.software is None:
logger.info("Fetching software list ...")
raw = utils.fetch_url(
- f"https://{api_domain}",
+ f"https://{source_domain}",
network.web_headers,
(config.get("connection_timeout"), config.get("read_timeout"))
).text
try:
logger.debug("Fetching table data for software='%s' ...", software)
raw = utils.fetch_url(
- f"https://{api_domain}/app/views/tabledata.php?software={software}",
+ f"https://{source_domain}/app/views/tabledata.php?software={software}",
network.web_headers,
(config.get("connection_timeout"), config.get("read_timeout"))
).text
doc = bs4.BeautifulSoup(raw, features="html.parser")
logger.debug("doc[]='%s'", type(doc))
except network.exceptions as exception:
- logger.warning("Cannot fetch software='%s' from api_domain='%s': '%s'", software, api_domain, type(exception))
+ logger.warning("Cannot fetch software='%s' from source_domain='%s': '%s'", software, source_domain, type(exception))
continue
items = doc.findAll("a", {"class": "url"})
for item in items:
logger.debug("item[]='%s'", type(item))
domain = item.decode_contents()
-
logger.debug("domain='%s' - AFTER!", domain)
+
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
def fetch_todon_wiki(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "wiki.todon.eu"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "wiki.todon.eu"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
-
- locking.acquire()
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
blocklist = {
"silenced": list(),
"reject": list(),
}
- raw = utils.fetch_url(f"https://{api_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+ raw = utils.fetch_url(f"https://{source_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
doc = bs4.BeautifulSoup(raw, "html.parser")
continue
logger.info("Adding new block: blocked='%s',block_level='%s'", blocked, block_level)
- if utils.process_block(blocker, blocked, None, block_level) and block_level == "reject" and config.get("bot_enabled"):
+ if processing.block(blocker, blocked, None, block_level) and block_level == "reject" and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", blocked, block_level, blocker)
blockdict.append({
"blocked": blocked,
def fetch_cs(args: argparse.Namespace):
logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
extensions = [
"extra",
"abbr",
"wikilinks"
]
- domains = {
+ blocklist = {
"silenced": list(),
"reject" : list(),
}
- api_domain = "raw.githubusercontent.com"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ source_domain = "raw.githubusercontent.com"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
- raw = utils.fetch_url(f"https://{api_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+ raw = utils.fetch_url(f"https://{source_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features="html.parser")
silenced = doc.find("h2", {"id": "silenced-instances"}).findNext("table").find("tbody")
logger.debug("silenced[%s]()=%d", type(silenced), len(silenced))
- domains["silenced"] = federation.find_domains(silenced)
+ blocklist["silenced"] = federation.find_domains(silenced)
blocked = doc.find("h2", {"id": "blocked-instances"}).findNext("table").find("tbody")
logger.debug("blocked[%s]()=%d", type(blocked), len(blocked))
- domains["reject"] = federation.find_domains(blocked)
+ blocklist["reject"] = federation.find_domains(blocked)
blocking = blocklist["silenced"] + blocklist["reject"]
blocker = "chaos.social"
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
instances.set_total_blocks(blocker, blocking)
- logger.debug("domains[silenced]()=%d,domains[reject]()=%d", len(domains["silenced"]), len(domains["reject"]))
- blockdict = list()
- if len(domains) > 0:
- locking.acquire()
-
- for block_level in domains:
- logger.info("block_level='%s' has %d row(s)", block_level, len(domains[block_level]))
+ logger.debug("blocklist[silenced]()=%d,blocklist[reject]()=%d", len(blocklist["silenced"]), len(blocklist["reject"]))
+ if len(blocking) > 0:
+ blockdict = list()
+ for block_level in blocklist:
+ logger.info("block_level='%s' has %d row(s)", block_level, len(blocklist[block_level]))
- for row in domains[block_level]:
+ for row in blocklist[block_level]:
logger.debug("row[%s]='%s'", type(row), row)
- if instances.is_recent(row["domain"], "last_blocked"):
+ if not "domain" in row:
+ logger.warning("row[]='%s' has no element 'domain' - SKIPPED!", type(row))
+ continue
+ elif instances.is_recent(row["domain"], "last_blocked"):
logger.debug("row[domain]='%s' has been recently crawled - SKIPPED!", row["domain"])
continue
elif not instances.is_registered(row["domain"]):
logger.warning("Exception '%s' during fetching instances (fetch_cs) from row[domain]='%s'", type(exception), row["domain"])
instances.set_last_error(row["domain"], exception)
- if utils.process_block(blocker, row["domain"], row["reason"], block_level) and block_level == "reject" and config.get("bot_enabled"):
+ if processing.block(blocker, row["domain"], row["reason"], block_level) and block_level == "reject" and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", row["domain"], block_level, blocker)
blockdict.append({
"blocked": row["domain"],
def fetch_fba_rss(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
+
domains = list()
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
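+ # Use the feed URL's host part (netloc) as the key for the recent-access check on this source.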
+ components = urlparse(args.feed)
+
+ if sources.is_recent(components.netloc):
+ logger.info("API from components.netloc='%s' has recently being accessed - EXIT!", components.netloc)
+ return 0
+ else:
+ logger.debug("components.netloc='%s' has not been recently used, marking ...", components.netloc)
+ sources.update(components.netloc)
+
logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
logger.debug("domains()=%d", len(domains))
if len(domains) > 0:
- locking.acquire()
-
logger.info("Adding %d new instances ...", len(domains))
for domain in domains:
+ logger.debug("domain='%s'", domain)
try:
logger.info("Fetching instances from domain='%s' ...", domain)
federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
def fetch_fbabot_atom(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "ryana.agency"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "ryona.agency"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
- feed = f"https://{api_domain}/users/fba/feed.atom"
+ feed = f"https://{source_domain}/users/fba/feed.atom"
domains = list()
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
logger.debug("domains()=%d", len(domains))
if len(domains) > 0:
- locking.acquire()
-
logger.info("Adding %d new instances ...", len(domains))
for domain in domains:
logger.debug("domain='%s'", domain)
try:
logger.info("Fetching instances from domain='%s' ...", domain)
- federation.fetch_instances(domain, api_domain, None, inspect.currentframe().f_code.co_name)
+ federation.fetch_instances(domain, source_domain, None, inspect.currentframe().f_code.co_name)
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching instances (fetch_fbabot_atom) from domain='%s'", type(exception), domain)
instances.set_last_error(domain, exception)
def fetch_instances(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
+ logger.debug("args.domain='%s' - checking ...", args.domain)
+ if not validators.domain(args.domain):
+ logger.warning("args.domain='%s' is not valid.", args.domain)
+ return 100
+ elif blacklist.is_blacklisted(args.domain):
+ logger.warning("args.domain='%s' is blacklisted, won't check it!", args.domain)
+ return 101
+
+ logger.debug("Invoking locking.acquire() ...")
locking.acquire()
# Initial fetch
if row["domain"] == "":
logger.debug("row[domain] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(row["domain"]):
- logger.warning("Domain row[domain]='%s' is not wanted - SKIPPED!", row["domain"])
+
+ logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
+ domain = row["domain"].encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
+ logger.warning("Domain domain='%s' is not wanted - SKIPPED!", domain)
continue
try:
- logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", row["domain"], row["origin"], row["software"], row["nodeinfo_url"])
- federation.fetch_instances(row["domain"], row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
+ logger.info("Fetching instances for domain='%s',origin='%s',software='%s',nodeinfo_url='%s'", domain, row["origin"], row["software"], row["nodeinfo_url"])
+ federation.fetch_instances(domain, row["origin"], row["software"], inspect.currentframe().f_code.co_name, row["nodeinfo_url"])
except network.exceptions as exception:
- logger.warning("Exception '%s' during fetching instances (fetch_instances) from row[domain]='%s'", type(exception), row["domain"])
- instances.set_last_error(row["domain"], exception)
+ logger.warning("Exception '%s' during fetching instances (fetch_instances) from domain='%s'", type(exception), domain)
+ instances.set_last_error(domain, exception)
logger.debug("Success - EXIT!")
return 0
def fetch_oliphant(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "codeberg.org"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "codeberg.org"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
-
- locking.acquire()
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
# Base URL
- base_url = f"https://{api_domain}/oliphant/blocklists/raw/branch/main/blocklists"
+ base_url = f"https://{source_domain}/oliphant/blocklists/raw/branch/main/blocklists"
# URLs to fetch
blocklists = (
},{
"blocker": "sunny.garden",
"csv_url": "mastodon/sunny.garden.csv",
+ },{
+ "blocker": "sunny.garden",
+ "csv_url": "mastodon/gardenfence.csv",
},{
"blocker": "solarpunk.moe",
"csv_url": "mastodon/solarpunk.moe.csv",
},{
"blocker": "union.place",
"csv_url": "mastodon/union.place.csv",
+ },{
+ "blocker": "oliphant.social",
+ "csv_url": "mastodon/birdsite.csv",
}
)
elif args.domain in domains:
logger.debug("args.domain='%s' already handled - SKIPPED!", args.domain)
continue
- elif instances.is_recent(block["blocker"]):
- logger.debug("block[blocker]='%s' has been recently crawled - SKIPPED!", block["blocker"])
- continue
# Fetch this URL
logger.info("Fetching csv_url='%s' for blocker='%s' ...", block["csv_url"], block["blocker"])
blockdict = list()
- logger.info("Processing %d rows ...", len(reader))
cnt = 0
for row in reader:
logger.debug("row[%s]='%s'", type(row), row)
continue
if "#severity" in row:
- severity = row["#severity"]
+ severity = blocks.alias_block_level(row["#severity"])
elif "severity" in row:
- severity = row["severity"]
+ severity = blocks.alias_block_level(row["severity"])
else:
logger.debug("row='%s' does not contain severity column", row)
continue
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ elif domain.endswith(".onion"):
+ logger.debug("domain='%s' is a TOR .onion domain - SKIPPED", domain)
+ continue
+ elif domain.endswith(".arpa"):
+ logger.debug("domain='%s' is a reverse IP address - SKIPPED", domain)
+ continue
+ elif domain.endswith(".tld"):
+ logger.debug("domain='%s' is a fake domain - SKIPPED", domain)
+ continue
+ elif domain.find("*") >= 0 or domain.find("?") >= 0:
+ logger.debug("domain='%s' is obfuscated - Invoking utils.deobfuscate(%s, %s) ...", domain, domain, block["blocker"])
+ domain = utils.deobfuscate(domain, block["blocker"])
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not validators.domain(domain):
+ logger.debug("domain='%s' is not a valid domain - SKIPPED!")
+ continue
+ elif blacklist.is_blacklisted(domain):
+ logger.warning("domain='%s' is blacklisted - SKIPPED!", domain)
continue
logger.debug("Marking domain='%s' as handled", domain)
domains.append(domain)
logger.debug("Processing domain='%s' ...", domain)
- processed = utils.process_domain(domain, block["blocker"], inspect.currentframe().f_code.co_name)
+ processed = processing.domain(domain, block["blocker"], inspect.currentframe().f_code.co_name)
logger.debug("processed='%s'", processed)
- if utils.process_block(block["blocker"], domain, None, "reject") and config.get("bot_enabled"):
+ if processing.block(block["blocker"], domain, None, severity) and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", domain, block["block_level"], block["blocker"])
blockdict.append({
"blocked": domain,
})
if reject_media:
- utils.process_block(block["blocker"], domain, None, "reject_media")
+ processing.block(block["blocker"], domain, None, "reject_media")
if reject_reports:
- utils.process_block(block["blocker"], domain, None, "reject_reports")
+ processing.block(block["blocker"], domain, None, "reject_reports")
- logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", block["blocker"], cnt)
- instances.set_total_blocks(block["blocker"], cnt)
+ logger.debug("block[blocker]='%s'", block["blocker"])
+ if block["blocker"] != "chaos.social":
+ logger.debug("Invoking instances.set_total_blocks(%s, domains()=%d) ...", block["blocker"], len(domains))
+ instances.set_total_blocks(block["blocker"], domains)
logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
if instances.has_pending(block["blocker"]):
def fetch_txt(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
+ logger.debug("Invoking locking.acquire() ...")
locking.acquire()
# Static URLs
continue
logger.debug("Processing domain='%s',row[blocker]='%s'", domain, row["blocker"])
- processed = utils.process_domain(domain, row["blocker"], inspect.currentframe().f_code.co_name)
+ processed = processing.domain(domain, row["blocker"], inspect.currentframe().f_code.co_name)
logger.debug("processed='%s'", processed)
if not processed:
def fetch_fedipact(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "fedipact.online"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "fedipact.online"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
-
- locking.acquire()
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
response = utils.fetch_url(
- f"https://{api_domain}",
+ f"https://{source_domain}",
network.web_headers,
(config.get("connection_timeout"), config.get("read_timeout"))
)
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
def fetch_joinfediverse(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "joinfediverse.wiki"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "joinfediverse.wiki"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
-
- locking.acquire()
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
raw = utils.fetch_url(
- f"https://{api_domain}/FediBlock",
+ f"https://{source_domain}/FediBlock",
network.web_headers,
(config.get("connection_timeout"), config.get("read_timeout"))
).text
logger.debug("text[]='%s'", type(text))
if not isinstance(text, str):
- logger.debug("text[]='%s' is not 'str' - SKIPPED!", type(text))
+ logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
continue
elif validators.domain(text.strip()):
logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
continue
text = tidyup.domain(text.strip())
- logger.debug("text='%s'", text)
+ logger.debug("text='%s' - AFTER!", text)
if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
logger.debug("Found header: '%s'=%d", text, cnt)
block_headers[cnt] = text
logger.debug("block='%s'", block)
if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
origin = block["blocked"]
+ logger.debug("origin='%s'", origin)
for subdomain in block["subdomain(s)"]:
block["blocked"] = subdomain + "." + origin
+ logger.debug("block[blocked]='%s'", block["blocked"])
blocking.append(block)
else:
blocking.append(block)
logger.debug("blocking()=%d", blocking)
for block in blocking:
logger.debug("block[]='%s'", type(block))
- block["blocked"] = tidyup.domain(block["blocked"])
+ if "blocked" not in block:
+ raise KeyError(f"block()={len(block)} does not have element 'blocked'")
+ block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
if block["blocked"] == "":
logger.debug("block[blocked] is empty - SKIPPED!")
continue
elif not utils.is_domain_wanted(block["blocked"]):
- logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+ logger.warning("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
continue
elif instances.is_recent(block["blocked"]):
- logger.debug("blocked='%s' has been recently checked - SKIPPED!", block["blocked"])
+ logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
continue
logger.info("Proccessing blocked='%s' ...", block["blocked"])
- utils.process_domain(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
+ processing.domain(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
blockdict = list()
for blocker in domains:
logger.debug("blocker[%s]='%s'", type(blocker), blocker)
for block in blocking:
- logger.debug("block[blocked]='%s',block[reason]='%s' - BEFORE!", block["blocked"], block["reason"])
+ logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
continue
logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
- if utils.process_block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
+ if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
blockdict.append({
"blocked": block["blocked"],
def recheck_obfuscation(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
+ logger.debug("Invoking locking.acquire() ...")
locking.acquire()
if isinstance(args.domain, str) and args.domain != "" and utils.is_domain_wanted(args.domain):
else:
logger.warning("Unknown sofware: domain='%s',software='%s'", row["domain"], row["software"])
- logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
- instances.set_total_blocks(row["domain"], blocking)
+ logger.debug("row[domain]='%s'", row["domain"])
+ if row["domain"] != "chaos.social":
+ logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
+ instances.set_total_blocks(row["domain"], blocking)
- logger.info("Checking %d block(s) from domain='%s' ...", len(blocking), row["domain"])
obfuscated = 0
blockdict = list()
+
+ logger.info("Checking %d block(s) from domain='%s' ...", len(blocking), row["domain"])
for block in blocking:
logger.debug("block[blocked]='%s'", block["blocked"])
blocked = None
elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
logger.debug("block='%s' is obfuscated.", block["blocked"])
obfuscated = obfuscated + 1
- blocked = utils.deobfuscate_domain(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+ blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
elif not utils.is_domain_wanted(block["blocked"]):
logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
logger.debug("blocked='%s' is already blocked by domain='%s' - SKIPPED!", blocked, row["domain"])
continue
- block["block_level"] = utils.alias_block_level(block["block_level"])
+ block["block_level"] = blocks.alias_block_level(block["block_level"])
logger.info("blocked='%s' has been deobfuscated to blocked='%s', adding ...", block["blocked"], blocked)
- if utils.process_block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
+ if processing.block(row["domain"], blocked, block["reason"], block["block_level"]) and block["block_level"] == "reject" and config.get("bot_enabled"):
logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], row["domain"])
blockdict.append({
"blocked": blocked,
def fetch_fedilist(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "demo.fedilist.com"
- if apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "demo.fedilist.com"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
- url = f"http://{api_domain}/instance/csv?onion=not"
+ url = f"http://{source_domain}/instance/csv?onion=not"
if args.software is not None and args.software != "":
logger.debug("args.software='%s'", args.software)
- url = f"http://{api_domain}/instance/csv?software={args.software}&onion=not"
-
- locking.acquire()
+ url = f"http://{source_domain}/instance/csv?software={args.software}&onion=not"
logger.info("Fetching url='%s' ...", url)
response = reqto.get(
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if not response.ok or response.status_code >= 300 or len(response.content) == 0:
- logger.warning("Failed fetching url='%s': response.ok='%s',response.status_code=%d,response.content()=%d - EXIT!", response.ok, response.status_code, len(response.text))
+ logger.warning("Failed fetching url='%s': response.ok='%s',response.status_code=%d,response.content()=%d - EXIT!", url, response.ok, response.status_code, len(response.text))
return 1
reader = csv.DictReader(response.content.decode("utf-8").splitlines(), dialect="unix")
logger.debug("reader[]='%s'", type(reader))
- blockdict = list()
for row in reader:
logger.debug("row[]='%s'", type(row))
+ if "hostname" not in row:
+ logger.warning("row()=%d has no element 'hostname' - SKIPPED!", len(row))
+ continue
+
+ logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
domain = tidyup.domain(row["hostname"])
logger.debug("domain='%s' - AFTER!", domain)
if domain == "":
logger.debug("domain is empty after tidyup: row[hostname]='%s' - SKIPPED!", row["hostname"])
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif (args.all is None or not args.all) and instances.is_registered(domain):
- logger.debug("domain='%s' is already registered, --all not specified: args.all[]='%s'", type(args.all))
+ logger.debug("domain='%s' is already registered, --all not specified: args.all[]='%s'", domain, type(args.all))
continue
elif instances.is_recent(domain):
logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
def update_nodeinfo(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
+ logger.debug("Invoking locking.acquire() ...")
locking.acquire()
if args.domain is not None and args.domain != "":
software = federation.determine_software(row["domain"])
logger.debug("Determined software='%s'", software)
- if software != row["software"]:
- logger.warning("Software type has changed from '%s' to '%s'!", row["software"], software)
+ if (software != row["software"] and software is not None) or args.force is True:
+ logger.warning("Software type for row[domain]='%s' has changed from '%s' to '%s'!", row["domain"], row["software"], software)
instances.set_software(row["domain"], software)
instances.set_success(row["domain"])
def fetch_instances_social(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
- api_domain = "instances.social"
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "instances.social"
if config.get("instances_social_api_key") == "":
logger.error("API key not set. Please set in your config.json file.")
return 1
- elif apis.is_recent(api_domain):
- logger.info("API from api_domain='%s' has recently being accessed - EXIT!", api_domain)
+ elif sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
return 0
else:
- logger.debug("api_domain='%s' has not been recently used, marking ...", api_domain)
- apis.update(api_domain)
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
- locking.acquire()
headers = {
"Authorization": f"Bearer {config.get('instances_social_api_key')}",
}
fetched = network.get_json_api(
- api_domain,
+ source_domain,
"/api/1.0/instances/list?count=0&sort_by=name",
headers,
(config.get("connection_timeout"), config.get("read_timeout"))
for row in rows:
logger.debug("row[]='%s'", type(row))
domain = tidyup.domain(row["name"])
-
logger.debug("domain='%s' - AFTER!", domain)
+
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = domain.encode("idna").decode("utf-8")
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if not utils.is_domain_wanted(domain):
logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
continue
logger.info("Fetching instances from domain='%s'", domain)
- federation.fetch_instances(domain, api_domain, None, inspect.currentframe().f_code.co_name)
+ federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
+
+ logger.debug("Success! - EXIT!")
+ return 0
+
+def convert_idna(args: argparse.Namespace) -> int:
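+ # Convert already stored domains, origins and block entries that are not yet punycode into their IDNA form.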
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ database.cursor.execute("SELECT domain FROM instances WHERE domain NOT LIKE '%xn--%' ORDER BY domain ASC")
+ rows = database.cursor.fetchall()
+
+ logger.debug("rows[]='%s'", type(rows))
+ instances.translate_idnas(rows, "domain")
+
+ database.cursor.execute("SELECT origin FROM instances WHERE origin NOT LIKE '%xn--%' ORDER BY origin ASC")
+ rows = database.cursor.fetchall()
+
+ logger.debug("rows[]='%s'", type(rows))
+ instances.translate_idnas(rows, "origin")
+
+ database.cursor.execute("SELECT blocker FROM blocks WHERE blocker NOT LIKE '%xn--%' ORDER BY blocker ASC")
+ rows = database.cursor.fetchall()
+
+ logger.debug("rows[]='%s'", type(rows))
+ blocks.translate_idnas(rows, "blocker")
+
+ database.cursor.execute("SELECT blocked FROM blocks WHERE blocked NOT LIKE '%xn--%' ORDER BY blocked ASC")
+ rows = database.cursor.fetchall()
+
+ logger.debug("rows[]='%s'", type(rows))
+ blocks.translate_idnas(rows, "blocked")
logger.debug("Success! - EXIT!")
return 0