import reqto
import validators
-from fba import csrf
from fba import database
from fba import utils
from fba.helpers import blacklist
+from fba.helpers import blocklists
from fba.helpers import config
from fba.helpers import cookies
+from fba.helpers import dicts as dict_helper
+from fba.helpers import domain as domain_helper
from fba.helpers import locking
from fba.helpers import processing
from fba.helpers import software as software_helper
from fba.helpers import tidyup
+from fba.http import csrf
from fba.http import federation
from fba.http import network
def check_instance(args: argparse.Namespace) -> int:
logger.debug("args.domain='%s' - CALLED!", args.domain)
+
status = 0
if not validators.domain(args.domain):
logger.warning("args.domain='%s' is not valid", args.domain)
        return 100
try:
- logger.debug("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
+ logger.info("Fetching JSON from pixelfed.org API, headers()=%d ...", len(headers))
fetched = network.get_json_api(
source_domain,
"/api/v1/servers/all.json?scope=All&country=all&language=all",
domain = row["domain"].encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
logger.debug("domain='%s' is already registered - SKIPPED!", domain)
elif entry["domain"] == "":
logger.debug("entry[domain] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(entry["domain"]):
- logger.warning("entry[domain]='%s' is not wanted - SKIPPED!", entry["domain"])
+ elif not domain_helper.is_wanted(entry["domain"]):
+ logger.debug("entry[domain]='%s' is not wanted - SKIPPED!", entry["domain"])
continue
elif instances.is_registered(entry["domain"]):
logger.debug("entry[domain]='%s' is already registered - SKIPPED!", entry["domain"])
if args.domain is not None and args.domain != "":
# Re-check single domain
- logger.debug("Querying database for single args.domain='%s' ...", args.domain)
+ logger.debug("Querying database for args.domain='%s' ...", args.domain)
database.cursor.execute(
- "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE domain = ?", [args.domain]
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE domain = ? LIMIT 1", [args.domain]
)
elif args.software is not None and args.software != "":
# Re-check single software
logger.debug("Querying database for args.software='%s' ...", args.software)
database.cursor.execute(
- "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? AND nodeinfo_url IS NOT NULL", [args.software]
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software = ? AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC", [args.software]
+ )
+ elif args.force:
+ # Re-check all
+ logger.debug("Re-checking all instances ...")
+ database.cursor.execute(
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC"
)
else:
# Re-check after "timeout" (aka. minimum interval)
database.cursor.execute(
- "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND (last_blocked IS NULL OR last_blocked < ?) AND nodeinfo_url IS NOT NULL ORDER BY rowid DESC", [time.time() - config.get("recheck_block")]
+ "SELECT domain, software, origin, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'lemmy', 'friendica', 'misskey') AND (last_blocked IS NULL OR last_blocked < ?) AND nodeinfo_url IS NOT NULL ORDER BY total_blocks DESC, last_updated ASC", [time.time() - config.get("recheck_block")]
)
rows = database.cursor.fetchall()
logger.info("Checking %d entries ...", len(rows))
for blocker, software, origin, nodeinfo_url in rows:
logger.debug("blocker='%s',software='%s',origin='%s',nodeinfo_url='%s'", blocker, software, origin, nodeinfo_url)
- blocker = tidyup.domain(blocker)
- logger.debug("blocker='%s' - AFTER!", blocker)
- if blocker == "":
- logger.warning("blocker is now empty!")
- continue
- elif nodeinfo_url is None or nodeinfo_url == "":
- logger.debug("blocker='%s',software='%s' has empty nodeinfo_url", blocker, software)
- continue
- elif not utils.is_domain_wanted(blocker):
+ if not domain_helper.is_wanted(blocker):
logger.warning("blocker='%s' is not wanted - SKIPPED!", blocker)
continue
- logger.debug("blocker='%s'", blocker)
+ logger.debug("Setting last_blocked,has_obfuscation=false for blocker='%s' ...", blocker)
instances.set_last_blocked(blocker)
instances.set_has_obfuscation(blocker, False)
- blocking = list()
- if software == "pleroma":
- logger.info("blocker='%s',software='%s'", blocker, software)
- blocking = pleroma.fetch_blocks(blocker, nodeinfo_url)
- elif software == "mastodon":
- logger.info("blocker='%s',software='%s'", blocker, software)
- blocking = mastodon.fetch_blocks(blocker, nodeinfo_url)
- elif software == "lemmy":
- logger.info("blocker='%s',software='%s'", blocker, software)
- blocking = lemmy.fetch_blocks(blocker, nodeinfo_url)
- elif software == "friendica":
- logger.info("blocker='%s',software='%s'", blocker, software)
- blocking = friendica.fetch_blocks(blocker)
- elif software == "misskey":
- logger.info("blocker='%s',software='%s'", blocker, software)
- blocking = misskey.fetch_blocks(blocker)
- else:
- logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)
+    # chaos.social ("c.s") isn't part of oliphant's "hidden" blocklists
+ if blocker == "chaos.social" or blocklists.has(blocker):
+ logger.debug("Skipping blocker='%s', run ./fba.py fetch_cs or fetch_oliphant instead!", blocker)
+ continue
- logger.debug("blocker='%s'", blocker)
- if blocker != "chaos.social":
- logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
- instances.set_total_blocks(blocker, blocking)
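+        # Prefer the generic federation helper; the software-specific fetchers below only act as a fallback when it returns nothing.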
+ logger.debug("Invoking federation.fetch_blocks(%s) ...", blocker)
+ blocking = federation.fetch_blocks(blocker)
+
+        logger.info("blocker='%s',software='%s' returned %d block entries.", blocker, software, len(blocking))
+ if len(blocking) == 0:
+ logger.debug("blocker='%s',software='%s' - fetching blocklist ...", blocker, software)
+ if software == "pleroma":
+ blocking = pleroma.fetch_blocks(blocker)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ elif software == "mastodon":
+ blocking = mastodon.fetch_blocks(blocker)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ elif software == "lemmy":
+ blocking = lemmy.fetch_blocks(blocker)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ elif software == "friendica":
+ blocking = friendica.fetch_blocks(blocker)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ elif software == "misskey":
+ blocking = misskey.fetch_blocks(blocker)
+ logger.debug("blocker='%s' returned %d entries,software='%s'", blocker, len(blocking), software)
+ else:
+ logger.warning("Unknown software: blocker='%s',software='%s'", blocker, software)
+
+ logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
+ instances.set_total_blocks(blocker, blocking)
- logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
blockdict = list()
+
+ logger.info("Checking %d entries from blocker='%s',software='%s' ...", len(blocking), blocker, software)
for block in blocking:
logger.debug("blocked='%s',block_level='%s',reason='%s'", block["blocked"], block["block_level"], block["reason"])
block["blocked"] = block["blocked"].lstrip(".").encode("idna").decode("utf-8")
logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
- if not utils.is_domain_wanted(block["blocked"]):
- logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+ if not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
elif block["block_level"] in ["accept", "accepted"]:
logger.debug("blocked='%s' is accepted, not wanted here - SKIPPED!", block["blocked"])
logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
if instances.has_pending(blocker):
logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update_data(blocker)
+ instances.update(blocker)
logger.debug("Invoking commit() ...")
database.connection.commit()
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
- elif instances.is_recent(domain):
- logger.debug("domain='%s' is recently being handled - SKIPPED!", domain)
- continue
software = software_helper.alias(software)
logger.info("Fetching instances for domain='%s'", domain)
"reject": list(),
}
- raw = utils.fetch_url(f"https://{source_domain}/todon/domainblocks", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+ logger.debug("Fetching domainblocks from source_domain='%s'", source_domain)
+ raw = utils.fetch_url(
+ f"https://{source_domain}/todon/domainblocks",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
doc = bs4.BeautifulSoup(raw, "html.parser")
blocker = "todon.eu"
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
+ instances.set_last_blocked(blocker)
instances.set_total_blocks(blocker, blocking)
blockdict = list()
logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
if instances.has_pending(blocker):
logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update_data(blocker)
+ instances.update(blocker)
logger.debug("Success! - EXIT!")
return 0
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
- raw = utils.fetch_url(f"https://{source_domain}/chaossocial/meta/master/federation.md", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout"))).text
+ logger.info("Fetching federation.md from source_domain='%s' ...", source_domain)
+ raw = utils.fetch_url(
+ f"https://{source_domain}/chaossocial/meta/master/federation.md",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
doc = bs4.BeautifulSoup(markdown.markdown(raw, extensions=extensions), features="html.parser")
blocker = "chaos.social"
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", blocker, len(blocking))
+ instances.set_last_blocked(blocker)
instances.set_total_blocks(blocker, blocking)
logger.debug("blocklist[silenced]()=%d,blocklist[reject]()=%d", len(blocklist["silenced"]), len(blocklist["reject"]))
logger.debug("Checking if blocker='%s' has pending updates ...", blocker)
if instances.has_pending(blocker):
logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update_data(blocker)
+ instances.update(blocker)
logger.debug("Success! - EXIT!")
return 0
response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing RSS feed (%d Bytes) ...", len(response.text))
rss = atoma.parse_rss_bytes(response.content)
logger.debug("rss[]='%s'", type(rss))
for item in rss.items:
- logger.debug("item='%s'", item)
+ logger.debug("item[%s]='%s'", type(item), item)
domain = tidyup.domain(item.link.split("=")[1])
logger.debug("domain='%s' - AFTER!", domain)
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
logger.debug("domain='%s' is already added - SKIPPED!", domain)
locking.acquire()
source_domain = "ryona.agency"
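+    # Default ATOM feed of the FBA bot account on ryona.agency; overridden below when args.feed holds a valid URL.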
+ feed = f"https://{source_domain}/users/fba/feed.atom"
+
+ logger.debug("args.feed[%s]='%s'", type(args.feed), args.feed)
+ if args.feed is not None and validators.url(args.feed):
+ logger.debug("Setting feed='%s' ...", args.feed)
+ feed = str(args.feed)
+ source_domain = urlparse(args.feed).netloc
+
if sources.is_recent(source_domain):
        logger.info("API from source_domain='%s' has recently been accessed - EXIT!", source_domain)
return 0
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
- feed = f"https://{source_domain}/users/fba/feed.atom"
-
domains = list()
logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
response = utils.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing ATOM feed (%d Bytes) ...", len(response.text))
atom = atoma.parse_atom_bytes(response.content)
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
logger.debug("domain='%s' is already added - SKIPPED!", domain)
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
+ # Initialize values
+ domain = tidyup.domain(args.domain)
+ origin = software = None
+
+    # Fetch existing record so a known origin/software can be passed to the initial fetch
+ database.cursor.execute("SELECT origin, software FROM instances WHERE domain = ? LIMIT 1", [args.domain])
+ row = database.cursor.fetchone()
+ if row is not None:
+ origin = row["origin"]
+ software = row["software"]
+
# Initial fetch
try:
- logger.info("Fetching instances from args.domain='%s' ...", args.domain)
- federation.fetch_instances(args.domain, None, None, inspect.currentframe().f_code.co_name)
+ logger.info("Fetching instances from args.domain='%s',origin='%s',software='%s' ...", domain, origin, software)
+ federation.fetch_instances(domain, origin, software, inspect.currentframe().f_code.co_name)
except network.exceptions as exception:
logger.warning("Exception '%s' during fetching instances (fetch_instances) from args.domain='%s'", type(exception), args.domain)
instances.set_last_error(args.domain, exception)
- instances.update_data(args.domain)
+ instances.update(args.domain)
return 100
if args.single:
# Loop through some instances
database.cursor.execute(
- "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY rowid DESC", [time.time() - config.get("recheck_instance")]
+ "SELECT domain, origin, software, nodeinfo_url FROM instances WHERE software IN ('pleroma', 'mastodon', 'friendica', 'misskey', 'lemmy', 'peertube', 'takahe', 'gotosocial', 'brighteon', 'wildebeest', 'bookwyrm', 'mitra', 'areionskey', 'mammuthus', 'neodb') AND (last_instance_fetch IS NULL OR last_instance_fetch < ?) ORDER BY total_peers DESC, last_updated ASC", [time.time() - config.get("recheck_instance")]
)
rows = database.cursor.fetchall()
domain = row["domain"].encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("Domain domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("Domain domain='%s' is not wanted - SKIPPED!", domain)
continue
try:
# Base URL
base_url = f"https://{source_domain}/oliphant/blocklists/raw/branch/main/blocklists"
- # URLs to fetch
- blocklists = (
- {
- "blocker": "artisan.chat",
- "csv_url": "mastodon/artisan.chat.csv",
- },{
- "blocker": "mastodon.art",
- "csv_url": "mastodon/mastodon.art.csv",
- },{
- "blocker": "pleroma.envs.net",
- "csv_url": "mastodon/pleroma.envs.net.csv",
- },{
- "blocker": "oliphant.social",
- "csv_url": "mastodon/_unified_tier3_blocklist.csv",
- },{
- "blocker": "mastodon.online",
- "csv_url": "mastodon/mastodon.online.csv",
- },{
- "blocker": "mastodon.social",
- "csv_url": "mastodon/mastodon.social.csv",
- },{
- "blocker": "mastodon.social",
- "csv_url": "other/missing-tier0-mastodon.social.csv",
- },{
- "blocker": "rage.love",
- "csv_url": "mastodon/rage.love.csv",
- },{
- "blocker": "sunny.garden",
- "csv_url": "mastodon/sunny.garden.csv",
- },{
- "blocker": "sunny.garden",
- "csv_url": "mastodon/gardenfence.csv",
- },{
- "blocker": "solarpunk.moe",
- "csv_url": "mastodon/solarpunk.moe.csv",
- },{
- "blocker": "toot.wales",
- "csv_url": "mastodon/toot.wales.csv",
- },{
- "blocker": "union.place",
- "csv_url": "mastodon/union.place.csv",
- },{
- "blocker": "oliphant.social",
- "csv_url": "mastodon/birdsite.csv",
- }
- )
-
domains = list()
- logger.debug("Downloading %d files ...", len(blocklists))
- for block in blocklists:
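+    # The set of CSV sources to download is defined in blocklists.oliphant_blocklists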
+ logger.debug("Downloading %d files ...", len(blocklists.oliphant_blocklists))
+ for block in blocklists.oliphant_blocklists:
# Is domain given and not equal blocker?
if isinstance(args.domain, str) and args.domain != block["blocker"]:
logger.debug("Skipping blocker='%s', not matching args.domain='%s'", block["blocker"], args.domain)
logger.debug("args.domain='%s' already handled - SKIPPED!", args.domain)
continue
+ instances.set_last_blocked(block["blocker"])
+
# Fetch this URL
logger.info("Fetching csv_url='%s' for blocker='%s' ...", block["csv_url"], block["blocker"])
response = utils.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
- if not response.ok or response.status_code >= 300 or response.content == "":
+    if not response.ok or response.status_code > 200 or len(response.content) == 0:
logger.warning("Could not fetch csv_url='%s' for blocker='%s' - SKIPPED!", block["csv_url"], block["blocker"])
continue
processing.block(block["blocker"], domain, None, "reject_reports")
logger.debug("block[blocker]='%s'", block["blocker"])
- if block["blocker"] != "chaos.social":
+ if not blocklists.has(block["blocker"]):
logger.debug("Invoking instances.set_total_blocks(%s, domains()=%d) ...", block["blocker"], len(domains))
instances.set_total_blocks(block["blocker"], domains)
logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
if instances.has_pending(block["blocker"]):
logger.debug("Flushing updates for block[blocker]='%s' ...", block["blocker"])
- instances.update_data(block["blocker"])
+ instances.update(block["blocker"])
logger.debug("Invoking commit() ...")
database.connection.commit()
response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Returned %d Bytes for processing", len(response.text.strip()))
domains = response.text.split("\n")
if domain == "":
logger.debug("domain is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ elif not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_recent(domain):
logger.debug("domain='%s' has been recently crawled - SKIPPED!", domain)
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
+ logger.info("Fetching / from source_domain='%s' ...", source_domain)
response = utils.fetch_url(
f"https://{source_domain}",
network.web_headers,
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Parsing %d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif instances.is_registered(domain):
logger.debug("domain='%s' is already registered - SKIPPED!", domain)
continue
logger.info("Fetching domain='%s' ...", domain)
- federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)
+ federation.fetch_instances(domain, "beach.city", None, inspect.currentframe().f_code.co_name)
+
+ logger.debug("Success! - EXIT!")
+ return 0
+
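+# Fetch the public instance list from instances.joinmobilizon.org and register any Mobilizon hosts that are wanted and not yet known.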
+def fetch_joinmobilizon(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "instances.joinmobilizon.org"
+ if sources.is_recent(source_domain):
+        logger.info("API from source_domain='%s' has recently been accessed - EXIT!", source_domain)
+ return 0
+ else:
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
+
+ logger.info("Fetching instances from source_domain='%s' ...", source_domain)
+ raw = utils.fetch_url(
+ f"https://{source_domain}/api/v1/instances",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
+ logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
+
+ parsed = json.loads(raw)
+ logger.debug("parsed[%s]()=%d", type(parsed), len(parsed))
+
+ if "data" not in parsed:
+        logger.warning("parsed()=%d does not contain key 'data'", len(parsed))
+ return 1
+
+ logger.info("Checking %d instances ...", len(parsed["data"]))
+ for row in parsed["data"]:
+ logger.debug("row[]='%s'", type(row))
+ if "host" not in row:
+ logger.warning("row='%s' does not contain key 'host' - SKIPPED!", row)
+ continue
+ elif not domain_helper.is_wanted(row["host"]):
+ logger.debug("row[host]='%s' is not wanted - SKIPPED!", row["host"])
+ continue
+ elif instances.is_registered(row["host"]):
+ logger.debug("row[host]='%s' is already registered - SKIPPED!", row["host"])
+ continue
+
+ logger.info("Fetching row[host]='%s' ...", row["host"])
+ federation.fetch_instances(row["host"], "demo.mobilizon.org", None, inspect.currentframe().f_code.co_name)
logger.debug("Success! - EXIT!")
return 0
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
+ logger.info("Fetching instances.json from source_domain='%s' ...", source_domain)
raw = utils.fetch_url(
f"https://{source_domain}/instances.json",
network.web_headers,
if "url" not in row:
logger.warning("row()=%d does not have element 'url' - SKIPPED!", len(row))
continue
- elif not utils.is_domain_wanted(row["url"]):
+ elif not domain_helper.is_wanted(row["url"]):
logger.debug("row[url]='%s' is not wanted - SKIPPED!", row["url"])
continue
elif instances.is_registered(row["url"]):
logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
sources.update(source_domain)
+ logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
raw = utils.fetch_url(
f"https://{source_domain}/FediBlock",
network.web_headers,
if block["blocked"] == "":
logger.debug("block[blocked] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(block["blocked"]):
- logger.warning("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
+ elif not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
continue
elif instances.is_recent(block["blocked"]):
logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
continue
- logger.info("Proccessing blocked='%s' ...", block["blocked"])
+        logger.debug("Processing blocked='%s' ...", block["blocked"])
processing.domain(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
blockdict = list()
for blocker in domains:
blocker = blocker[0]
logger.debug("blocker[%s]='%s'", type(blocker), blocker)
+ instances.set_last_blocked(blocker)
for block in blocking:
logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
if block["blocked"] == "":
logger.debug("block[blocked] is empty - SKIPPED!")
continue
- elif not utils.is_domain_wanted(block["blocked"]):
- logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+ elif not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
if instances.has_pending(blocker):
logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update_data(blocker)
+ instances.update(blocker)
logger.debug("Invoking commit() ...")
database.connection.commit()
logger.debug("Invoking locking.acquire() ...")
locking.acquire()
- if isinstance(args.domain, str) and args.domain != "" and utils.is_domain_wanted(args.domain):
+ if isinstance(args.domain, str) and args.domain != "" and domain_helper.is_wanted(args.domain):
database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND domain = ?", [args.domain])
elif isinstance(args.software, str) and args.software != "" and validators.domain(args.software) == args.software:
database.cursor.execute("SELECT domain, software, nodeinfo_url FROM instances WHERE has_obfuscation = 1 AND software = ?", [args.software])
logger.info("Checking %d domains ...", len(rows))
for row in rows:
logger.debug("Fetching peers from domain='%s',software='%s',nodeinfo_url='%s' ...", row["domain"], row["software"], row["nodeinfo_url"])
- if (args.force is None or not args.force) and instances.is_recent(row["domain"]) and args.domain is None and args.software is None:
+ if (args.force is None or not args.force) and args.domain is None and args.software is None and instances.is_recent(row["domain"], "last_blocked"):
logger.debug("row[domain]='%s' has been recently checked, args.force[]='%s' - SKIPPED!", row["domain"], type(args.force))
continue
- blocking = list()
- if row["software"] == "pleroma":
- logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
- blocking = pleroma.fetch_blocks(row["domain"], row["nodeinfo_url"])
- elif row["software"] == "mastodon":
- logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
- blocking = mastodon.fetch_blocks(row["domain"], row["nodeinfo_url"])
- elif row["software"] == "lemmy":
- logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
- blocking = lemmy.fetch_blocks(row["domain"], row["nodeinfo_url"])
- elif row["software"] == "friendica":
- logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
- blocking = friendica.fetch_blocks(row["domain"])
- elif row["software"] == "misskey":
- logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
- blocking = misskey.fetch_blocks(row["domain"])
- else:
- logger.warning("Unknown sofware: domain='%s',software='%s'", row["domain"], row["software"])
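+    # As above: try the generic federation helper first and fall back to the software-specific fetchers when it yields nothing.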
+ logger.debug("Invoking federation.fetch_blocks(%s) ...", row["domain"])
+ blocking = federation.fetch_blocks(row["domain"])
+
+ logger.debug("blocking()=%d", len(blocking))
+ if len(blocking) == 0:
+ if row["software"] == "pleroma":
+ logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
+ blocking = pleroma.fetch_blocks(row["domain"])
+ elif row["software"] == "mastodon":
+ logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
+ blocking = mastodon.fetch_blocks(row["domain"])
+ elif row["software"] == "lemmy":
+ logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
+ blocking = lemmy.fetch_blocks(row["domain"])
+ elif row["software"] == "friendica":
+ logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
+ blocking = friendica.fetch_blocks(row["domain"])
+ elif row["software"] == "misskey":
+ logger.debug("domain='%s',software='%s'", row["domain"], row["software"])
+ blocking = misskey.fetch_blocks(row["domain"])
+ else:
+ logger.warning("Unknown software: domain='%s',software='%s'", row["domain"], row["software"])
+        # chaos.social ("c.s") isn't part of oliphant's "hidden" blocklists
logger.debug("row[domain]='%s'", row["domain"])
- # chaos.social requires special care ...
- if row["domain"] != "chaos.social":
+ if row["domain"] != "chaos.social" and not blocklists.has(row["domain"]):
logger.debug("Invoking instances.set_total_blocks(%s, %d) ...", row["domain"], len(blocking))
+ instances.set_last_blocked(row["domain"])
instances.set_total_blocks(row["domain"], blocking)
obfuscated = 0
logger.debug("block='%s' is obfuscated.", block["blocked"])
obfuscated = obfuscated + 1
blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
- elif not utils.is_domain_wanted(block["blocked"]):
- logger.warning("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+ elif not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
elif blocks.is_instance_blocked(row["domain"], block["blocked"]):
logger.debug("blocked='%s' is already blocked - SKIPPED!", block["blocked"])
if blocked is not None and blocked != block["blocked"]:
logger.debug("blocked='%s' was deobfuscated to blocked='%s'", block["blocked"], blocked)
obfuscated = obfuscated - 1
+
if blocks.is_instance_blocked(row["domain"], blocked):
logger.debug("blocked='%s' is already blocked by domain='%s' - SKIPPED!", blocked, row["domain"])
continue
+ elif blacklist.is_blacklisted(blocked):
+ logger.debug("blocked='%s' is blacklisted - SKIPPED!", blocked)
+ continue
block["block_level"] = blocks.alias_block_level(block["block_level"])
"reason" : block["reason"],
})
+        logger.debug("Setting obfuscated=%d for row[domain]='%s' ...", obfuscated, row["domain"])
+ instances.set_obfuscated_blocks(row["domain"], obfuscated)
+
logger.info("domain='%s' has %d obfuscated domain(s)", row["domain"], obfuscated)
if obfuscated == 0 and len(blocking) > 0:
logger.info("Block list from domain='%s' has been fully deobfuscated.", row["domain"])
if instances.has_pending(row["domain"]):
logger.debug("Flushing updates for blocker='%s' ...", row["domain"])
- instances.update_data(row["domain"])
+ instances.update(row["domain"])
logger.debug("Invoking commit() ...")
database.connection.commit()
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if not response.ok or response.status_code >= 300 or len(response.content) == 0:
+ if not response.ok or response.status_code > 200 or len(response.content) == 0:
        logger.warning("Failed fetching url='%s': response.ok='%s',response.status_code=%d,response.content()=%d - EXIT!", url, response.ok, response.status_code, len(response.content))
return 1
reader = csv.DictReader(response.content.decode("utf-8").splitlines(), dialect="unix")
logger.debug("reader[]='%s'", type(reader))
- for row in reader:
+ if reader is None:
+ logger.warning("Failed parsing response.content()=%d as CSV content", len(response.content))
+ return 2
+
+ rows = list(reader)
+
+ logger.info("Checking %d rows ...", len(rows))
+ for row in rows:
logger.debug("row[]='%s'", type(row))
if "hostname" not in row:
logger.warning("row()=%d has no element 'hostname' - SKIPPED!", len(row))
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif (args.force is None or not args.force) and instances.is_registered(domain):
logger.debug("domain='%s' is already registered, --force not specified: args.force[]='%s'", domain, type(args.force))
database.cursor.execute("SELECT domain, software FROM instances WHERE domain = ?", [args.domain])
elif args.software is not None and args.software != "":
logger.info("Fetching domains for args.software='%s'", args.software)
- database.cursor.execute("SELECT domain, software FROM instances WHERE software = ?", [args.software])
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.software.lower(), time.time() - config.get("recheck_nodeinfo")])
+ elif args.mode is not None and args.mode != "":
+ logger.info("Fetching domains for args.mode='%s'", args.mode.upper())
+ database.cursor.execute("SELECT domain, software FROM instances WHERE detection_mode = ? AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [args.mode.upper(), time.time() - config.get("recheck_nodeinfo")])
+ elif args.no_software:
+ logger.info("Fetching domains with no software type detected ...")
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IS NULL AND (last_nodeinfo < ? OR last_nodeinfo IS NULL)", [time.time() - config.get("recheck_nodeinfo")])
else:
        logger.info("Fetching domains with stale or missing nodeinfo ...")
database.cursor.execute("SELECT domain, software FROM instances WHERE last_nodeinfo < ? OR last_nodeinfo IS NULL", [time.time() - config.get("recheck_nodeinfo")])
cnt = 0
for row in domains:
logger.debug("row[]='%s'", type(row))
+ if not args.force and instances.is_recent(row["domain"], "last_nodeinfo"):
+ logger.debug("row[domain]='%s' has been recently checked - SKIPPED!", row["domain"])
+ continue
+
try:
logger.info("Checking nodeinfo for row[domain]='%s',row[software]='%s' (%s%%) ...", row["domain"], row["software"], "{:5.1f}".format(cnt / len(domains) * 100))
software = federation.determine_software(row["domain"])
logger.debug("Determined software='%s'", software)
if (software != row["software"] and software is not None) or args.force is True:
+ logger.debug("software='%s'", software)
+ if software is None:
+ logger.debug("Setting nodeinfo_url to 'None' for row[domain]='%s' ...", row["domain"])
+ instances.set_nodeinfo_url(row["domain"], None)
+
logger.warning("Software type for row[domain]='%s' has changed from '%s' to '%s'!", row["domain"], row["software"], software)
instances.set_software(row["domain"], software)
- instances.set_success(row["domain"])
+ if software is not None:
+ logger.debug("Setting row[domain]='%s' as successfully determined ...", row["domain"])
+ instances.set_success(row["domain"])
except network.exceptions as exception:
logger.warning("Exception '%s' during updating nodeinfo for row[domain]='%s'", type(exception), row["domain"])
instances.set_last_error(row["domain"], exception)
instances.set_last_nodeinfo(row["domain"])
- instances.update_data(row["domain"])
+ instances.update(row["domain"])
cnt = cnt + 1
logger.debug("Success! - EXIT!")
"Authorization": f"Bearer {config.get('instances_social_api_key')}",
}
+ logger.info("Fetching list from source_domain='%s' ...", source_domain)
fetched = network.get_json_api(
source_domain,
"/api/1.0/instances/list?count=0&sort_by=name",
domain = domain.encode("idna").decode("utf-8")
logger.debug("domain='%s' - AFTER!", domain)
- if not utils.is_domain_wanted(domain):
- logger.warning("domain='%s' is not wanted - SKIPPED!", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
continue
elif domain in domains:
logger.debug("domain='%s' is already added - SKIPPED!", domain)
logger.debug("Success! - EXIT!")
return 0
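+# Scrape the landing pages of known relay instances (activityrelay, aoderelay, selective-relay),
+# record their peer lists and queue any newly discovered domains for fetching.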
+def fetch_relays(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ if args.domain is not None and args.domain != "":
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay') AND domain = ? LIMIT 1", [args.domain])
+ else:
+ database.cursor.execute("SELECT domain, software FROM instances WHERE software IN ('activityrelay', 'aoderelay', 'selective-relay')")
+
+ domains = list()
+ rows = database.cursor.fetchall()
+
+ logger.info("Checking %d relays ...", len(rows))
+ for row in rows:
+ logger.debug("row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
+ peers = list()
+ if not args.force and instances.is_recent(row["domain"]):
+ logger.debug("row[domain]='%s' has been recently fetched - SKIPPED!", row["domain"])
+ continue
+
+ try:
+ logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
+ raw = utils.fetch_url(
+ f"https://{row['domain']}",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
+ logger.debug("raw[%s]()=%d", type(raw), len(raw))
+ except network.exceptions as exception:
+ logger.warning("Exception '%s' during fetching from relay '%s': '%s'", type(exception), row["domain"], str(exception))
+ instances.set_last_error(row["domain"], exception)
+ instances.set_last_instance_fetch(row["domain"])
+ instances.update(row["domain"])
+ continue
+
+ doc = bs4.BeautifulSoup(raw, features="html.parser")
+ logger.debug("doc[]='%s'", type(doc))
+
+ logger.debug("row[software]='%s'", row["software"])
+ if row["software"] == "activityrelay":
+ logger.debug("Checking row[domain]='%s' ...", row["domain"])
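+            # activityrelay lists its registered instances as plain text inside a <p> tag on the landing page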
+ tags = doc.findAll("p")
+
+ logger.debug("Checking %d paragraphs ...", len(tags))
+ for tag in tags:
+ logger.debug("tag[]='%s'", type(tag))
+ if len(tag.contents) == 0:
+ logger.debug("tag='%s' is an empty tag - SKIPPED!", tag)
+ continue
+ elif "registered instances" not in tag.contents[0]:
+ logger.debug("Skipping paragraph, text not found.")
+ continue
+
+ logger.debug("Found tag.contents[0][]='%s'", tag.contents[0])
+ for domain in tag.contents:
+ logger.debug("domain[%s]='%s'", type(domain), domain)
+ if not isinstance(domain, bs4.element.NavigableString) or "registered instances" in domain:
+ continue
+
+ domain = str(domain)
+ logger.debug("domain='%s'", domain)
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = tidyup.domain(domain)
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if domain == "":
+ logger.debug("Empty domain after tidyup.domain() from origin='%s' - SKIPPED!", row["domain"])
+ continue
+ elif domain not in peers:
+ logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
+ peers.append(domain)
+
+ if dict_helper.has_key(domains, "domain", domain):
+ logger.debug("domain='%s' already added", domain)
+ continue
+
+ logger.debug("Appending domain='%s',origin='%s',software='%s' ...", domain, row["domain"], row["software"])
+ domains.append({
+ "domain": domain,
+ "origin": row["domain"],
+ })
+ elif row["software"] in ["aoderelay", "selective-relay"]:
+ logger.debug("Checking row[domain]='%s' ...", row["domain"])
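+            # aoderelay renders each peer in a <section class="instance">, selective-relay as <li> items under <div id="instances">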
+ if row["software"] == "aoderelay":
+ tags = doc.findAll("section", {"class": "instance"})
+ else:
+ tags = doc.find("div", {"id": "instances"}).findAll("li")
+
+ logger.debug("Checking %d tags ...", len(tags))
+ for tag in tags:
+ logger.debug("tag[]='%s'", type(tag))
+
+ link = tag.find("a")
+ logger.debug("link[%s]='%s'", type(link), link)
+ if link is None:
+ logger.warning("tag='%s' has no a-tag ...", tag)
+ continue
+
+ components = urlparse(link["href"])
+ domain = components.netloc.lower()
+
+ if not domain_helper.is_wanted(domain):
+ logger.debug("domain='%s' is not wanted - SKIPPED!", domain)
+ continue
+
+ logger.debug("domain='%s' - BEFORE!", domain)
+ domain = tidyup.domain(domain)
+ logger.debug("domain='%s' - AFTER!", domain)
+
+ if domain == "":
+ logger.debug("Empty domain after tidyup.domain() from origin='%s' - SKIPPED!", row["domain"])
+ continue
+ elif domain not in peers:
+ logger.debug("Appending domain='%s' to peers list for relay='%s' ...", domain, row["domain"])
+ peers.append(domain)
+
+ if dict_helper.has_key(domains, "domain", domain):
+ logger.debug("domain='%s' already added", domain)
+ continue
+
+ logger.debug("Appending domain='%s',origin='%s',software='%s'", domain, row["domain"], row["software"])
+ domains.append({
+ "domain": domain,
+ "origin": row["domain"],
+ })
+ else:
+ logger.warning("row[domain]='%s',row[software]='%s' is not supported", row["domain"], row["software"])
+
+ logger.debug("Updating last_instance_fetch for row[domain]='%s' ...", row["domain"])
+ instances.set_last_instance_fetch(row["domain"])
+
+ logger.info("Relay '%s' has %d peer(s) registered.", row["domain"], len(peers))
+ instances.set_total_peers(row["domain"], peers)
+
+ logger.debug("Flushing data for row[domain]='%s'", row["domain"])
+ instances.update(row["domain"])
+
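+    # Fetch and register any newly discovered peer domains that aren't known yet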
+ logger.info("Checking %d domains ...", len(domains))
+ for row in domains:
+ logger.debug("row[domain]='%s',row[origin]='%s'", row["domain"], row["origin"])
+ if instances.is_registered(row["domain"]):
+ logger.debug("row[domain]='%s' is already registered - SKIPPED!", row["domain"])
+ continue
+
+ logger.info("Fetching row[domain]='%s',row[origin]='%s' ...", row["domain"], row["origin"])
+ federation.fetch_instances(row["domain"], row["origin"], None, inspect.currentframe().f_code.co_name)
+
+ logger.debug("Success! - EXIT!")
+ return 0
+
def convert_idna(args: argparse.Namespace) -> int:
logger.debug("args[]='%s' - CALLED!", type(args))
logger.debug("Success! - EXIT!")
return 0
+
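+# Remove instances (and their block records) whose stored domain no longer validates, then VACUUM the database.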
+def remove_invalid(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ database.cursor.execute("SELECT domain FROM instances ORDER BY domain ASC")
+ rows = database.cursor.fetchall()
+
+ logger.info("Checking %d domains ...", len(rows))
+ for row in rows:
+ logger.debug("row[domain]='%s'", row["domain"])
+ if not validators.domain(row["domain"].split("/")[0]):
+ logger.info("Invalid row[domain]='%s' found, removing ...", row["domain"])
+ database.cursor.execute("DELETE FROM blocks WHERE blocker = ? OR blocked = ?", [row["domain"], row["domain"]])
+ database.cursor.execute("DELETE FROM instances WHERE domain = ? LIMIT 1", [row["domain"]])
+
+ logger.debug("Invoking commit() ...")
+ database.connection.commit()
+
+    logger.info("Vacuum cleaning database ...")
+ database.cursor.execute("VACUUM")
+
+ logger.debug("Success! - EXIT!")
+ return 0