-from reqto import get
-from reqto import post
from bs4 import BeautifulSoup
-from reqto import get
from hashlib import sha256
+import reqto
import re
import sqlite3
import json
def get_peers(domain: str) -> str:
try:
- res = get(f"https://{domain}/api/v1/instance/peers", headers=headers, timeout=5)
+ res = reqto.get(f"https://{domain}/api/v1/instance/peers", headers=headers, timeout=5)
return res.json()
except:
print("WARNING: Cannot fetch peers:", domain)
def get_type(instdomain: str) -> str:
try:
- res = get(f"https://{instdomain}/nodeinfo/2.1.json", headers=headers, timeout=5)
+ res = reqto.get(f"https://{instdomain}/nodeinfo/2.1.json", headers=headers, timeout=5)
if res.status_code == 404:
- res = get(f"https://{instdomain}/nodeinfo/2.0", headers=headers, timeout=5)
+ res = reqto.get(f"https://{instdomain}/nodeinfo/2.0", headers=headers, timeout=5)
if res.status_code == 404:
- res = get(f"https://{instdomain}/nodeinfo/2.0.json", headers=headers, timeout=5)
+ res = reqto.get(f"https://{instdomain}/nodeinfo/2.0.json", headers=headers, timeout=5)
if res.ok and "text/html" in res.headers["content-type"]:
- res = get(f"https://{instdomain}/nodeinfo/2.1", headers=headers, timeout=5)
+ res = reqto.get(f"https://{instdomain}/nodeinfo/2.1", headers=headers, timeout=5)
if res.ok:
if res.json()["software"]["name"] in ["akkoma", "rebased"]:
return "pleroma"
else:
return res.json()["software"]["name"]
elif res.status_code == 404:
- res = get(f"https://{instdomain}/api/v1/instance", headers=headers, timeout=5)
+ res = reqto.get(f"https://{instdomain}/api/v1/instance", headers=headers, timeout=5)
if res.ok:
return "mastodon"
except:
message = message + "(the list has been truncated to the first 20 entries)"
botheaders = {**headers, **{"Authorization": "Bearer " + config["bot_token"]}}
- req = post(f"{config['bot_instance']}/api/v1/statuses",
+ req = reqto.post(f"{config['bot_instance']}/api/v1/statuses",
data={"status":message, "visibility":config['bot_visibility'], "content_type":"text/plain"},
headers=botheaders, timeout=10).json()
return True
try:
doc = BeautifulSoup(
- get(f"https://{domain}/about/more", headers=headers, timeout=5).text,
+ reqto.get(f"https://{domain}/about/more", headers=headers, timeout=5).text,
"html.parser",
)
except:
try:
doc = BeautifulSoup(
- get(f"https://{domain}/friendica", headers=headers, timeout=5).text,
+ reqto.get(f"https://{domain}/friendica", headers=headers, timeout=5).text,
"html.parser",
)
except:
        # iterating through all "suspended" (follow-only in its terminology) instances page-by-page, since this server software doesn't support sending them all at once
try:
if counter == 0:
- doc = post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"suspended":True,"limit":step}), headers=headers, timeout=5).json()
+ doc = reqto.post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"suspended":True,"limit":step}), headers=headers, timeout=5).json()
if doc == []: raise
else:
- doc = post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"suspended":True,"limit":step,"offset":counter-1}), headers=headers, timeout=5).json()
+ doc = reqto.post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"suspended":True,"limit":step,"offset":counter-1}), headers=headers, timeout=5).json()
if doc == []: raise
for instance in doc:
# just in case
    # same pagination logic for the "blocked" (full suspend) list
try:
if counter == 0:
- doc = post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"blocked":True,"limit":step}), headers=headers, timeout=5).json()
+ doc = reqto.post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"blocked":True,"limit":step}), headers=headers, timeout=5).json()
if doc == []: raise
else:
- doc = post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"blocked":True,"limit":step,"offset":counter-1}), headers=headers, timeout=5).json()
+ doc = reqto.post(f"https://{domain}/api/federation/instances", data=json.dumps({"sort":"+caughtAt","host":None,"blocked":True,"limit":step,"offset":counter-1}), headers=headers, timeout=5).json()
if doc == []: raise
for instance in doc:
if instance["isBlocked"]:
+import reqto
import time
import bs4
import fba
try:
# Blocks
federation = reqto.get(
- f"https://{blocker}/nodeinfo/2.1.json", headers=headers, timeout=5
+ f"https://{blocker}/nodeinfo/2.1.json", headers=fba.headers, timeout=5
).json()["metadata"]["federation"]
if "mrf_simple" in federation:
for block_level, blocks in (
blocked = searchres[0]
fba.c.execute(
- "SELECT domain FROM instances WHERE domain = ?", (blocked)
+ "SELECT domain FROM instances WHERE domain = ?", (blocked,)
)
if fba.c.fetchone() == None:
    # handling CSRF, I've seen at least one server requiring it to access the endpoint
meta = bs4.BeautifulSoup(
- reqto.get(f"https://{blocker}/about", headers=headers, timeout=5).text,
+ reqto.get(f"https://{blocker}/about", headers=fba.headers, timeout=5).text,
"html.parser",
)
try:
csrf = meta.find("meta", attrs={"name": "csrf-token"})["content"]
- reqheaders = {**headers, **{"x-csrf-token": csrf}}
+        reqheaders = {**fba.headers, **{"x-csrf-token": csrf}}
except:
- reqheaders = headers
+        reqheaders = fba.headers
blocks = reqto.get(
- f"https://{blocker}/api/v1/instance/domain_blocks", headers=reqheaders, timeout=5
+        f"https://{blocker}/api/v1/instance/domain_blocks", headers=reqheaders, timeout=5
).json()
print("DEBUG: blocks():", len(blocks))
try:
# Blocks
federation = reqto.get(
- f"https://{blocker}/api/v1/instance/peers?filter=suspended", headers=headers, timeout=5
+ f"https://{blocker}/api/v1/instance/peers?filter=suspended", headers=fba.headers, timeout=5
).json()
if (federation == None):