{
- "base_url" : "",
- "log_level" : "info",
- "port" : 8069,
- "useragent" : "Mozilla/5.0 (Windows NT 10.0; rv:102.0) Gecko/20100101 Firefox/102.0",
- "timeout" : 5,
- "bot_enabled" : false,
- "bot_instance" : "https://example.com",
- "bot_token" : "",
- "bot_visibility" : "unlisted",
- "slogan" : "### Your footer slogan ###",
- "recheck_instance": 3600,
- "recheck_block" : 3600
+ "base_url" : "",
+ "log_level" : "info",
+ "port" : 8069,
+ "useragent" : "Mozilla/5.0 (Windows NT 10.0; rv:102.0) Gecko/20100101 Firefox/102.0",
+ "connection_timeout": 2,
+ "read_timeout" : 5,
+ "bot_enabled" : false,
+ "bot_instance" : "https://example.com",
+ "bot_token" : "",
+ "bot_visibility" : "unlisted",
+ "slogan" : "### Your footer slogan ###",
+ "recheck_instance" : 3600,
+ "recheck_block" : 3600
}
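For reference, the single `timeout` value is being split because `requests` accepts either one number or a `(connect, read)` tuple for its `timeout` argument, so the two new keys map directly onto that pair. A minimal sketch of loading the values and passing them along, assuming the file is saved as `config.json` and using a hypothetical `fetch_instance_info()` helper:

import json

import requests as reqto

# Load the configuration shown above ("config.json" is an assumed filename).
with open("config.json", encoding="utf-8") as handle:
    config = json.load(handle)

headers = {"User-Agent": config["useragent"]}

def fetch_instance_info(domain: str) -> dict:
    # requests treats the tuple as (connection timeout, read timeout), both in seconds.
    res = reqto.get(
        f"https://{domain}/api/v1/instance",
        headers=headers,
        timeout=(config["connection_timeout"], config["read_timeout"]),
    )
    res.raise_for_status()
    return res.json()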
peers = None
try:
- res = reqto.get(f"https://{domain}/api/v1/instance/peers", headers=headers, timeout=config["timeout"])
+ res = reqto.get(f"https://{domain}/api/v1/instance/peers", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"]))
if not res.ok or res.status_code >= 400:
print("WARNING: Cannot fetch peers:", domain)
def post_json_api(domain: str, path: str, data: str) -> list:
# NOISY-DEBUG: print("DEBUG: Sending POST to domain,path,data:", domain, path, data)
try:
- res = reqto.post(f"https://{domain}{path}", data=data, headers=headers, timeout=config["timeout"])
+ res = reqto.post(f"https://{domain}{path}", data=data, headers=headers, timeout=(config["connection_timeout"], config["read_timeout"]))
if not res.ok or res.status_code >= 400:
print("WARNING: Cannot query JSON API:", domain, path, data, res.status_code)
json = None
for request in requests:
# NOISY-DEBUG: print("DEBUG: Fetching request:", request)
- res = reqto.get(request, headers=headers, timeout=config["timeout"])
+ res = reqto.get(request, headers=headers, timeout=(config["connection_timeout"], config["read_timeout"]))
# NOISY-DEBUG: print("DEBUG: res.ok,res.json[]:", res.ok, type(res.json()))
if res.ok and res.json() is not None:
try:
doc = BeautifulSoup(
- reqto.get(f"https://{domain}/about/more", headers=headers, timeout=config["timeout"]).text,
+ reqto.get(f"https://{domain}/about/more", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"])).text,
"html.parser",
)
except:
try:
doc = BeautifulSoup(
- reqto.get(f"https://{domain}/friendica", headers=headers, timeout=config["timeout"]).text,
+ reqto.get(f"https://{domain}/friendica", headers=headers, timeout=(config["connection_timeout"], config["read_timeout"])).text,
"html.parser",
)
except:
# handling CSRF, I've seen at least one server requiring it to access the endpoint
# NOISY-DEBUG: print("DEBUG: Fetching meta:", blocker)
meta = bs4.BeautifulSoup(
- reqto.get(f"https://{blocker}/about", headers=fba.headers, timeout=fba.config["timeout"]).text,
+ reqto.get(f"https://{blocker}/about", headers=fba.headers, timeout=fba.(config["connection_timeout"], config["read_timeout"])).text,
"html.parser",
)
try:
reqheaders = fba.headers
# NOISY-DEBUG: print("DEBUG: Quering API domain_blocks:", blocker)
- blocks = reqto.get(f"https://{blocker}/api/v1/instance/domain_blocks", headers=reqheaders, timeout=fba.config["timeout"]).json()
+ blocks = reqto.get(f"https://{blocker}/api/v1/instance/domain_blocks", headers=reqheaders, timeout=(fba.config["connection_timeout"], fba.config["read_timeout"])).json()
# NOISY-DEBUG: print("DEBUG: blocks():", len(blocks))
for block in blocks:
print("INFO: blocker:", blocker)
try:
# Blocks
- federation = reqto.get(f"https://{blocker}/api/v1/instance/peers?filter=suspended", headers=fba.headers, timeout=fba.config["timeout"]).json()
+ federation = reqto.get(f"https://{blocker}/api/v1/instance/peers?filter=suspended", headers=fba.headers, timeout=(fba.config["connection_timeout"], fba.config["read_timeout"])).json()
if federation is None:
print("WARNING: No valid response:", blocker)