import atoma
import bs4
import markdown
-import reqto
import validators
from fba import database
logger.info("Fetching software list ...")
raw = network.fetch_url(
f"https://{source_domain}",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))
logger.debug("Fetching domainblocks from source_domain='%s'", source_domain)
raw = network.fetch_url(
f"https://{source_domain}/todon/domainblocks",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
logger.info("Fetching federation.md from source_domain='%s' ...", source_domain)
raw = network.fetch_url(
f"https://{source_domain}/federation",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
sources.update(domain)
logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
- response = network.fetch_url(args.feed, network.web_headers, config.timeout)
+ response = network.fetch_url(
+ args.feed,
+ headers=network.web_headers,
+ timeout=config.timeout
+ )
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
domains = []
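# Sketch (assumption): the imported atoma package can parse the RSS body
# fetched above; parse_rss_bytes() takes the raw bytes:
# rss = atoma.parse_rss_bytes(response.content)
# for item in rss.items:
#     ...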
logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
- response = network.fetch_url(feed, network.web_headers, config.timeout)
+ response = network.fetch_url(
+ feed,
+ headers=network.web_headers,
+ timeout=config.timeout
+ )
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
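# Assumption: the parse step is elided in this excerpt but implied by the
# atom.entries loop below; atoma.parse_atom_bytes() takes the raw bytes.
atom = atoma.parse_atom_bytes(response.content)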
for entry in atom.entries:
logger.debug("entry[]='%s'", type(entry))
doc = bs4.BeautifulSoup(entry.content.value, "html.parser")
+
logger.debug("doc[]='%s'", type(doc))
elements = doc.findAll("a")
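# Sketch (assumption): each anchor's target would typically be validated
# before use, e.g. with the imported validators package:
# for element in elements:
#     if validators.url(element["href"]):
#         ...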
logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
for row in blocklists.txt_files:
logger.debug("Fetching row[url]='%s' ...", row["url"])
- response = network.fetch_url(row["url"], network.web_headers, config.timeout)
+ response = network.fetch_url(
+ row["url"],
+ headers=network.web_headers,
+ timeout=config.timeout
+ )
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and response.text != "":
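# Sketch (assumption): plain-text blocklists are commonly one domain per
# line, so a line-wise walk would follow here:
# for line in response.text.splitlines():
#     domain = line.strip().lower()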
logger.info("Fetching / from source_domain='%s' ...", source_domain)
response = network.fetch_url(
f"https://{source_domain}",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
)
logger.info("Fetching instances from source_domain='%s' ...", source_domain)
raw = network.fetch_url(
f"https://{source_domain}/api/v1/instances",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
logger.info("Fetching instances.json from source_domain='%s' ...", source_domain)
raw = network.fetch_url(
f"https://{source_domain}/instances.json",
- network.web_headers,
+ headers=network.web_headers,
timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
url = f"http://{source_domain}/instance/csv?software={args.software}&onion=not"
logger.info("Fetching url='%s' ...", url)
- response = reqto.get(
+ response = network.fetch_url(
url,
headers=network.web_headers,
timeout=config.timeout
)
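# Sketch (assumption): the endpoint above returns CSV, so parsing could
# look like this (needs "import csv", not shown above):
# reader = csv.DictReader(response.content.decode("utf-8").splitlines())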
# HTTP headers for API requests
-api_headers = {
+_api_headers = {
"User-Agent" : config.get("useragent"),
"Content-Type": "application/json",
}
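# The leading underscore marks the headers dict as module-private by
# Python naming convention.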
response = reqto.post(
f"https://{domain}{path}",
data=data,
- headers={**api_headers, **headers},
+ headers={**_api_headers, **headers},
timeout=config.timeout,
cookies=cookies.get_all(domain),
allow_redirects=False
)
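# allow_redirects=False hands any 3xx response back to the caller instead
# of following it silently.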
try:
logger.debug("Fetching url='%s' ...", url)
- response = fetch_url(url, api_headers, timeout)
+ response = fetch_url(url, headers=_api_headers, timeout=timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.reason='%s'", response.ok, response.status_code, response.reason)
if response.ok and response.status_code == 200:
try:
logger.debug("Sending GET to domain='%s',path='%s',timeout(%d)='%s'", domain, path, len(timeout), timeout)
- response = _fetch_response(domain, path, {**api_headers, **headers}, timeout)
+ response = _fetch_response(domain, path, {**_api_headers, **headers}, timeout)
except exceptions as exception:
logger.debug("Fetching path='%s' from domain='%s' failed. exception[%s]='%s'", path, domain, type(exception), str(exception))
json_reply["status_code"] = 999
"visibility" : config.get("bot_visibility"),
"content_type": "text/plain"
},
- headers={**api_headers, **{"Authorization": "Bearer " + config.get("bot_token")}},
+ headers={**_api_headers, **{"Authorization": "Bearer " + config.get("bot_token")}},
timeout=config.timeout,
allow_redirects=False
)
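# The {**_api_headers, **{...}} merge layers the bot's Authorization header
# on top of the shared defaults; on duplicate keys the right-hand dict wins.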