types = list()
if args.software is None:
    logger.info("Fetching software list ...")
-    raw = utils.fetch_url(
+    raw = network.fetch_url(
        f"https://{source_domain}",
        network.web_headers,
        (config.get("connection_timeout"), config.get("read_timeout"))
    )
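
Every hunk in this patch makes the same change: the HTTP helper moves from the `utils` module to `network`, while the call shape fetch_url(url, headers, timeout) stays the same. For orientation, here is a minimal, hypothetical sketch of such a wrapper, assuming a plain requests-based implementation; the real network.fetch_url is not shown in this diff:

```python
import requests

# Hypothetical sketch only. It mirrors the call sites in this patch:
# fetch_url(url, headers, (connect_timeout, read_timeout)).
def fetch_url(url: str, headers: dict, timeout: tuple) -> requests.Response:
    # requests accepts a (connect, read) tuple for its timeout parameter.
    return requests.get(url, headers=headers, timeout=timeout)
```
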
logger.debug("Fetching domainblocks from source_domain='%s'", source_domain)
- raw = utils.fetch_url(
+ raw = network.fetch_url(
f"https://{source_domain}/todon/domainblocks",
network.web_headers,
(config.get("connection_timeout"), config.get("read_timeout"))
sources.update(source_domain)
logger.info("Fetching federation.md from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
+ raw = network.fetch_url(
    f"https://{source_domain}/chaossocial/meta/master/federation.md",
    network.web_headers,
    (config.get("connection_timeout"), config.get("read_timeout"))
)

sources.update(domain)
logger.info("Fetching FBA-specific RSS args.feed='%s' ...", args.feed)
- response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,len(response.text)=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
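
The one-line call sites above pass the timeout as a 2-tuple. This is the standard requests convention: the first element bounds connection setup, the second bounds each read from the socket. A self-contained usage example with assumed values (the patch itself reads them from config.get(...)):

```python
import requests

timeout = (5.0, 10.0)  # assumed (connection_timeout, read_timeout) values

response = requests.get("https://example.org/feed.rss", timeout=timeout)
print(response.ok, response.status_code, len(response.text))
```
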
domains = list()
logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
- response = utils.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
for row in blocklists.txt_files:
    logger.debug("Fetching row[url]='%s' ...", row["url"])
-    response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+    response = network.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
    logger.debug("response.ok='%s',response.status_code=%d,len(response.text)=%d", response.ok, response.status_code, len(response.text))
    if response.ok and response.status_code == 200 and response.text != "":
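
The guard above is stricter than it may look: response.ok is true for any status code below 400, including redirects, so the additional status_code == 200 comparison is what pins the check to a plain success. The same guard in isolation, with a hypothetical blocklist URL and follow-up parsing:

```python
import requests

response = requests.get("https://example.org/blocklist.txt", timeout=(5.0, 10.0))

# response.ok covers every status < 400 (2xx and 3xx), so the explicit
# == 200 comparison is the narrower of the two checks.
if response.ok and response.status_code == 200 and response.text != "":
    # Hypothetical follow-up: one blocked domain per line.
    domains = [line.strip() for line in response.text.splitlines() if line.strip()]
```
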
sources.update(source_domain)
logger.info("Fetching / from source_domain='%s' ...", source_domain)
- response = utils.fetch_url(
+ response = network.fetch_url(
    f"https://{source_domain}",
    network.web_headers,
    (config.get("connection_timeout"), config.get("read_timeout"))
)

sources.update(source_domain)
logger.info("Fetching instances from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
+ raw = network.fetch_url(
    f"https://{source_domain}/api/v1/instances",
    network.web_headers,
    (config.get("connection_timeout"), config.get("read_timeout"))
)

sources.update(source_domain)
logger.info("Fetching instances.json from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
+ raw = network.fetch_url(
    f"https://{source_domain}/instances.json",
    network.web_headers,
    (config.get("connection_timeout"), config.get("read_timeout"))
)

    continue
else:
    logger.info("Fetching / from relay row[domain]='%s',row[software]='%s' ...", row["domain"], row["software"])
-    raw = utils.fetch_url(
+    raw = network.fetch_url(
        f"https://{row['domain']}",
        network.web_headers,
        (config.get("connection_timeout"), config.get("read_timeout"))
    )