from fba.models import blocks
from fba.models import instances
-# Timeout
-_timeout = (config.get("connection_timeout"), config.get("read_timeout"))
-
router = fastapi.FastAPI(docs_url=config.get("base_url") + "/docs", redoc_url=config.get("base_url") + "/redoc")
router.mount(
"/static",
elif amount <= 0:
raise HTTPException(status_code=500, detail="Invalid amount specified")
- response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/scoreboard.json?mode={mode}&amount={amount}", timeout=_timeout)
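+ # Use the shared (connection, read) timeout tuple cached in the config module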
+ response = requests.get(
+ f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/scoreboard.json?mode={mode}&amount={amount}",
+ timeout=config.timeout
+ )
if response is None:
raise HTTPException(status_code=500, detail="Could not determine scores")
if mode == "detection_mode" and not instances.valid(value, "detection_mode"):
raise HTTPException(status_code=500, detail="Invalid detection mode provided")
- response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/list.json?mode={mode}&value={value}&amount={amount}", timeout=_timeout)
+ response = requests.get(
+ f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/list.json?mode={mode}&value={value}&amount={amount}",
+ timeout=config.timeout
+ )
domainlist = list()
if response is not None and response.ok:
elif mode in ["domain", "reverse"] and not domain_helper.is_wanted(value):
raise HTTPException(status_code=500, detail="Invalid or blocked domain specified")
- response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/top.json?mode={mode}&value={value}&amount={amount}", timeout=_timeout)
+ response = requests.get(
+ f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/top.json?mode={mode}&value={value}&amount={amount}",
+ timeout=config.timeout
+ )
found = 0
blocklist = list()
if not domain_helper.is_wanted(domain):
raise HTTPException(status_code=500, detail=f"domain='{domain}' is not wanted")
- response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/domain.json?domain={domain}", timeout=_timeout)
+ response = requests.get(
+ f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/domain.json?domain={domain}",
+ timeout=config.timeout
+ )
if not response.ok or response.status_code > 200 or response.text.strip() == "":
raise HTTPException(status_code=response.status_code, detail=response.reason)
@router.get(config.get("base_url") + "/")
def index(request: Request) -> None:
# Get info
- response = requests.get(f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/info.json", timeout=_timeout)
+ response = requests.get(
+ f"http://{config.get('host')}:{config.get('port')}{config.get('base_url')}/api/info.json",
+ timeout=config.timeout
+ )
if not response.ok:
raise HTTPException(status_code=response.status_code, detail=response.text)
from fba.networks import pleroma
# Locally "cached" values to speed up the code and keep the massive debug log shorter
-_timeout = (config.get("connection_timeout"), config.get("read_timeout"))
_bot_enabled = config.get("bot_enabled")
logging.basicConfig(level=logging.INFO)
raw = network.fetch_url(
f"https://{source_domain}",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))
raw = network.fetch_url(
f"https://{source_domain}/todon/domainblocks",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
raw = network.fetch_url(
f"https://{source_domain}/federation",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
sources.update(domain)
logger.info("Fetch FBA-specific RSS args.feed='%s' ...", args.feed)
- response = network.fetch_url(args.feed, network.web_headers, _timeout)
+ response = network.fetch_url(args.feed, network.web_headers, config.timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
domains = list()
logger.info("Fetching ATOM feed='%s' from FBA bot account ...", feed)
- response = network.fetch_url(feed, network.web_headers, _timeout)
+ response = network.fetch_url(feed, network.web_headers, config.timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.info("Checking %d text file(s) ...", len(blocklists.txt_files))
for row in blocklists.txt_files:
logger.debug("Fetching row[url]='%s' ...", row["url"])
- response = network.fetch_url(row["url"], network.web_headers, _timeout)
+ response = network.fetch_url(row["url"], network.web_headers, config.timeout)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.status_code == 200 and response.text != "":
response = network.fetch_url(
f"https://{source_domain}",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
raw = network.fetch_url(
f"https://{source_domain}/api/v1/instances",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
raw = network.fetch_url(
f"https://{source_domain}/instances.json",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
response = reqto.get(
url,
headers=network.web_headers,
- timeout=_timeout,
+ timeout=config.timeout,
allow_redirects=False
)
logger.info("Fetching row[nodeinfo_url]='%s' from relay row[domain]='%s',row[software]='%s' ...", row["nodeinfo_url"], row["domain"], row["software"])
raw = network.fetch_api_url(
row["nodeinfo_url"],
- timeout=_timeout
+ timeout=config.timeout
)
logger.debug("raw[%s]()=%d", type(raw), len(raw))
raw = network.fetch_url(
f"https://{row['domain']}",
network.web_headers,
- timeout=_timeout
+ timeout=config.timeout
).text
logger.debug("raw[%s]()=%d", type(raw), len(raw))
# Do not access _config directly, use config.get("foo") instead
_config = {}
+# Locally "cached" values to speed up the code and keep the massive debug log shorter
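+# Both tuples are passed verbatim to requests, which accepts a
+# (connection_timeout, read_timeout) pair, e.g. requests.get(url, timeout=(5.0, 10.0)).
+# They start out empty and are filled in once the configuration has been loaded.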
+timeout = ()
+nodeinfo_timeout = ()
+
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)
_config["max_crawl_depth"] = min(_config["max_crawl_depth"], (sys.getrecursionlimit() - 50))
logger.debug("LOADED!")
+ # "Cached" values
+ timeout = (_config["connection_timeout"], _config["read_timeout"])
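+ # Nodeinfo requests get their own pair so they can be tuned independently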
+ nodeinfo_timeout = (_config["nodeinfo_connection_timeout"], _config["nodeinfo_read_timeout"])
+
def get(key: str) -> any:
logger.debug("key[%s]='%s' - CALLED!", type(key), key)
response = network.fetch_url(
url,
network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=config.timeout
)
logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
domain,
path,
headers=web_headers,
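+ # Without an explicit timeout, requests would wait indefinitely, so the
+ # shared tuple from the config module is passed here as well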
+ timeout=config.timeout,
allow_redirects=allow_redirects
)
"http://nodeinfo.diaspora.software/ns/schema/1.0",
]
-# Locally "cached" values to speedup code and keep massive debug log shorter
-_timeout = (
- config.get("connection_timeout"),
- config.get("read_timeout")
-)
-_nodeinfo_timeout = (
- config.get("nodeinfo_connection_timeout"),
- config.get("nodeinfo_read_timeout")
-)
-
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)
domain,
request,
headers=headers,
- timeout=_nodeinfo_timeout
+ timeout=config.nodeinfo_timeout
)
logger.debug("data(%d)[]='%s'", len(data), type(data))
domain,
path,
headers=headers,
- timeout=_nodeinfo_timeout
+ timeout=config.nodeinfo_timeout
)
logger.debug("data(%d)[]='%s'", len(data), type(data))
logger.debug("Fetching nodeinfo from url='%s' ...", url)
data = network.fetch_api_url(
url,
- timeout=_timeout
+ timeout=config.timeout
)
logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
if "error_message" not in data and "json" in data:
logger.debug("Auto-discovery successful: domain='%s' - BREAK!", domain)
break
+
elif "server" in infos:
logger.debug("Found infos[server][software]='%s'", infos["server"]["software"])
instances.set_detection_mode(domain, "AUTO_DISCOVERY")
domain,
"/api/v3/site",
headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
+ timeout=config.timeout
)
logger.debug("data[]='%s'", type(data))