from urllib.parse import urlparse
import bs4
+import requests
import validators
from fba import csrf
-from fba import utils
from fba.helpers import config
from fba.helpers import cookies
from fba.helpers import version
from fba.http import network
+from fba.http import nodeinfo
from fba.models import instances
from fba.networks import misskey
from fba.networks import peertube
+# Recursion depth counter: raised when fetch_instances() is entered and lowered again before it returns
+_DEPTH = 0
+
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
-# "rel" identifiers (no real URLs)
-nodeinfo_identifier = [
- "https://nodeinfo.diaspora.software/ns/schema/2.1",
- "https://nodeinfo.diaspora.software/ns/schema/2.0",
- "https://nodeinfo.diaspora.software/ns/schema/1.1",
- "https://nodeinfo.diaspora.software/ns/schema/1.0",
- "http://nodeinfo.diaspora.software/ns/schema/2.1",
- "http://nodeinfo.diaspora.software/ns/schema/2.0",
- "http://nodeinfo.diaspora.software/ns/schema/1.1",
- "http://nodeinfo.diaspora.software/ns/schema/1.0",
-]
-
def fetch_instances(domain: str, origin: str, software: str, command: str, path: str = None):
+ # Fetch the peer list of 'domain' and register each wanted peer, recursing into
+ # newly-seen instances up to config 'max_crawl_depth'.
- logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s' - CALLED!", domain, origin, software, command, path)
+ global _DEPTH
+ logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s',_DEPTH=%d - CALLED!", domain, origin, software, command, path, _DEPTH)
domain_helper.raise_on(domain)
if not isinstance(origin, str) and origin is not None:
- raise ValueError(f"Parameter origin[]='{type(origin)}' is not 'str'")
+ raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'")
elif not isinstance(command, str):
- raise ValueError(f"Parameter command[]='{type(command)}' is not 'str'")
+ raise ValueError(f"Parameter command[]='{type(command)}' is not of type 'str'")
elif command == "":
raise ValueError("Parameter 'command' is empty")
- elif software is None:
+ elif command in ["fetch_blocks", "fetch_cs", "fetch_bkali", "fetch_relays", "fetch_fedipact", "fetch_joinmobilizon", "fetch_joinmisskey", "fetch_joinfediverse"] and origin is None:
+ raise ValueError(f"Parameter command='{command}' but origin is None, please fix invoking this function.")
+ elif not isinstance(path, str) and path is not None:
+ raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
+ # Abort recursive invocations (_DEPTH > 0) for domains whose instance list was fetched recently
+ elif _DEPTH > 0 and instances.is_recent(domain, "last_instance_fetch"):
+ raise ValueError(f"domain='{domain}' has recently been fetched but function was invoked")
+ elif software is None and not instances.is_recent(domain, "last_nodeinfo"):
try:
logger.debug("Software for domain='%s' is not set, determining ...", domain)
software = determine_software(domain, path)
instances.set_last_error(domain, exception)
+ # NOTE(review): the 'except' clause that binds 'exception' for the set_last_error()
+ # call above is not visible in this hunk (context lines omitted) - confirm against the full file.
logger.debug("Determined software='%s' for domain='%s'", software, domain)
+ elif software is None:
+ logger.debug("domain='%s' has unknown software or nodeinfo has recently being fetched", domain)
elif not isinstance(software, str):
- raise ValueError(f"Parameter software[]='{type(software)}' is not 'str'")
+ raise ValueError(f"Parameter software[]='{type(software)}' is not of type 'str'")
+
+ # Increase depth (decremented again on both exit paths below)
+ _DEPTH = _DEPTH + 1
+ logger.debug("Checking if domain='%s' is registered ...", domain)
if not instances.is_registered(domain):
logger.debug("Adding new domain='%s',origin='%s',command='%s',path='%s',software='%s'", domain, origin, command, path, software)
instances.add(domain, origin, command, path, software)
instances.set_last_instance_fetch(domain)
peerlist = list()
- try:
- logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin)
- peerlist = fetch_peers(domain, software, origin)
- except network.exceptions as exception:
- logger.warning("Cannot fetch peers from domain='%s': '%s'", domain, type(exception))
+ logger.debug("software='%s'", software)
+ if software is not None:
+ try:
+ logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin)
+ peerlist = fetch_peers(domain, software, origin)
+ except network.exceptions as exception:
+ logger.warning("Cannot fetch peers from domain='%s',software='%s': '%s'", domain, software, type(exception))
logger.debug("peerlist[]='%s'", type(peerlist))
if isinstance(peerlist, list):
logger.debug("peerlist[]='%s'", type(peerlist))
if peerlist is None or len(peerlist) == 0:
- logger.warning("Cannot fetch peers: domain='%s'", domain)
+ logger.warning("Cannot fetch peers: domain='%s',software='%s'", domain, software)
+
+ # Persist any buffered instance data before bailing out early
+ if instances.has_pending(domain):
+ logger.debug("Flushing updates for domain='%s' ...", domain)
+ instances.update_data(domain)
logger.debug("Invoking cookies.clear(%s) ...", domain)
cookies.clear(domain)
+ _DEPTH = _DEPTH - 1
logger.debug("EXIT!")
return
- logger.info("Checking %d instance(s) from domain='%s',software='%s' ...", len(peerlist), domain, software)
+ logger.info("Checking %d instance(s) from domain='%s',software='%s',depth=%d ...", len(peerlist), domain, software, _DEPTH)
for instance in peerlist:
logger.debug("instance='%s'", instance)
- if instance is None:
- # Skip "None" types as tidup.domain() cannot parse them
+ if instance is None or instance == "":
+ logger.debug("instance[%s]='%s' is either None or empty - SKIPPED!", type(instance), instance)
continue
logger.debug("instance='%s' - BEFORE!", instance)
if instance == "":
logger.warning("Empty instance after tidyup.domain(), domain='%s'", domain)
continue
- elif not utils.is_domain_wanted(instance):
+ elif ".." in instance:
+ logger.warning("instance='%s' contains double-dot, removing ...", instance)
+ instance = instance.replace("..", ".")
+
+ logger.debug("instance='%s' - BEFORE!", instance)
+ # Normalize internationalized domain names to punycode (IDNA) before lookups
+ instance = instance.encode("idna").decode("utf-8")
+ logger.debug("instance='%s' - AFTER!", instance)
+
+ if not domain_helper.is_wanted(instance):
logger.debug("instance='%s' is not wanted - SKIPPED!", instance)
continue
elif instance.find("/profile/") > 0 or instance.find("/users/") > 0 or (instances.is_registered(instance.split("/")[0]) and instance.find("/c/") > 0):
logger.debug("instance='%s' is a link to a tag - SKIPPED!", instance)
continue
elif not instances.is_registered(instance):
- logger.debug("Adding new instance='%s',domain='%s',command='%s'", instance, domain, command)
- instances.add(instance, domain, command)
+ logger.debug("Checking if domain='%s' has pending updates ...", domain)
+ if instances.has_pending(domain):
+ logger.debug("Flushing updates for domain='%s' ...", domain)
+ instances.update_data(domain)
+
+ logger.debug("instance='%s',origin='%s',_DEPTH=%d reached!", instance, origin, _DEPTH)
+ # Recurse while within the crawl-depth budget and the peer list is large enough;
+ # otherwise only register the instance without crawling it now
+ if _DEPTH <= config.get("max_crawl_depth") and len(peerlist) >= config.get("min_peers_length"):
+ logger.debug("Fetching instance='%s',origin='%s',command='%s',path='%s',_DEPTH=%d ...", instance, domain, command, path, _DEPTH)
+ fetch_instances(instance, domain, None, command, path)
+ else:
+ logger.debug("Adding instance='%s',domain='%s',command='%s',_DEPTH=%d ...", instance, domain, command, _DEPTH)
+ instances.add(instance, domain, command)
logger.debug("Invoking cookies.clear(%s) ...", domain)
cookies.clear(domain)
logger.debug("Flushing updates for domain='%s' ...", domain)
instances.update_data(domain)
+ _DEPTH = _DEPTH - 1
logger.debug("EXIT!")
def fetch_peers(domain: str, software: str, origin: str) -> list:
+ # Determine and return the list of peer domains for 'domain', dispatching on 'software'.
+ # NOTE(review): large parts of this function are omitted hunk context (the software
+ # dispatch bodies, the 'try:' for the CSRF check, and the 'paths' list contents).
domain_helper.raise_on(domain)
if not isinstance(software, str) and software is not None:
- raise ValueError(f"software[]='{type(software)}' is not 'str'")
+ raise ValueError(f"Parameter software[]='{type(software)}' is not of type 'str'")
+ elif not isinstance(origin, str) and origin is not None:
+ raise ValueError(f"Parameter origin[]='{type(origin)}' is not of type 'str'")
+ elif isinstance(origin, str) and origin == "":
+ raise ValueError("Parameter 'origin' is empty")
if software == "misskey":
logger.debug("Invoking misskey.fetch_peers(%s) ...", domain)
logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
+ # NOTE(review): the 'try:' matching this 'except' is omitted hunk context - see full file.
except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s)", type(exception), __name__)
instances.set_last_error(domain, exception)
+
+ logger.debug("Returning empty list ... - EXIT!")
return list()
paths = [
break
if not isinstance(peers, list):
- logger.warning("peers[]='%s' is not 'list', maybe bad API response?", type(peers))
+ logger.warning("peers[]='%s' is not of type 'list', maybe bad API response?", type(peers))
peers = list()
logger.debug("Invoking instances.set_total_peers(%s,%d) ...", domain, len(peers))
logger.debug("peers()=%d - EXIT!", len(peers))
return peers
-def fetch_nodeinfo(domain: str, path: str = None) -> dict:
- logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
- domain_helper.raise_on(domain)
-
- if not isinstance(path, str) and path is not None:
- raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'")
-
- logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
- nodeinfo = fetch_wellknown_nodeinfo(domain)
-
- logger.debug("nodeinfo[%s](%d='%s'", type(nodeinfo), len(nodeinfo), nodeinfo)
- if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0:
- logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json']))
- return nodeinfo["json"]
-
- # No CSRF by default, you don't have to add network.api_headers by yourself here
- headers = tuple()
- data = dict()
-
- try:
- logger.debug("Checking CSRF for domain='%s'", domain)
- headers = csrf.determine(domain, dict())
- except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (nodeinfo,%s) - EXIT!", type(exception), __name__)
- instances.set_last_error(domain, exception)
- return {
- "status_code" : 500,
- "error_message": f"exception[{type(exception)}]='{str(exception)}'",
- "exception" : exception,
- }
-
- request_paths = [
- "/nodeinfo/2.1.json",
- "/nodeinfo/2.1",
- "/nodeinfo/2.0.json",
- "/nodeinfo/2.0",
- "/nodeinfo/1.0.json",
- "/nodeinfo/1.0",
- "/api/v1/instance",
- ]
-
- for request in request_paths:
- logger.debug("request='%s'", request)
- http_url = f"http://{domain}{path}"
- https_url = f"https://{domain}{path}"
-
- logger.debug("path[%s]='%s',request='%s',http_url='%s',https_url='%s'", type(path), path, request, http_url, https_url)
- if path is None or path in [request, http_url, https_url]:
- logger.debug("Fetching request='%s' from domain='%s' ...", request, domain)
- if path in [http_url, https_url]:
- logger.debug("domain='%s',path='%s' has protocol in path, splitting ...", domain, path)
- components = urlparse(path)
- path = components.path
-
- data = network.get_json_api(
- domain,
- request,
- headers,
- (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
- )
-
- logger.debug("data[]='%s'", type(data))
- if "error_message" not in data and "json" in data:
- logger.debug("Success: request='%s'", request)
- instances.set_detection_mode(domain, "STATIC_CHECK")
- instances.set_nodeinfo_url(domain, request)
- break
-
- logger.warning("Failed fetching nodeinfo from domain='%s',status_code='%s',error_message='%s'", domain, data['status_code'], data['error_message'])
-
- logger.debug("data()=%d - EXIT!", len(data))
- return data
-
-def fetch_wellknown_nodeinfo(domain: str) -> dict:
- logger.debug("domain='%s' - CALLED!", domain)
- domain_helper.raise_on(domain)
-
- # No CSRF by default, you don't have to add network.api_headers by yourself here
- headers = tuple()
-
- try:
- logger.debug("Checking CSRF for domain='%s'", domain)
- headers = csrf.determine(domain, dict())
- except network.exceptions as exception:
- logger.warning("Exception '%s' during checking CSRF (fetch_wellknown_nodeinfo,%s) - EXIT!", type(exception), __name__)
- instances.set_last_error(domain, exception)
- return {
- "status_code" : 500,
- "error_message": type(exception),
- "exception" : exception,
- }
-
- logger.debug("Fetching .well-known info for domain='%s'", domain)
- data = network.get_json_api(
- domain,
- "/.well-known/nodeinfo",
- headers,
- (config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
- )
-
- if "error_message" not in data:
- nodeinfo = data["json"]
-
- logger.debug("Marking domain='%s' as successfully handled ...", domain)
- instances.set_success(domain)
-
- logger.debug("Found entries: nodeinfo()=%d,domain='%s'", len(nodeinfo), domain)
- if "links" in nodeinfo:
- logger.debug("Found nodeinfo[links]()=%d record(s)", len(nodeinfo["links"]))
- for link in nodeinfo["links"]:
- logger.debug("link[%s]='%s'", type(link), link)
- if not isinstance(link, dict) or not "rel" in link:
- logger.warning("link[]='%s' is not 'dict' or no element 'rel' found", type(link))
- elif link["rel"] in nodeinfo_identifier:
- # Default is that 'href' has a complete URL, but some hosts don't send that
- url = link["href"]
- components = urlparse(link["href"])
-
- logger.debug("components[%s]='%s'", type(components), components)
- if components.scheme == "" and components.netloc == "":
- logger.debug("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
- url = f"https://{domain}{url}"
- components = urlparse(url)
-
- if not utils.is_domain_wanted(components.netloc):
- logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
- continue
-
- logger.debug("Fetching nodeinfo from url='%s' ...", url)
- data = network.fetch_api_url(
- url,
- (config.get("connection_timeout"), config.get("read_timeout"))
- )
-
- logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
- if "error_message" not in data and "json" in data:
- logger.debug("Found JSON data()=%d", len(data))
- instances.set_detection_mode(domain, "AUTO_DISCOVERY")
- instances.set_nodeinfo_url(domain, link["href"])
-
- logger.debug("Marking domain='%s' as successfully handled ...", domain)
- instances.set_success(domain)
- break
- else:
- logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
- instances.set_last_error(domain, data)
- else:
- logger.warning("Unknown 'rel' value: domain='%s',link[rel]='%s'", domain, link["rel"])
- else:
- logger.warning("nodeinfo does not contain 'links': domain='%s'", domain)
-
- logger.debug("Returning data[]='%s' - EXIT!", type(data))
- return data
-
def fetch_generator_from_path(domain: str, path: str = "/") -> str:
+ # Scrape 'path' (default "/") of 'domain' and derive the software name from
+ # well-known meta tags of the served HTML page.
- logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path)
+ logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
domain_helper.raise_on(domain)
if not isinstance(path, str):
- raise ValueError(f"path[]='{type(path)}' is not 'str'")
+ raise ValueError(f"path[]='{type(path)}' is not of type 'str'")
elif path == "":
raise ValueError("Parameter 'path' is empty")
software = None
logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
- response = network.fetch_response(domain, path, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ response = network.fetch_response(
+ domain,
+ path,
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout")),
+ allow_redirects=True
+ )
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text.find("<html") > 0:
+ if ((response.ok and response.status_code < 300) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
logger.debug("doc[]='%s'", type(doc))
+ # Detection priority: og:platform, then generator, then application-name, then og:site_name
+ platform = doc.find("meta", {"property": "og:platform"})
generator = doc.find("meta", {"name" : "generator"})
site_name = doc.find("meta", {"property": "og:site_name"})
+ app_name = doc.find("meta", {"name" : "application-name"})
- logger.debug("generator[]='%s',site_name[]='%s'", type(generator), type(site_name))
- if isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str):
+ logger.debug("generator[]='%s',site_name[]='%s',platform[]='%s',app_name[]='%s'", type(generator), type(site_name), type(platform), type(app_name))
+ if isinstance(platform, bs4.element.Tag) and isinstance(platform.get("content"), str):
+ logger.debug("Found property=og:platform, domain='%s'", domain)
+ software = tidyup.domain(platform.get("content"))
+
+ logger.debug("software[%s]='%s'", type(software), software)
+ if software is not None and software != "":
+ logger.debug("domain='%s' has og:platform='%s' - Setting detection_mode=PLATFORM ...", domain, software)
+ instances.set_detection_mode(domain, "PLATFORM")
+ elif isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str):
logger.debug("Found generator meta tag: domain='%s'", domain)
software = tidyup.domain(generator.get("content"))
logger.debug("software[%s]='%s'", type(software), software)
if software is not None and software != "":
- logger.info("domain='%s' is generated by '%s'", domain, software)
+ logger.info("domain='%s' is generated by software='%s' - Setting detection_mode=GENERATOR ...", domain, software)
instances.set_detection_mode(domain, "GENERATOR")
elif isinstance(app_name, bs4.element.Tag) and isinstance(app_name.get("content"), str):
+ # Fixed log text: this branch matches <meta name="application-name">, not og:app_name
+ logger.debug("Found meta name=application-name, domain='%s'", domain)
+ software = tidyup.domain(app_name.get("content"))
+
+ logger.debug("software[%s]='%s'", type(software), software)
+ if software is not None and software != "":
+ # Fixed log text: the stored detection mode is "APP_NAME"
+ logger.debug("domain='%s' has application-name='%s' - Setting detection_mode=APP_NAME ...", domain, software)
+ instances.set_detection_mode(domain, "APP_NAME")
elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str):
logger.debug("Found property=og:site_name, domain='%s'", domain)
software = tidyup.domain(site_name.get("content"))
logger.debug("software[%s]='%s'", type(software), software)
if software is not None and software != "":
- logger.info("domain='%s' has og:site_name='%s'", domain, software)
+ logger.debug("domain='%s' has og:site_name='%s' - Setting detection_mode=SITE_NAME ...", domain, software)
instances.set_detection_mode(domain, "SITE_NAME")
+ # Redirected to a different domain: register the redirect target, reset detection data, bail out
+ elif not domain_helper.is_in_url(domain, response.url):
+ logger.warning("domain='%s' doesn't match response.url='%s', maybe redirect to other domain?", domain, response.url)
+
+ components = urlparse(response.url)
+
+ logger.debug("components[]='%s'", type(components))
+ if not instances.is_registered(components.netloc):
+ logger.info("components.netloc='%s' is not registered, adding ...", components.netloc)
+ fetch_instances(components.netloc, domain, None, "fetch_generator")
+
+ message = f"Redirect from domain='{domain}' to response.url='{response.url}'"
+ instances.set_last_error(domain, message)
+ instances.set_software(domain, None)
+ instances.set_detection_mode(domain, None)
+ instances.set_nodeinfo_url(domain, None)
+
+ raise requests.exceptions.TooManyRedirects(message)
logger.debug("software[]='%s'", type(software))
if isinstance(software, str) and software == "":
+ # NOTE(review): the body of this 'if' is omitted hunk context (presumably resets
+ # software to None) - confirm against the full file.
logger.debug("software[]='%s'", type(software))
+ # NOTE(review): 'software_helper' is not imported in the visible header hunk -
+ # presumably added elsewhere in this patch; verify before applying.
if isinstance(software, str) and "powered by " in software:
logger.debug("software='%s' has 'powered by' in it", software)
- software = version.remove(version.strip_powered_by(software))
+ software = version.remove(software_helper.strip_powered_by(software))
elif isinstance(software, str) and " hosted on " in software:
logger.debug("software='%s' has 'hosted on' in it", software)
- software = version.remove(version.strip_hosted_on(software))
+ software = version.remove(software_helper.strip_hosted_on(software))
elif isinstance(software, str) and " by " in software:
logger.debug("software='%s' has ' by ' in it", software)
- software = version.strip_until(software, " by ")
+ software = software_helper.strip_until(software, " by ")
elif isinstance(software, str) and " see " in software:
logger.debug("software='%s' has ' see ' in it", software)
- software = version.strip_until(software, " see ")
+ software = software_helper.strip_until(software, " see ")
logger.debug("software='%s' - EXIT!", software)
return software
def determine_software(domain: str, path: str = None) -> str:
+ # Determine the software name running on 'domain': prefer nodeinfo, fall back to
+ # generator/meta-tag detection via fetch_generator_from_path().
- logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path)
+ logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
domain_helper.raise_on(domain)
if not isinstance(path, str) and path is not None:
- raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'")
+ raise ValueError(f"Parameter path[]='{type(path)}' is not of type 'str'")
logger.debug("Determining software for domain='%s',path='%s'", domain, path)
software = None
logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
- data = fetch_nodeinfo(domain, path)
+ data = nodeinfo.fetch_nodeinfo(domain, path)
- logger.debug("data[]='%s'", type(data))
+ logger.debug("data[%s]='%s'", type(data), data)
if "exception" in data:
# Continue raising it
+ # NOTE(review): the actual 'raise' statement and the surrounding branch structure
+ # are omitted hunk context here - confirm against the full file.
logger.debug("data()=%d contains exception='%s' - raising ...", len(data), type(data["exception"]))
logger.debug("domain='%s',path='%s',data[json] found ...", domain, path)
data = data["json"]
else:
- logger.debug("JSON response from domain='%s' does not include [software][name], fetching / ...", domain)
+ logger.debug("Auto-detection for domain='%s' was failing, fetching / ...", domain)
software = fetch_generator_from_path(domain)
logger.debug("Generator for domain='%s' is: '%s'", domain, software)
if "status" in data and data["status"] == "error" and "message" in data:
- logger.warning("JSON response is an error: '%s'", data["message"])
+ logger.warning("JSON response is an error: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"])
instances.set_last_error(domain, data["message"])
instances.set_detection_mode(domain, None)
instances.set_nodeinfo_url(domain, None)
software = data["software"]["name"]
logger.debug("software[%s]='%s' - FOUND!", type(software), software)
elif "message" in data:
- logger.warning("JSON response contains only a message: '%s'", data["message"])
+ logger.warning("JSON response contains only a message: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"])
instances.set_last_error(domain, data["message"])
instances.set_detection_mode(domain, None)
instances.set_nodeinfo_url(domain, None)
+
+ logger.debug("Invoking fetch_generator_from_path(%s) ...", domain)
software = fetch_generator_from_path(domain)
logger.debug("Generator for domain='%s' is: '%s'", domain, software)
+ # Some implementations nest the software name under data["server"]["software"]
+ elif "server" in data and "software" in data["server"]:
+ logger.debug("Found data[server][software]='%s' for domain='%s'", data["server"]["software"].lower(), domain)
+ software = data["server"]["software"].lower()
+ logger.debug("Detected software for domain='%s' is: '%s'", domain, software)
elif "software" not in data or "name" not in data["software"]:
- logger.debug("JSON response from domain='%s' does not include [software][name], fetching / ...", domain)
+ logger.debug("JSON response from domain='%s' does not include [software][name] - Resetting detection_mode,nodeinfo_url ...", domain)
instances.set_detection_mode(domain, None)
instances.set_nodeinfo_url(domain, None)
+
+ logger.debug("Invoking fetch_generator_from_path(%s) ...", domain)
software = fetch_generator_from_path(domain)
logger.debug("Generator for domain='%s' is: '%s'", domain, software)
logger.debug("software[]='%s'", type(software))
if isinstance(software, str) and "powered by" in software:
logger.debug("software='%s' has 'powered by' in it", software)
- software = version.remove(version.strip_powered_by(software))
+ software = version.remove(software_helper.strip_powered_by(software))
+
+ # 'software' may still be None here (initialized None above; generator detection can
+ # also return None) - guard the strip() so this does not raise AttributeError
+ software = software.strip() if software is not None else None
logger.debug("software='%s' - EXIT!", software)
return software
logger.debug("domain='%s',reason='%s'", domain, reason)
- if not utils.is_domain_wanted(domain):
+ if not domain_helper.is_wanted(domain):
logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
continue
elif domain == "gab.com/.ai, develop.gab.com":
def add_peers(rows: dict) -> list:
logger.debug("rows[]='%s' - CALLED!", type(rows))
if not isinstance(rows, dict):
- raise ValueError(f"Parameter rows[]='{type(rows)}' is not 'dict'")
+ raise ValueError(f"Parameter rows[]='{type(rows)}' is not of type 'dict'")
peers = list()
for key in ["linked", "allowed", "blocked"]:
raise ValueError(f"peer[]='{type(peer)}' is not supported,key='{key}'")
logger.debug("peer[%s]='%s' - AFTER!", type(peer), peer)
- if not utils.is_domain_wanted(peer):
+ if not domain_helper.is_wanted(peer):
logger.debug("peer='%s' is not wanted - SKIPPED!", peer)
continue