import validators
from fba import csrf
+from fba import utils
-from fba.helpers import blacklist
from fba.helpers import config
+from fba.helpers import cookies
+from fba.helpers import domain as domain_helper
+from fba.helpers import software as software_helper
from fba.helpers import tidyup
from fba.helpers import version
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
-# "rel" identifiers (no real URLs)
-nodeinfo_identifier = [
- "https://nodeinfo.diaspora.software/ns/schema/2.1",
- "https://nodeinfo.diaspora.software/ns/schema/2.0",
- "https://nodeinfo.diaspora.software/ns/schema/1.1",
- "https://nodeinfo.diaspora.software/ns/schema/1.0",
- "http://nodeinfo.diaspora.software/ns/schema/2.1",
- "http://nodeinfo.diaspora.software/ns/schema/2.0",
- "http://nodeinfo.diaspora.software/ns/schema/1.1",
- "http://nodeinfo.diaspora.software/ns/schema/1.0",
-]
-
def fetch_instances(domain: str, origin: str, software: str, command: str, path: str = None):
- logger.debug(f"domain='{domain}',origin='{origin}',software='{software}',path='{path}' - CALLED!")
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
- elif not isinstance(origin, str) and origin is not None:
+ logger.debug("domain='%s',origin='%s',software='%s',command='%s',path='%s' - CALLED!", domain, origin, software, command, path)
+ domain_helper.raise_on(domain)
+
+ if not isinstance(origin, str) and origin is not None:
raise ValueError(f"Parameter origin[]='{type(origin)}' is not 'str'")
+ elif not isinstance(command, str):
+ raise ValueError(f"Parameter command[]='{type(command)}' is not 'str'")
+ elif command == "":
+ raise ValueError("Parameter 'command' is empty")
elif software is None:
- logger.debug(f"Updating last_instance_fetch for domain='{domain}' ...")
- instances.set_last_instance_fetch(domain)
-
- logger.debug(f"software for domain='{domain}' is not set, determining ...")
- software = None
try:
+ logger.debug("Software for domain='%s' is not set, determining ...", domain)
software = determine_software(domain, path)
except network.exceptions as exception:
- logger.debug(f"Exception '{type(exception)}' during determining software type")
- pass
+ logger.warning("Exception '%s' during determining software type", type(exception))
+ instances.set_last_error(domain, exception)
- logger.debug(f"Determined software='{software}' for domain='{domain}'")
+ logger.debug("Determined software='%s' for domain='%s'", software, domain)
elif not isinstance(software, str):
raise ValueError(f"Parameter software[]='{type(software)}' is not 'str'")
- elif not isinstance(command, str):
- raise ValueError(f"Parameter command[]='{type(command)}' is not 'str'")
- elif command == "":
- raise ValueError("Parameter 'command' is empty")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain")
+ logger.debug("Checking if domain='%s' is registered ...", domain)
if not instances.is_registered(domain):
- logger.debug(f"Adding new domain='{domain}',origin='{origin}',command='{command}',path='{path}',software='{software}'")
+ logger.debug("Adding new domain='%s',origin='%s',command='%s',path='%s',software='%s'", domain, origin, command, path, software)
instances.add(domain, origin, command, path, software)
- logger.debug(f"Updating last_instance_fetch for domain='{domain}' ...")
+ logger.debug("Updating last_instance_fetch for domain='%s' ...", domain)
instances.set_last_instance_fetch(domain)
- logger.debug("Fetching instances for domain:", domain, software)
- peerlist = fetch_peers(domain, software)
+ peerlist = list()
+ try:
+ logger.debug("Fetching instances for domain='%s',software='%s',origin='%s'", domain, software, origin)
+ peerlist = fetch_peers(domain, software, origin)
+ except network.exceptions as exception:
+ logger.warning("Cannot fetch peers from domain='%s': '%s'", domain, type(exception))
+
+ logger.debug("peerlist[]='%s'", type(peerlist))
+ if isinstance(peerlist, list):
+ logger.debug("Invoking instances.set_total_peerlist(%s,%d) ...", domain, len(peerlist))
+ instances.set_total_peers(domain, peerlist)
- if peerlist is None:
- logger.warning("Cannot fetch peers:", domain)
+ logger.debug("peerlist[]='%s'", type(peerlist))
+ if peerlist is None or len(peerlist) == 0:
+ logger.warning("Cannot fetch peers: domain='%s'", domain)
+
+ if instances.has_pending(domain):
+ logger.debug("Flushing updates for domain='%s' ...", domain)
+ instances.update_data(domain)
+
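+ # Presumably clears per-domain cookies (e.g. collected during CSRF detection) so state doesn't leak into later requests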
+ logger.debug("Invoking cookies.clear(%s) ...", domain)
+ cookies.clear(domain)
+
+ logger.debug("EXIT!")
return
- elif instances.has_pending(domain):
- logger.debug(f"domain='{domain}' has pending nodeinfo data, flushing ...")
- instances.update_data(domain)
- logger.info("Checking %d instances from domain='%s' ...", len(peerlist), domain)
+ logger.info("Checking %d instance(s) from domain='%s',software='%s' ...", len(peerlist), domain, software)
for instance in peerlist:
- logger.debug(f"instance='{instance}'")
+ logger.debug("instance='%s'", instance)
if instance is None:
# Skip "None" types as tidup.domain() cannot parse them
continue
- logger.debug(f"instance='{instance}' - BEFORE")
+ logger.debug("instance='%s' - BEFORE!", instance)
instance = tidyup.domain(instance)
- logger.debug(f"instance='{instance}' - AFTER")
+ logger.debug("instance='%s' - AFTER!", instance)
if instance == "":
- logger.warning(f"Empty instance after tidyup.domain(), domain='{domain}'")
- continue
- elif not validators.domain(instance.split("/")[0]):
- logger.warning(f"Bad instance='{instance}' from domain='{domain}',origin='{origin}'")
+ logger.warning("Empty instance after tidyup.domain(), domain='%s'", domain)
continue
- elif instance.endswith(".arpa"):
- logger.warning(f"instance='{instance}' is a reversed .arpa domain and should not be used generally.")
+ elif not utils.is_domain_wanted(instance):
+ logger.debug("instance='%s' is not wanted - SKIPPED!", instance)
continue
- elif blacklist.is_blacklisted(instance):
- logger.debug("instance is blacklisted:", instance)
+ elif instance.find("/profile/") > 0 or instance.find("/users/") > 0 or (instances.is_registered(instance.split("/")[0]) and instance.find("/c/") > 0):
+ logger.debug("instance='%s' is a link to a single user profile - SKIPPED!", instance)
continue
- elif instance.find("/profile/") > 0 or instance.find("/users/") > 0:
- logger.debug(f"instance='{instance}' is a link to a single user profile - SKIPPED!")
- continue
- elif instance.endswith(".tld"):
- logger.debug(f"instance='{instance}' is a fake domain - SKIPPED!")
+ elif instance.find("/tag/") > 0:
+ logger.debug("instance='%s' is a link to a tag - SKIPPED!", instance)
continue
elif not instances.is_registered(instance):
- logger.debug("Adding new instance:", instance, domain)
+ logger.debug("Adding new instance='%s',domain='%s',command='%s'", instance, domain, command)
instances.add(instance, domain, command)
+ logger.debug("Invoking cookies.clear(%s) ...", domain)
+ cookies.clear(domain)
+
+ logger.debug("Checking if domain='%s' has pending updates ...", domain)
+ if instances.has_pending(domain):
+ logger.debug("Flushing updates for domain='%s' ...", domain)
+ instances.update_data(domain)
+
logger.debug("EXIT!")
-def fetch_peers(domain: str, software: str) -> list:
- logger.debug(f"domain({len(domain)})='{domain}',software='{software}' - CALLED!")
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
- elif not isinstance(software, str) and software is not None:
+def fetch_peers(domain: str, software: str, origin: str) -> list:
+ logger.debug("domain='%s',software='%s',origin='%s' - CALLED!", domain, software, origin)
+ domain_helper.raise_on(domain)
+
+ if not isinstance(software, str) and software is not None:
raise ValueError(f"software[]='{type(software)}' is not 'str'")
if software == "misskey":
- logger.debug(f"Invoking misskey.fetch_peers({domain}) ...")
+ logger.debug("Invoking misskey.fetch_peers(%s) ...", domain)
return misskey.fetch_peers(domain)
elif software == "lemmy":
- logger.debug(f"Invoking lemmy.fetch_peers({domain}) ...")
- return lemmy.fetch_peers(domain)
+ logger.debug("Invoking lemmy.fetch_peers(%s,%s) ...", domain, origin)
+ return lemmy.fetch_peers(domain, origin)
elif software == "peertube":
- logger.debug(f"Invoking peertube.fetch_peers({domain}) ...")
+ logger.debug("Invoking peertube.fetch_peers(%s) ...", domain)
return peertube.fetch_peers(domain)
- # Init peers variable
- peers = list()
-
# No CSRF by default, you don't have to add network.api_headers by yourself here
headers = tuple()
try:
- logger.debug(f"Checking CSRF for domain='{domain}'")
+ logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning(f"Exception '{type(exception)}' during checking CSRF (fetch_peers,{__name__}) - EXIT!")
+ logger.warning("Exception '%s' during checking CSRF (fetch_peers,%s) - EXIT!", type(exception), __name__)
instances.set_last_error(domain, exception)
- return peers
+ return list()
- logger.debug(f"Fetching peers from '{domain}',software='{software}' ...")
- data = network.get_json_api(
- domain,
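+ # Generic fallback endpoints, tried in order: "/api/v1/instance/peers" (Mastodon-style peers API), then "/api/v3/site" (Lemmy-style, lists federated instances)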
+ paths = [
"/api/v1/instance/peers",
- headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
- )
+ "/api/v3/site",
+ ]
- logger.debug("data[]='%s'", type(data))
- if "error_message" in data:
- logger.debug("Was not able to fetch peers, trying alternative ...")
+ # Init peers variable
+ peers = list()
+
+ logger.debug("Checking %d paths ...", len(paths))
+ for path in paths:
+ logger.debug("Fetching path='%s' from domain='%s',software='%s' ...", path, domain, software)
data = network.get_json_api(
domain,
- "/api/v3/site",
+ path,
headers,
(config.get("connection_timeout"), config.get("read_timeout"))
)
logger.debug("data[]='%s'", type(data))
if "error_message" in data:
- logger.warning(f"Could not reach any JSON API at domain='{domain}',status_code='{data['status_code']}',error_message='{data['error_message']}'")
- elif "federated_instances" in data["json"]:
- logger.debug(f"Found federated_instances for domain='{domain}'")
- peers = peers + add_peers(data["json"]["federated_instances"])
- logger.debug("Added instance(s) to peers")
- else:
- message = "JSON response does not contain 'federated_instances' or 'error_message'"
- logger.warning(f"{message},domain='{domain}'")
- instances.set_last_error(domain, message)
- elif isinstance(data["json"], list):
- # DEBUG print("DEBUG: Querying API was successful:", domain, len(data['json']))
- peers = data["json"]
- else:
- logger.warning(f"Cannot parse data[json][]='{type(data['json'])}'")
+ logger.debug("Was not able to fetch peers from path='%s',domain='%s' ...", path, domain)
+ instances.set_last_error(domain, data)
+ elif "json" in data and len(data["json"]) > 0:
+ logger.debug("Querying API path='%s' was successful: domain='%s',data[json][%s]()=%d", path, domain, type(data['json']), len(data['json']))
+ peers = data["json"]
+
+ logger.debug("Marking domain='%s' as successfully handled ...", domain)
+ instances.set_success(domain)
+ break
- logger.debug(f"Adding '{len(peers)}' for domain='{domain}'")
+ if not isinstance(peers, list):
+ logger.warning("peers[]='%s' is not 'list', maybe bad API response?", type(peers))
+ peers = list()
+
+ logger.debug("Invoking instances.set_total_peers(%s,%d) ...", domain, len(peers))
instances.set_total_peers(domain, peers)
- logger.debug("Returning peers[]:", type(peers))
+ logger.debug("peers()=%d - EXIT!", len(peers))
return peers
def fetch_nodeinfo(domain: str, path: str = None) -> dict:
- logger.debug(f"domain='{domain}',path='{path}' - CALLED!")
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
- elif not isinstance(path, str) and path is not None:
+ logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
+ domain_helper.raise_on(domain)
+
+ if not isinstance(path, str) and path is not None:
raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'")
- logger.debug(f"Fetching nodeinfo from domain='{domain}' ...")
+ logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
nodeinfo = fetch_wellknown_nodeinfo(domain)
- logger.debug(f"nodeinfo[{type(nodeinfo)}]({len(nodeinfo)}='{nodeinfo}'")
+ logger.debug("nodeinfo[%s](%d='%s'", type(nodeinfo), len(nodeinfo), nodeinfo)
if "error_message" not in nodeinfo and "json" in nodeinfo and len(nodeinfo["json"]) > 0:
- logger.debug(f"Found nodeinfo[json]()={len(nodeinfo['json'])} - EXIT!")
- return nodeinfo["json"]
+ logger.debug("Found nodeinfo[json]()=%d - EXIT!", len(nodeinfo['json']))
+ return nodeinfo
# No CSRF by default, you don't have to add network.api_headers by yourself here
headers = tuple()
data = dict()
try:
- logger.debug(f"Checking CSRF for domain='{domain}'")
+ logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning(f"Exception '{type(exception)}' during checking CSRF (nodeinfo,{__name__}) - EXIT!")
+ logger.warning("Exception '%s' during checking CSRF (nodeinfo,%s) - EXIT!", type(exception), __name__)
instances.set_last_error(domain, exception)
return {
"status_code" : 500,
"/nodeinfo/2.1",
"/nodeinfo/2.0.json",
"/nodeinfo/2.0",
+ "/nodeinfo/1.0.json",
"/nodeinfo/1.0",
- "/api/v1/instance"
+ "/api/v1/instance",
]
for request in request_paths:
- logger.debug(f"path[{type(path)}]='{path}',request='{request}'")
- if path is None or path == request or path == f"http://{domain}{path}" or path == f"https://{domain}{path}":
- logger.debug(f"Fetching request='{request}' from domain='{domain}' ...")
- if path == f"http://{domain}{path}" or path == f"https://{domain}{path}":
- logger.debug(f"domain='{domain}',path='{path}' has protocol in path, splitting ...")
+ logger.debug("request='%s'", request)
+ http_url = f"http://{domain}{path}"
+ https_url = f"https://{domain}{path}"
+
+ logger.debug("path[%s]='%s',request='%s',http_url='%s',https_url='%s'", type(path), path, request, http_url, https_url)
+ if path is None or path in [request, http_url, https_url]:
+ logger.debug("Fetching request='%s' from domain='%s' ...", request, domain)
+ if path in [http_url, https_url]:
+ logger.debug("domain='%s',path='%s' has protocol in path, splitting ...", domain, path)
components = urlparse(path)
path = components.path
)
logger.debug("data[]='%s'", type(data))
- if "error_message" not in data:
- logger.debug("Success:", request)
+ if "error_message" not in data and "json" in data:
+ logger.debug("Success: request='%s' - Setting detection_mode=STATIC_CHECK ...", request)
instances.set_detection_mode(domain, "STATIC_CHECK")
instances.set_nodeinfo_url(domain, request)
break
- logger.warning(f"Failed fetching nodeinfo from domain='{domain}',status_code='{data['status_code']}',error_message='{data['error_message']}'")
+ logger.warning("Failed fetching nodeinfo from domain='%s',status_code='%s',error_message='%s'", domain, data['status_code'], data['error_message'])
logger.debug("data()=%d - EXIT!", len(data))
return data
def fetch_wellknown_nodeinfo(domain: str) -> dict:
- logger.debug("domain(%d)='%s' - CALLED!", len(domain), domain)
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
+ logger.debug("domain='%s' - CALLED!", domain)
+ domain_helper.raise_on(domain)
+
+ # "rel" identifiers (no real URLs)
+ nodeinfo_identifier = [
+ "https://nodeinfo.diaspora.software/ns/schema/2.1",
+ "http://nodeinfo.diaspora.software/ns/schema/2.1",
+ "https://nodeinfo.diaspora.software/ns/schema/2.0",
+ "http://nodeinfo.diaspora.software/ns/schema/2.0",
+ "https://nodeinfo.diaspora.software/ns/schema/1.1",
+ "http://nodeinfo.diaspora.software/ns/schema/1.1",
+ "https://nodeinfo.diaspora.software/ns/schema/1.0",
+ "http://nodeinfo.diaspora.software/ns/schema/1.0",
+ ]
# No CSRF by default, you don't have to add network.api_headers by yourself here
headers = tuple()
try:
- logger.debug(f"Checking CSRF for domain='{domain}'")
+ logger.debug("Checking CSRF for domain='%s'", domain)
headers = csrf.determine(domain, dict())
except network.exceptions as exception:
- logger.warning(f"Exception '{type(exception)}' during checking CSRF (fetch_wellknown_nodeinfo,{__name__}) - EXIT!")
+ logger.warning("Exception '%s' during checking CSRF (fetch_wellknown_nodeinfo,%s) - EXIT!", type(exception), __name__)
instances.set_last_error(domain, exception)
return {
"status_code" : 500,
"exception" : exception,
}
- logger.debug("Fetching .well-known info for domain:", domain)
+ logger.debug("Fetching .well-known info for domain='%s'", domain)
data = network.get_json_api(
domain,
"/.well-known/nodeinfo",
(config.get("nodeinfo_connection_timeout"), config.get("nodeinfo_read_timeout"))
)
+ logger.debug("data[]='%s'", type(data))
if "error_message" not in data:
nodeinfo = data["json"]
- logger.debug("Found entries:", len(nodeinfo), domain)
+
+ logger.debug("Marking domain='%s' as successfully handled ...", domain)
+ instances.set_success(domain)
+
+ logger.debug("Found entries: nodeinfo()=%d,domain='%s'", len(nodeinfo), domain)
if "links" in nodeinfo:
- logger.debug("Found links in nodeinfo():", len(nodeinfo["links"]))
- for link in nodeinfo["links"]:
- logger.debug(f"link[{type(link)}]='{link}'")
- if not isinstance(link, dict) or not "rel" in link:
- logger.warning(f"link[]='{type(link)}' is not 'dict' or no element 'rel' found")
- elif link["rel"] in nodeinfo_identifier:
- # Default is that 'href' has a complete URL, but some hosts don't send that
- url = link["href"]
- components = urlparse(link["href"])
-
- logger.debug(f"components[{type(components)}]='{components}'")
- if components.scheme == "" and components.netloc == "":
- logger.debug(f"link[href]='{link['href']}' has no scheme and host name in it, prepending from domain='{domain}'")
- url = f"https://{domain}{url}"
- components = urlparse(url)
-
- if not validators.domain(components.netloc):
- logger.warning(f"components.netloc='{components.netloc}' is not a valid domain - SKIPPED!")
- continue
- elif domain.endswith(".arpa"):
- logger.warning("domain='%s' is a domain for reversed IP addresses - SKIPPED!", domain)
- continue
- elif domain.endswith(".tld"):
- logger.warning("domain='%s' is a fake domain - SKIPPED!", domain)
- continue
- elif blacklist.is_blacklisted(components.netloc):
- logger.debug(f"components.netloc='{components.netloc}' is blacklisted - SKIPPED!")
- continue
-
- logger.debug("Fetching nodeinfo from:", url)
- data = network.fetch_api_url(
- url,
- (config.get("connection_timeout"), config.get("read_timeout"))
- )
-
- logger.debug("href,data[]:", link["href"], type(data))
- if "error_message" not in data and "json" in data:
- logger.debug("Found JSON nodeinfo():", len(data))
- instances.set_detection_mode(domain, "AUTO_DISCOVERY")
- instances.set_nodeinfo_url(domain, link["href"])
- break
- else:
- instances.set_last_error(domain, data)
- else:
- logger.warning("Unknown 'rel' value:", domain, link["rel"])
+ logger.debug("Found nodeinfo[links]()=%d record(s),", len(nodeinfo["links"]))
+ for niid in nodeinfo_identifier:
+ data = dict()
+
+ logger.debug("Checking niid='%s' ...", niid)
+ for link in nodeinfo["links"]:
+ logger.debug("link[%s]='%s'", type(link), link)
+ if not isinstance(link, dict) or not "rel" in link:
+ logger.warning("link[]='%s' is not 'dict' or no element 'rel' found", type(link))
+ elif link["rel"] == niid:
+ # Default is that 'href' has a complete URL, but some hosts don't send that
+ logger.debug("link[href]='%s' matches niid='%s'", link["href"], niid)
+ url = link["href"]
+ components = urlparse(link["href"])
+
+ logger.debug("components[%s]='%s'", type(components), components)
+ if components.scheme == "" and components.netloc == "":
+ logger.warning("link[href]='%s' has no scheme and host name in it, prepending from domain='%s'", link['href'], domain)
+ url = f"https://{domain}{url}"
+ components = urlparse(url)
+ elif components.netloc == "":
+ logger.warning("link[href]='%s' has no netloc set, setting domain='%s'", link["href"], domain)
+ url = f"{components.scheme}://{domain}{components.path}"
+ components = urlparse(url)
+
+ if not utils.is_domain_wanted(components.netloc):
+ logger.debug("components.netloc='%s' is not wanted - SKIPPED!", components.netloc)
+ continue
+
+ logger.debug("Fetching nodeinfo from url='%s' ...", url)
+ data = network.fetch_api_url(
+ url,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ )
+
+ logger.debug("link[href]='%s',data[]='%s'", link["href"], type(data))
+ if "error_message" not in data and "json" in data:
+ logger.debug("Found JSON data()=%d,link[href]='%s' - Setting detection_mode=AUTO_DISCOVERY ...", len(data), link["href"])
+ instances.set_detection_mode(domain, "AUTO_DISCOVERY")
+ instances.set_nodeinfo_url(domain, link["href"])
+
+ logger.debug("Marking domain='%s' as successfully handled ...", domain)
+ instances.set_success(domain)
+ break
+ else:
+ logger.debug("Setting last error for domain='%s',data[]='%s'", domain, type(data))
+ instances.set_last_error(domain, data)
+
+ logger.debug("data()=%d", len(data))
+ if "error_message" not in data and "json" in data:
+ logger.debug("Auto-discovery successful: domain='%s'", domain)
+ break
else:
- logger.warning("nodeinfo does not contain 'links':", domain)
+ logger.warning("nodeinfo does not contain 'links': domain='%s'", domain)
- logger.debug("Returning data[]:", type(data))
+ logger.debug("Returning data[]='%s' - EXIT!", type(data))
return data
def fetch_generator_from_path(domain: str, path: str = "/") -> str:
- logger.debug(f"domain({len(domain)})='{domain}',path='{path}' - CALLED!")
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
- elif not isinstance(path, str):
+ logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path)
+ domain_helper.raise_on(domain)
+
+ if not isinstance(path, str):
raise ValueError(f"path[]='{type(path)}' is not 'str'")
elif path == "":
raise ValueError("Parameter 'path' is empty")
- logger.debug(f"domain='{domain}',path='{path}' - CALLED!")
+ logger.debug("domain='%s',path='%s' - CALLED!", domain, path)
software = None
- logger.debug(f"Fetching path='{path}' from '{domain}' ...")
- response = network.fetch_response(domain, path, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
+ logger.debug("Fetching path='%s' from domain='%s' ...", path, domain)
+ response = network.fetch_response(
+ domain, path,
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout")),
+ allow_redirects=True
+ )
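+ # response.url is the final URL after redirects (allow_redirects=True); parse it so a redirect to another domain can be detected further below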
+ components = urlparse(response.url)
- logger.debug("domain,response.ok,response.status_code,response.text[]:", domain, response.ok, response.status_code, type(response.text))
- if response.ok and response.status_code < 300 and response.text.find("<html") > 0:
- logger.debug(f"Parsing response.text()={len(response.text)} Bytes ...")
+ logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
+ if response.ok and response.status_code < 300 and response.text.find("<html") > 0 and components.netloc == domain:
+ logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
- logger.debug("doc[]:", type(doc))
+ logger.debug("doc[]='%s'", type(doc))
generator = doc.find("meta", {"name" : "generator"})
site_name = doc.find("meta", {"property": "og:site_name"})
- logger.debug(f"generator='{generator}',site_name='{site_name}'")
+ logger.debug("generator[]='%s',site_name[]='%s'", type(generator), type(site_name))
if isinstance(generator, bs4.element.Tag) and isinstance(generator.get("content"), str):
- logger.debug("Found generator meta tag:", domain)
+ logger.debug("Found generator meta tag: domain='%s'", domain)
software = tidyup.domain(generator.get("content"))
+
logger.debug("software[%s]='%s'", type(software), software)
if software is not None and software != "":
- logger.info("domain='%s' is generated by '%s'", domain, software)
+ logger.info("domain='%s' is generated by software='%s' - Setting detection_mode=GENERATOR ...", domain, software)
instances.set_detection_mode(domain, "GENERATOR")
elif isinstance(site_name, bs4.element.Tag) and isinstance(site_name.get("content"), str):
- logger.debug("Found property=og:site_name:", domain)
+ logger.debug("Found property=og:site_name, domain='%s'", domain)
software = tidyup.domain(site_name.get("content"))
+
logger.debug("software[%s]='%s'", type(software), software)
if software is not None and software != "":
- logger.info("domain='%s' has og:site_name='%s'", domain, software)
+ logger.debug("domain='%s' has og:site_name='%s' - Setting detection_mode=SITE_NAME ...", domain, software)
instances.set_detection_mode(domain, "SITE_NAME")
+ elif domain != components.netloc:
+ logger.warning("domain='%s' doesn't match components.netloc='%s', maybe redirect to other domain?", domain, components.netloc)
+ instances.set_last_error(domain, f"Redirect from domain='{domain}' to components.netloc='{components.netloc}'")
logger.debug("software[]='%s'", type(software))
if isinstance(software, str) and software == "":
- logger.debug(f"Corrected empty string to None for software of domain='{domain}'")
+ logger.debug("Corrected empty string to None for software of domain='%s'", domain)
software = None
elif isinstance(software, str) and ("." in software or " " in software):
- logger.debug(f"software='{software}' may contain a version number, domain='{domain}', removing it ...")
+ logger.debug("software='%s' may contain a version number, domain='%s', removing it ...", software, domain)
software = version.remove(software)
logger.debug("software[]='%s'", type(software))
if isinstance(software, str) and "powered by " in software:
- logger.debug(f"software='{software}' has 'powered by' in it")
+ logger.debug("software='%s' has 'powered by' in it", software)
software = version.remove(version.strip_powered_by(software))
elif isinstance(software, str) and " hosted on " in software:
- logger.debug(f"software='{software}' has 'hosted on' in it")
+ logger.debug("software='%s' has 'hosted on' in it", software)
software = version.remove(version.strip_hosted_on(software))
elif isinstance(software, str) and " by " in software:
- logger.debug(f"software='{software}' has ' by ' in it")
+ logger.debug("software='%s' has ' by ' in it", software)
software = version.strip_until(software, " by ")
elif isinstance(software, str) and " see " in software:
- logger.debug(f"software='{software}' has ' see ' in it")
+ logger.debug("software='%s' has ' see ' in it", software)
software = version.strip_until(software, " see ")
- logger.debug(f"software='{software}' - EXIT!")
+ logger.debug("software='%s' - EXIT!", software)
return software
def determine_software(domain: str, path: str = None) -> str:
- logger.debug(f"domain({len(domain)})='{domain}',path='{path}' - CALLED!")
- if not isinstance(domain, str):
- raise ValueError(f"Parameter domain[]='{type(domain)}' is not 'str'")
- elif domain == "":
- raise ValueError("Parameter 'domain' is empty")
- elif domain.lower() != domain:
- raise ValueError(f"Parameter domain='{domain}' must be all lower-case")
- elif not validators.domain(domain.split("/")[0]):
- raise ValueError(f"domain='{domain}' is not a valid domain")
- elif domain.endswith(".arpa"):
- raise ValueError(f"domain='{domain}' is a domain for reversed IP addresses, please don't crawl them!")
- elif domain.endswith(".tld"):
- raise ValueError(f"domain='{domain}' is a fake domain, please don't crawl them!")
- elif not isinstance(path, str) and path is not None:
+ logger.debug("domain(%d)='%s',path='%s' - CALLED!", len(domain), domain, path)
+ domain_helper.raise_on(domain)
+
+ if not isinstance(path, str) and path is not None:
raise ValueError(f"Parameter path[]='{type(path)}' is not 'str'")
- logger.debug("Determining software for domain,path:", domain, path)
+ logger.debug("Determining software for domain='%s',path='%s'", domain, path)
software = None
- logger.debug(f"Fetching nodeinfo from '{domain}' ...")
+ logger.debug("Fetching nodeinfo from domain='%s' ...", domain)
data = fetch_nodeinfo(domain, path)
- logger.debug(f"data[{type(data)}]='{data}'")
+ logger.debug("data[%s]='%s'", type(data), data)
if "exception" in data:
# Continue raising it
+ logger.debug("data()=%d contains exception='%s' - raising ...", len(data), type(data["exception"]))
raise data["exception"]
elif "error_message" in data:
- logger.debug(f"Returned error_message during fetching nodeinfo: '{data['error_message']}',status_code='{data['status_code']}'")
- return fetch_generator_from_path(domain)
- elif "status" in data and data["status"] == "error" and "message" in data:
- logger.warning("JSON response is an error:", data["message"])
+ logger.debug("Returned error_message during fetching nodeinfo: '%s',status_code=%d", data['error_message'], data['status_code'])
+ software = fetch_generator_from_path(domain)
+ logger.debug("Generator for domain='%s' is: '%s'", domain, software)
+ elif "json" in data:
+ logger.debug("domain='%s',path='%s',data[json] found ...", domain, path)
+ data = data["json"]
+ else:
+ logger.debug("JSON response from domain='%s' does not include [software][name], fetching / ...", domain)
+ software = fetch_generator_from_path(domain)
+ logger.debug("Generator for domain='%s' is: '%s'", domain, software)
+
+ if "status" in data and data["status"] == "error" and "message" in data:
+ logger.warning("JSON response is an error: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"])
instances.set_last_error(domain, data["message"])
- return fetch_generator_from_path(domain)
+ instances.set_detection_mode(domain, None)
+ instances.set_nodeinfo_url(domain, None)
+ software = fetch_generator_from_path(domain)
+ logger.debug("Generator for domain='%s' is: '%s'", domain, software)
+ elif "software" in data and "name" in data["software"]:
+ logger.debug("Found data[json][software][name] in JSON response")
+ software = data["software"]["name"]
+ logger.debug("software[%s]='%s' - FOUND!", type(software), software)
elif "message" in data:
- logger.warning("JSON response contains only a message:", data["message"])
+ logger.warning("JSON response contains only a message: '%s' - Resetting detection_mode,nodeinfo_url ...", data["message"])
instances.set_last_error(domain, data["message"])
- return fetch_generator_from_path(domain)
+ instances.set_detection_mode(domain, None)
+ instances.set_nodeinfo_url(domain, None)
+
+ logger.debug("Invoking fetch_generator_from_path(%s) ...", domain)
+ software = fetch_generator_from_path(domain)
+ logger.debug("Generator for domain='%s' is: '%s'", domain, software)
elif "software" not in data or "name" not in data["software"]:
- logger.debug(f"JSON response from domain='{domain}' does not include [software][name], fetching / ...")
+ logger.debug("JSON response from domain='%s' does not include [software][name] - Resetting detection_mode,nodeinfo_url ...", domain)
+ instances.set_detection_mode(domain, None)
+ instances.set_nodeinfo_url(domain, None)
+
+ logger.debug("Invoking fetch_generator_from_path(%s) ...", domain)
software = fetch_generator_from_path(domain)
- logger.debug(f"Generator for domain='{domain}' is: '{software}'")
- elif "software" in data and "name" in data["software"]:
- logger.debug("Found data[software][name] in JSON response")
- software = data["software"]["name"]
+ logger.debug("Generator for domain='%s' is: '%s'", domain, software)
+ logger.debug("software[%s]='%s'", type(software), software)
if software is None:
logger.debug("Returning None - EXIT!")
return None
- sofware = tidyup.domain(software)
- logger.debug("sofware after tidyup.domain():", software)
-
- if software in ["akkoma", "rebased", "akkounfucked", "ched"]:
- logger.debug("Setting pleroma:", domain, software)
- software = "pleroma"
- elif software in ["hometown", "ecko"]:
- logger.debug("Setting mastodon:", domain, software)
- software = "mastodon"
- elif software in ["slipfox calckey", "calckey", "groundpolis", "foundkey", "cherrypick", "meisskey", "magnetar", "keybump"]:
- logger.debug("Setting misskey:", domain, software)
- software = "misskey"
- elif software == "runtube.re":
- logger.debug("Setting peertube:", domain, software)
- software = "peertube"
- elif software == "nextcloud social":
- logger.debug("Setting nextcloud:", domain, software)
- software = "nextcloud"
- elif software.find("/") > 0:
- logger.warning("Spliting of slash:", software)
- software = tidyup.domain(software.split("/")[-1])
- elif software.find("|") > 0:
- logger.warning("Spliting of pipe:", software)
- software = tidyup.domain(software.split("|")[0])
- elif "powered by" in software:
- logger.debug(f"software='{software}' has 'powered by' in it")
- software = version.strip_powered_by(software)
- elif isinstance(software, str) and " by " in software:
- logger.debug(f"software='{software}' has ' by ' in it")
- software = version.strip_until(software, " by ")
- elif isinstance(software, str) and " see " in software:
- logger.debug(f"software='{software}' has ' see ' in it")
- software = version.strip_until(software, " see ")
-
- logger.debug("software[]='%s'", type(software))
- if software == "":
- logger.warning("tidyup.domain() left no software name behind:", domain)
- software = None
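+ # software_helper.alias() maps known forks/rebrands to their base software name (e.g. akkoma -> pleroma, hometown -> mastodon), replacing the removed if/elif chain above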
+ logger.debug("software='%s'- BEFORE!", software)
+ software = software_helper.alias(software)
+ logger.debug("software['%s']='%s' - AFTER!", type(software), software)
- logger.debug("software[]='%s'", type(software))
if str(software) == "":
- logger.debug(f"software for '{domain}' was not detected, trying generator ...")
+ logger.debug("software for domain='%s' was not detected, trying generator ...", domain)
software = fetch_generator_from_path(domain)
elif len(str(software)) > 0 and ("." in software or " " in software):
- logger.debug(f"software='{software}' may contain a version number, domain='{domain}', removing it ...")
+ logger.debug("software='%s' may contain a version number, domain='%s', removing it ...", software, domain)
software = version.remove(software)
logger.debug("software[]='%s'", type(software))
if isinstance(software, str) and "powered by" in software:
- logger.debug(f"software='{software}' has 'powered by' in it")
+ logger.debug("software='%s' has 'powered by' in it", software)
software = version.remove(version.strip_powered_by(software))
- logger.debug("Returning domain,software:", domain, software)
+ logger.debug("software='%s' - EXIT!", software)
return software
def find_domains(tag: bs4.element.Tag) -> list:
- logger.debug(f"tag[]='{type(tag)}' - CALLED!")
+ logger.debug("tag[]='%s' - CALLED!", type(tag))
if not isinstance(tag, bs4.element.Tag):
raise ValueError(f"Parameter tag[]='{type(tag)}' is not type of bs4.element.Tag")
elif len(tag.select("tr")) == 0:
domains = list()
for element in tag.select("tr"):
- logger.debug(f"element[]='{type(element)}'")
+ logger.debug("element[]='%s'", type(element))
if not element.find("td"):
logger.debug("Skipping element, no <td> found")
continue
logger.debug("domain='%s',reason='%s'", domain, reason)
- if not validators.domain(domain.split("/")[0]):
- logger.warning("domain='%s' is not a valid domain - SKIPPED!", domain)
- continue
- elif domain.endswith(".arpa"):
- logger.warning("domain='%s' is a domain for reversed IP addresses - SKIPPED!", domain)
- continue
- elif domain.endswith(".tld"):
- logger.warning("domain='%s' is a fake domain - SKIPPED!", domain)
- continue
- elif blacklist.is_blacklisted(domain):
+ if not utils.is_domain_wanted(domain):
logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
continue
elif domain == "gab.com/.ai, develop.gab.com":
logger.warning("domain='%s' is not a valid domain - SKIPPED!", domain)
continue
- logger.debug(f"Adding domain='{domain}',reason='{reason}' ...")
+ logger.debug("Adding domain='%s',reason='%s' ...", domain, reason)
domains.append({
"domain": domain,
"reason": reason,
})
- logger.debug(f"domains()={len(domains)} - EXIT!")
+ logger.debug("domains()=%d - EXIT!", len(domains))
return domains
def add_peers(rows: dict) -> list:
- logger.debug(f"rows[]={type(rows)} - CALLED!")
+ logger.debug("rows[]='%s' - CALLED!", type(rows))
if not isinstance(rows, dict):
raise ValueError(f"Parameter rows[]='{type(rows)}' is not 'dict'")
peers = list()
for key in ["linked", "allowed", "blocked"]:
- logger.debug(f"Checking key='{key}'")
+ logger.debug("Checking key='%s'", key)
if key not in rows or rows[key] is None:
- logger.debug(f"Cannot find key='{key}' or it is NoneType - SKIPPED!")
+ logger.debug("Cannot find key='%s' or it is NoneType - SKIPPED!", key)
continue
- logger.debug(f"Adding {len(rows[key])} peer(s) to peers list ...")
+ logger.debug("Adding %d peer(s) to peers list ...", len(rows[key]))
for peer in rows[key]:
- logger.debug(f"peer='{peer}' - BEFORE!")
- if isinstance(peer, dict) and "domain" in peer:
- logger.debug(f"peer[domain]='{peer['domain']}'")
+ logger.debug("peer[%s]='%s' - BEFORE!", type(peer), peer)
+ if peer is None or peer == "":
+ logger.debug("peer is empty - SKIPPED")
+ continue
+ elif isinstance(peer, dict) and "domain" in peer:
+ logger.debug("peer[domain]='%s'", peer["domain"])
peer = tidyup.domain(peer["domain"])
elif isinstance(peer, str):
- logger.debug(f"peer='{peer}'")
+ logger.debug("peer='%s'", peer)
peer = tidyup.domain(peer)
else:
raise ValueError(f"peer[]='{type(peer)}' is not supported,key='{key}'")
- logger.debug(f"peer='{peer}' - AFTER!")
- if not validators.domain(peer):
- logger.warning(f"peer='{peer}' is not a valid domain - SKIPPED!")
- continue
- elif peer.endswith(".arpa"):
- logger.warning(f"peer='{peer}' is a domain for reversed IP addresses -SKIPPED!")
- continue
- elif peer.endswith(".tld"):
- logger.warning(f"peer='{peer}' is a fake domain - SKIPPED!")
- continue
- elif blacklist.is_blacklisted(peer):
- logger.debug(f"peer='{peer}' is blacklisted - SKIPPED!")
+ logger.debug("peer[%s]='%s' - AFTER!", type(peer), peer)
+ if not utils.is_domain_wanted(peer):
+ logger.debug("peer='%s' is not wanted - SKIPPED!", peer)
continue
- logger.debug(f"Adding peer='{peer}' ...")
+ logger.debug("Appending peer='%s' ...", peer)
peers.append(peer)
- logger.debug(f"peers()={len(peers)} - EXIT!")
+ logger.debug("peers()=%d - EXIT!", len(peers))
return peers