logger.info(f"Fetching csv_url='{block['csv_url']}' for blocker='{block['blocker']}' ...")
response = fba.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
- logger.debug("response[]='%s'", type(response))
+ logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
if response.ok and response.content != "":
logger.debug(f"Fetched {len(response.content)} Bytes, parsing CSV ...")
reader = csv.DictReader(response.content.decode('utf-8').splitlines(), dialect="unix")
logger.debug("Fetching url='%s' ...", url)
response = fba.fetch_url(url, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
- logger.debug("response[]='%s'", type(response))
- if response.ok and response.text != "":
+ logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
+ if response.ok and response.status_code < 300 and response.text != "":
logger.debug(f"Returned {len(response.text.strip())} Bytes for processing")
domains = response.text.split("\n")
continue
logger.debug("EXIT!")
+
def fetch_fedipact(args: argparse.Namespace):
    """Fetch the list of instances that signed the pact from fedipact.online
    and hand any new, valid domain over to the federation crawler.

    Parameters:
        args: Parsed command-line arguments; only used for debug logging here.

    Side effects:
        Acquires the global lock, performs one HTTP GET, and registers
        previously unknown instances via federation.fetch_instances().
    """
    logger.debug("args[]='%s' - CALLED!", type(args))
    locking.acquire()

    response = fba.fetch_url("https://fedipact.online", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))

    # NOTE: the third value is the *length* of the body, not the body itself.
    logger.debug("response.ok='%s',response.status_code=%d,len(response.text)=%d", response.ok, response.status_code, len(response.text))
    if response.ok and response.status_code < 300 and response.text != "":
        logger.debug("Parsing %d Bytes ...", len(response.text))

        doc = bs4.BeautifulSoup(response.text, "html.parser")
        logger.debug("doc[]='%s'", type(doc))

        # The page lists one signed instance per <li>; find_all() is the
        # modern spelling of the deprecated findAll().
        rows = doc.find_all("li")
        logger.info("Checking %d row(s) ...", len(rows))
        for row in rows:
            logger.debug("row[]='%s'", type(row))
            # row.contents[0] is presumably the bare text node holding the
            # domain; tidyup.domain() normalizes it — confirm against tidyup.
            domain = tidyup.domain(row.contents[0])

            logger.debug("domain='%s'", domain)
            if domain == "":
                logger.debug("domain is empty - SKIPPED!")
                continue
            elif not validators.domain(domain):
                logger.warning("domain='%s' is not a valid domain name - SKIPPED!", domain)
                continue
            elif domain.endswith(".arpa"):
                logger.debug("domain='%s' is a domain for reversed IP addresses - SKIPPED!", domain)
                continue
            elif domain.endswith(".tld"):
                logger.debug("domain='%s' is a fake domain - SKIPPED!", domain)
                continue
            elif blacklist.is_blacklisted(domain):
                logger.debug("domain='%s' is blacklisted - SKIPPED!", domain)
                continue
            elif instances.is_registered(domain):
                logger.debug("domain='%s' is already registered - SKIPPED!", domain)
                continue

            logger.info("Fetching domain='%s' ...", domain)
            # Record which command discovered the instance by passing this
            # function's own name as the origin marker.
            federation.fetch_instances(domain, None, None, inspect.currentframe().f_code.co_name)

    logger.debug("EXIT!")