--- /dev/null
+# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
+# Copyright (C) 2023 Free Software Foundation
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+def fetch_joinfediverse(args: argparse.Namespace) -> int:
+ logger.debug("args[]='%s' - CALLED!", type(args))
+
+ logger.debug("Invoking locking.acquire() ...")
+ locking.acquire()
+
+ source_domain = "joinfediverse.wiki"
+ if sources.is_recent(source_domain):
+ logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
+ return 1
+ else:
+ logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
+ sources.update(source_domain)
+
+ logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
+ raw = utils.fetch_url(
+ f"https://{source_domain}/FediBlock",
+ network.web_headers,
+ (config.get("connection_timeout"), config.get("read_timeout"))
+ ).text
+ logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
+
+ doc = bs4.BeautifulSoup(raw, "html.parser")
+ logger.debug("doc[]='%s'", type(doc))
+
+ tables = doc.findAll("table", {"class": "wikitable"})
+
+ logger.info("Analyzing %d table(s) ...", len(tables))
+ blocklist = list()
+ for table in tables:
+ logger.debug("table[]='%s'", type(table))
+
+ rows = table.findAll("tr")
+ logger.info("Checking %d row(s) ...", len(rows))
+ block_headers = dict()
+ for row in rows:
+ logger.debug("row[%s]='%s'", type(row), row)
+
+ headers = row.findAll("th")
+ logger.debug("Found headers()=%d header(s)", len(headers))
+ if len(headers) > 1:
+ block_headers = dict()
+ cnt = 0
+ for header in headers:
+ cnt = cnt + 1
+ logger.debug("header[]='%s',cnt=%d", type(header), cnt)
+ text = header.contents[0]
+
+ logger.debug("text[]='%s'", type(text))
+ if not isinstance(text, str):
+ logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
+ continue
+ elif validators.domain(text.strip()):
+ logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
+ continue
+
+ text = tidyup.domain(text.strip())
+ logger.debug("text='%s' - AFTER!", text)
+ if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
+ logger.debug("Found header: '%s'=%d", text, cnt)
+ block_headers[cnt] = text
+
+ elif len(block_headers) == 0:
+ logger.debug("row is not scrapable - SKIPPED!")
+ continue
+ elif len(block_headers) > 0:
+ logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
+ cnt = 0
+ block = dict()
+
+ for element in row.find_all(["th", "td"]):
+ cnt = cnt + 1
+ logger.debug("element[]='%s',cnt=%d", type(element), cnt)
+ if cnt in block_headers:
+ logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
+
+ text = element.text.strip()
+ key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
+
+ logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
+ if key in ["domain", "instance"]:
+ block[key] = text
+ elif key == "reason":
+ block[key] = tidyup.reason(text)
+ elif key == "subdomain(s)":
+ block[key] = list()
+ if text != "":
+ block[key] = text.split("/")
+ else:
+ logger.debug("key='%s'", key)
+ block[key] = text
+
+ logger.debug("block()=%d ...", len(block))
+ if len(block) > 0:
+ logger.debug("Appending block()=%d ...", len(block))
+ blocklist.append(block)
+
+ logger.debug("blocklist()=%d", len(blocklist))
+
+ database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
+ domains = database.cursor.fetchall()
+
+ logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
+ blocking = list()
+ for block in blocklist:
+ logger.debug("block='%s'", block)
+ if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
+ origin = block["blocked"]
+ logger.debug("origin='%s'", origin)
+            for subdomain in block["subdomain(s)"]:
+                entry = {**block, "blocked": subdomain + "." + origin}
+                logger.debug("entry[blocked]='%s'", entry["blocked"])
+                blocking.append(entry)
+ else:
+ blocking.append(block)
+
+    logger.debug("blocking()=%d", len(blocking))
+ for block in blocking:
+ logger.debug("block[]='%s'", type(block))
+ if "blocked" not in block:
+ raise KeyError(f"block()={len(block)} does not have element 'blocked'")
+
+ block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
+ logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
+
+ if block["blocked"] == "":
+ logger.debug("block[blocked] is empty - SKIPPED!")
+ continue
+ elif not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
+ continue
+ elif instances.is_recent(block["blocked"]):
+ logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
+ continue
+
+ logger.debug("Proccessing blocked='%s' ...", block["blocked"])
+ processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
+
+ blockdict = list()
+ for blocker in domains:
+ blocker = blocker[0]
+ logger.debug("blocker[%s]='%s'", type(blocker), blocker)
+ instances.set_last_blocked(blocker)
+
+ for block in blocking:
+ logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
+ block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
+
+ logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
+ if block["blocked"] == "":
+ logger.debug("block[blocked] is empty - SKIPPED!")
+ continue
+ elif not domain_helper.is_wanted(block["blocked"]):
+ logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
+ continue
+
+ logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
+ if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
+                logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["reason"], blocker)
+ blockdict.append({
+ "blocked": block["blocked"],
+ "reason" : block["reason"],
+ })
+
+ if instances.has_pending(blocker):
+ logger.debug("Flushing updates for blocker='%s' ...", blocker)
+ instances.update(blocker)
+
+ logger.debug("Invoking commit() ...")
+ database.connection.commit()
+
+ logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
+ if config.get("bot_enabled") and len(blockdict) > 0:
+        logger.info("Sending bot POST for blocker='%s',blockdict()=%d ...", blocker, len(blockdict))
+ network.send_bot_post(blocker, blockdict)
+
+ logger.debug("Success! - EXIT!")
+ return 0
obfuscated = obfuscated + 1
# Some friendica servers also obscure domains without hash
- row = instances.deobfuscate("*", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("*", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
obfuscated = obfuscated + 1
# Some obscure them with question marks, not sure if that's dependent on version or not
- row = instances.deobfuscate("?", block["blocked"], block["hash"] if "hash" in block else None)
+ row = instances.deobfuscate("?", block["blocked"], block["digest"] if "digest" in block else None)
logger.debug("row[]='%s'", type(row))
if row is None:
for item in items:
logger.debug("item[]='%s'", type(item))
domain = item.decode_contents()
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
for item in rss.items:
logger.debug("item[%s]='%s'", type(item), item)
domain = item.link.split("=")[1]
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in[None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.debug("element[]='%s'", type(element))
for href in element["href"].split(","):
logger.debug("href[%s]='%s' - BEFORE!", type(href), href)
- domain = tidyup.domain(href) if href != None and href != "" else None
+ domain = tidyup.domain(href) if href not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.info("Processing %d domains ...", len(domains))
for domain in domains:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in[None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row.contents[0]) if row.contents[0] != None and row.contents[0] != "" else None
+ domain = tidyup.domain(row.contents[0]) if row.contents[0] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
elif block["blocked"].find("*") >= 0 or block["blocked"].find("?") >= 0:
logger.debug("block='%s' is obfuscated.", block["blocked"])
obfuscated = obfuscated + 1
- blocked = utils.deobfuscate(block["blocked"], row["domain"], block["hash"] if "hash" in block else None)
+ blocked = utils.deobfuscate(block["blocked"], row["domain"], block["digest"] if "digest" in block else None)
elif not domain_helper.is_wanted(block["blocked"]):
logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
continue
continue
logger.debug("row[hostname]='%s' - BEFORE!", row["hostname"])
- domain = tidyup.domain(row["hostname"]) if row["hostname"] != None and row["hostname"] != "" else None
+ domain = tidyup.domain(row["hostname"]) if row["hostname"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.info("Checking %d row(s) ...", len(rows))
for row in rows:
logger.debug("row[]='%s'", type(row))
- domain = tidyup.domain(row["name"]) if row["name"] != None and row["name"] != "" else None
+ domain = tidyup.domain(row["name"]) if row["name"] not in [None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None and domain == "":
instances.set_last_instance_fetch(row["domain"])
instances.update(row["domain"])
continue
- elif not "json" in raw:
+ elif "json" not in raw:
logger.warning("raw()=%d does not contain key 'json' in response - SKIPPED!", len(raw))
continue
elif not "metadata" in raw["json"]:
continue
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in[None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
domain = components.netloc.lower().split(":")[0]
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in[None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
logger.debug("Checking %d peer(s) row[domain]='%s' ...", len(raw["json"]["metadata"]["peers"]), row["domain"])
for domain in raw["json"]["metadata"]["peers"]:
logger.debug("domain='%s' - BEFORE!", domain)
- domain = tidyup.domain(domain) if domain != None and domain != "" else None
+ domain = tidyup.domain(domain) if domain not in[None, ""] else None
logger.debug("domain='%s' - AFTER!", domain)
if domain is None or domain == "":
+++ /dev/null
-# Fedi API Block - An aggregator for fetching blocking data from fediverse nodes
-# Copyright (C) 2023 Free Software Foundation
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-def fetch_joinfediverse(args: argparse.Namespace) -> int:
- logger.debug("args[]='%s' - CALLED!", type(args))
-
- logger.debug("Invoking locking.acquire() ...")
- locking.acquire()
-
- source_domain = "joinfediverse.wiki"
- if sources.is_recent(source_domain):
- logger.info("API from source_domain='%s' has recently being accessed - EXIT!", source_domain)
- return 1
- else:
- logger.debug("source_domain='%s' has not been recently used, marking ...", source_domain)
- sources.update(source_domain)
-
- logger.info("Fetching /FediBlock wiki page from source_domain='%s' ...", source_domain)
- raw = utils.fetch_url(
- f"https://{source_domain}/FediBlock",
- network.web_headers,
- (config.get("connection_timeout"), config.get("read_timeout"))
- ).text
- logger.debug("raw()=%d,raw[]='%s'", len(raw), type(raw))
-
- doc = bs4.BeautifulSoup(raw, "html.parser")
- logger.debug("doc[]='%s'", type(doc))
-
- tables = doc.findAll("table", {"class": "wikitable"})
-
- logger.info("Analyzing %d table(s) ...", len(tables))
- blocklist = list()
- for table in tables:
- logger.debug("table[]='%s'", type(table))
-
- rows = table.findAll("tr")
- logger.info("Checking %d row(s) ...", len(rows))
- block_headers = dict()
- for row in rows:
- logger.debug("row[%s]='%s'", type(row), row)
-
- headers = row.findAll("th")
- logger.debug("Found headers()=%d header(s)", len(headers))
- if len(headers) > 1:
- block_headers = dict()
- cnt = 0
- for header in headers:
- cnt = cnt + 1
- logger.debug("header[]='%s',cnt=%d", type(header), cnt)
- text = header.contents[0]
-
- logger.debug("text[]='%s'", type(text))
- if not isinstance(text, str):
- logger.debug("text[]='%s' is not of type 'str' - SKIPPED!", type(text))
- continue
- elif validators.domain(text.strip()):
- logger.debug("text='%s' is a domain - SKIPPED!", text.strip())
- continue
-
- text = tidyup.domain(text.strip())
- logger.debug("text='%s' - AFTER!", text)
- if text in ["domain", "instance", "subdomain(s)", "block reason(s)"]:
- logger.debug("Found header: '%s'=%d", text, cnt)
- block_headers[cnt] = text
-
- elif len(block_headers) == 0:
- logger.debug("row is not scrapable - SKIPPED!")
- continue
- elif len(block_headers) > 0:
- logger.debug("Found a row with %d scrapable headers ...", len(block_headers))
- cnt = 0
- block = dict()
-
- for element in row.find_all(["th", "td"]):
- cnt = cnt + 1
- logger.debug("element[]='%s',cnt=%d", type(element), cnt)
- if cnt in block_headers:
- logger.debug("block_headers[%d]='%s'", cnt, block_headers[cnt])
-
- text = element.text.strip()
- key = block_headers[cnt] if block_headers[cnt] not in ["domain", "instance"] else "blocked"
-
- logger.debug("cnt=%d is wanted: key='%s',text[%s]='%s'", cnt, key, type(text), text)
- if key in ["domain", "instance"]:
- block[key] = text
- elif key == "reason":
- block[key] = tidyup.reason(text)
- elif key == "subdomain(s)":
- block[key] = list()
- if text != "":
- block[key] = text.split("/")
- else:
- logger.debug("key='%s'", key)
- block[key] = text
-
- logger.debug("block()=%d ...", len(block))
- if len(block) > 0:
- logger.debug("Appending block()=%d ...", len(block))
- blocklist.append(block)
-
- logger.debug("blocklist()=%d", len(blocklist))
-
- database.cursor.execute("SELECT domain FROM instances WHERE domain LIKE 'climatejustice.%'")
- domains = database.cursor.fetchall()
-
- logger.debug("domains(%d)[]='%s'", len(domains), type(domains))
- blocking = list()
- for block in blocklist:
- logger.debug("block='%s'", block)
- if "subdomain(s)" in block and len(block["subdomain(s)"]) > 0:
- origin = block["blocked"]
- logger.debug("origin='%s'", origin)
- for subdomain in block["subdomain(s)"]:
- block["blocked"] = subdomain + "." + origin
- logger.debug("block[blocked]='%s'", block["blocked"])
- blocking.append(block)
- else:
- blocking.append(block)
-
- logger.debug("blocking()=%d", blocking)
- for block in blocking:
- logger.debug("block[]='%s'", type(block))
- if "blocked" not in block:
- raise KeyError(f"block()={len(block)} does not have element 'blocked'")
-
- block["blocked"] = tidyup.domain(block["blocked"]).encode("idna").decode("utf-8")
- logger.debug("block[blocked]='%s' - AFTER!", block["blocked"])
-
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("block[blocked]='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
- elif instances.is_recent(block["blocked"]):
- logger.debug("block[blocked]='%s' has been recently checked - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("Proccessing blocked='%s' ...", block["blocked"])
- processing.instance(block["blocked"], "climatejustice.social", inspect.currentframe().f_code.co_name)
-
- blockdict = list()
- for blocker in domains:
- blocker = blocker[0]
- logger.debug("blocker[%s]='%s'", type(blocker), blocker)
- instances.set_last_blocked(blocker)
-
- for block in blocking:
- logger.debug("block[blocked]='%s',block[block reason(s)]='%s' - BEFORE!", block["blocked"], block["block reason(s)"] if "block reason(s)" in block else None)
- block["reason"] = tidyup.reason(block["block reason(s)"]) if "block reason(s)" in block else None
-
- logger.debug("block[blocked]='%s',block[reason]='%s' - AFTER!", block["blocked"], block["reason"])
- if block["blocked"] == "":
- logger.debug("block[blocked] is empty - SKIPPED!")
- continue
- elif not domain_helper.is_wanted(block["blocked"]):
- logger.debug("blocked='%s' is not wanted - SKIPPED!", block["blocked"])
- continue
-
- logger.debug("blocked='%s',reason='%s'", block["blocked"], block["reason"])
- if processing.block(blocker, block["blocked"], block["reason"], "reject") and config.get("bot_enabled"):
- logger.debug("Appending blocked='%s',reason='%s' for blocker='%s' ...", block["blocked"], block["block_level"], blocker)
- blockdict.append({
- "blocked": block["blocked"],
- "reason" : block["reason"],
- })
-
- if instances.has_pending(blocker):
- logger.debug("Flushing updates for blocker='%s' ...", blocker)
- instances.update(blocker)
-
- logger.debug("Invoking commit() ...")
- database.connection.commit()
-
- logger.debug("config.get(bot_enabled)='%s',blockdict()=%d", config.get("bot_enabled"), len(blockdict))
- if config.get("bot_enabled") and len(blockdict) > 0:
- logger.info("Sending bot POST for blocker='%s,blockdict()=%d ...", blocker, len(blockdict))
- network.send_bot_post(blocker, blockdict)
-
- logger.debug("Success! - EXIT!")
- return 0
reject_media = reject_reports = False
if "#domain" in row:
- domain = tidyup.domain(row["#domain"]) if row["#domain"] != None and row["#domain"] != "" else None
+ domain = tidyup.domain(row["#domain"]) if row["#domain"] not in [None, ""] else None
elif "domain" in row:
- domain = tidyup.domain(row["domain"]) if row["domain"] != None and row["domain"] != "" else None
+ domain = tidyup.domain(row["domain"]) if row["domain"] not in [None, ""] else None
elif "Domain" in row:
- domain = tidyup.domain(row["Domain"]) if row["Domain"] != None and row["Domain"] != "" else None
+ domain = tidyup.domain(row["Domain"]) if row["Domain"] not in [None, ""] else None
else:
logger.warning("row='%s' does not contain domain column - SKIPPED!", row)
continue
severity = "reject"
if "reason" in row:
- reason = tidyup.reason(row["reason"]) if row["reason"] != None and row["reason"] != "" else None
+ reason = tidyup.reason(row["reason"]) if row["reason"] not in [None, ""] else None
elif "comment" in row:
- reason = tidyup.reason(row["comment"]) if row["comment"] != None and row["comment"] != "" else None
+ reason = tidyup.reason(row["comment"]) if row["comment"] not in [None, ""] else None
elif "Comment" in row:
- reason = tidyup.reason(row["Comment"]) if row["Comment"] != None and row["Comment"] != "" else None
+ reason = tidyup.reason(row["Comment"]) if row["Comment"] not in [None, ""] else None
else:
logger.debug("row='%s' has no reason/comment key provided", row)
logger.debug("software='%s'", software)
if software_helper.is_relay(software):
logger.debug("software='%s' is a relay software - EXIT!", software)
- return list()
+ return
logger.debug("Updating last_instance_fetch for domain='%s' ...", domain)
instances.set_last_instance_fetch(domain)
blocklist.append({
"blocker" : domain,
"blocked" : block["domain"],
- "hash" : block["digest"] if "digest" in block else None,
+ "digest" : block["digest"] if "digest" in block else None,
"reason" : reason,
"block_level": blocks.alias_block_level(block["severity"]),
})
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import logging
+import time
import reqto
import requests
-import time
import urllib3
from fba import utils
domain_helper.raise_on(domain)
if not isinstance(response_time, float):
- raise ValueException(f"response_time[]='{type(response_time)}' is not of type 'float'")
+ raise ValueError(f"response_time[]='{type(response_time)}' is not of type 'float'")
elif response_time < 0:
- raise ValueException(f"response_time={response_time} is below zero")
+ raise ValueError(f"response_time={response_time} is below zero")
# Set timestamp
_set_data("last_response_time", domain, response_time)
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
-from fba.http import federation
from fba.http import network
from fba.models import blocks
# replaced find_next_siblings with find_all_next to account for instances that e.g. hide lists in dropdown menu
for line in header.find_all_next("table")[0].find_all("tr")[1:]:
domain = line.find("span").text
- hash = line.find("span")["title"][9:]
+ digest = line.find("span")["title"][9:]
reason = line.find_all("td")[1].text
logger.debug("domain='%s',reason='%s' - BEFORE!", domain, reason)
logger.warning("domain='%s' is empty,line='%s' - SKIPPED!", domain, line)
continue
- logger.debug("Appending domain='%s',hash='%s',reason='%s' to blocklist header_text='%s' ...", domain, hash, reason, blocklist)
+ logger.debug("Appending domain='%s',digest='%s',reason='%s' to blocklist header_text='%s' ...", domain, digest, reason, blocklist)
blocklist[header_text].append({
"domain": domain,
- "hash" : hash,
+ "digest": digest,
"reason": reason,
})
else:
blocklist.append({
"blocker" : domain,
"blocked" : block["domain"],
- "hash" : block["digest"] if "digest" in block else None,
+ "digest" : block["digest"] if "digest" in block else None,
"reason" : reason,
"block_level": blocks.alias_block_level(block["severity"]),
})
logger.warning("instance(%d)='%s' has no key 'host' - SKIPPED!", len(instance), instance)
continue
elif instance["host"] is None or instance["host"] == "":
- logger.debug("instance[host]='%s' is None or empty - SKIPPED!", instance["host"])
- continue
+ logger.debug("instance[host]='%s' is None or empty - SKIPPED!", instance["host"])
+ continue
logger.debug("instance[host]='%s' - BEFORE!", instance["host"])
blocked = tidyup.domain(instance["host"])