import json
import logging
import sys

logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)

# Explicit UTF-8 avoids depending on the platform's default encoding.
-with open("config.json") as f:
+with open("config.json", 'r', encoding='utf-8') as f:
    logger.debug("Loading configuration file ...")
    _config = json.loads(f.read())
    # Cap the configured crawl depth below Python's recursion limit (50 frames of headroom).
    _config["max_crawl_depth"] = min(_config["max_crawl_depth"], (sys.getrecursionlimit() - 50))
import validators
from fba.helpers import blacklist
-from fba.helpers import config
from fba.helpers import cookies
from fba.helpers import domain as domain_helper
from fba.http import network
from fba.http import nodeinfo
-from fba.models import blocks
from fba.models import instances
from fba.networks import lemmy

import bs4
from fba.helpers import blacklist
-from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup

from fba.helpers import blacklist
from fba.helpers import blocks as blocks_helper
-from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
from fba.http import network
-from fba.models import blocks
from fba.models import instances
logging.basicConfig(level=logging.INFO)

from fba.helpers import blacklist
from fba.helpers import blocks as blocks_helper
-from fba.helpers import config
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
from fba.http import network
from fba.http import nodeinfo
-from fba.models import blocks
from fba.models import instances
logging.basicConfig(level=logging.INFO)

import hashlib
import logging
-from urllib.parse import urlparse
-
import bs4
-import requests
import validators
from fba.helpers import blacklist
from fba.helpers import domain as domain_helper
from fba.helpers import tidyup
-from fba.http import network
-
from fba.models import instances
logging.basicConfig(level=logging.INFO)