response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing RSS feed (%d Bytes) ...", len(response.text))
rss = atoma.parse_rss_bytes(response.content)
response = utils.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing ATOM feed (%d Bytes) ...", len(response.text))
atom = atoma.parse_atom_bytes(response.content)
response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Returned %d Bytes for processing", len(response.text.strip()))
domains = response.text.split("\n")
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Parsing %d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text.strip() != "" and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
+ if response.ok and response.status_code == 200 and response.text.strip() != "" and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
# Save cookies
logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
cookies.store(domain, response.cookies.get_dict())
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if ((response.ok and response.status_code < 300) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
+ if ((response.ok and response.status_code == 200) or response.status_code == 410) and response.text.find("<html") > 0 and domain_helper.is_in_url(domain, response.url):
logger.debug("Parsing response.text()=%d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- return response.ok and response.status_code < 300 and response.text.strip() != ""
+ return response.ok and response.status_code == 200 and response.text.strip() != ""
def fetch_response(domain: str, path: str, headers: dict, timeout: tuple, allow_redirects: bool = False) -> requests.models.Response:
logger.debug("domain='%s',path='%s',headers()=%d,timeout='%s',allow_redirects='%s' - CALLED!", domain, path, len(headers), timeout, allow_redirects)
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Parsing %s Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
"reason" : None,
"block_level": "reject",
})
+ else:
+ logger.warning("Cannot fetch /instances due to error: response.ok='%s',response.status_code=%d,response.details='%s'", response.ok, response.status_code, response.reason)
+ instances.set_last_error(domain, response)
except network.exceptions as exception:
logger.warning("domain='%s',exception[%s]:'%s'", domain, type(exception), str(exception))
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Parsing %s Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
if len(peers) == 0:
logger.debug("Found no peers for domain='%s', trying script tag ...", domain)
peers = parse_script(doc)
+ else:
+ logger.warning("Cannot fetch /instances due to error: response.ok='%s',response.status_code=%d,response.details='%s'", response.ok, response.status_code, response.reason)
+ instances.set_last_error(domain, response)
logger.debug("Marking domain='%s' as successfully handled, peers()=%d ...", domain, len(peers))
instances.set_success(domain)