response = utils.fetch_url(args.feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing RSS feed (%d Bytes) ...", len(response.text))
rss = atoma.parse_rss_bytes(response.content)
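For context: requests treats response.ok as true for any status below 400, so the old "< 300" guard still admitted non-200 success codes such as 206 Partial Content, whose body may be incomplete. A minimal sketch of what the tightened guard rejects (requests only; the private _content attribute is set directly here purely for illustration):

import requests

r = requests.Response()
r.status_code = 206              # Partial Content: a body is present, but it may be truncated
r._content = b"<rss>...</rss>"   # illustrative payload, assigned via a private attribute for this demo

print(r.ok and r.status_code < 300 and len(r.text) > 0)   # True  -> old guard would parse a partial body
print(r.ok and r.status_code == 200 and len(r.text) > 0)  # False -> new guard skips it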
response = utils.fetch_url(feed, network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and len(response.text) > 0:
+ if response.ok and response.status_code == 200 and len(response.text) > 0:
logger.debug("Parsing ATOM feed (%d Bytes) ...", len(response.text))
atom = atoma.parse_atom_bytes(response.content)
response = utils.fetch_url(row["url"], network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Returned %d Bytes for processing", len(response.text.strip()))
domains = response.text.split("\n")
)
logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if response.ok and response.status_code < 300 and response.text != "":
+ if response.ok and response.status_code == 200 and response.text != "":
logger.debug("Parsing %d Bytes ...", len(response.text))
doc = bs4.BeautifulSoup(response.text, "html.parser")
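The same three-part guard is now repeated at all four call sites. As a sketch of one possible consolidation (the is_usable_response helper is hypothetical, not part of the existing codebase):

import requests

def is_usable_response(response: requests.Response) -> bool:
    """Accept only a 200 response with a non-empty body.

    status_code == 200 already implies response.ok (ok covers every
    status below 400), so the single comparison suffices here.
    """
    return response.status_code == 200 and len(response.text) > 0

Each check above would then reduce to "if is_usable_response(response):", so any future tightening of the policy happens in one place.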