logger.info("Fetching csv_url='%s' for blocker='%s' ...", block["csv_url"], block["blocker"])
response = utils.fetch_url(f"{base_url}/{block['csv_url']}", network.web_headers, (config.get("connection_timeout"), config.get("read_timeout")))
- logger.debug("response.ok='%s',response.status_code=%d,response.text()=%d", response.ok, response.status_code, len(response.text))
- if not response.ok or response.status_code > 399 or response.content == "":
+ logger.debug("response.ok='%s',response.status_code=%d,response.content()=%d", response.ok, response.status_code, len(response.content))
+ if not response.ok or response.status_code >= 300 or response.content == "":
logger.warning("Could not fetch csv_url='%s' for blocker='%s' - SKIPPED!", block["csv_url"], block["blocker"])
continue
instances.set_total_blocks(block["blocker"], cnt)
logger.debug("Checking if blocker='%s' has pending updates ...", block["blocker"])
- if instances.has_pending(blocker):
+ if instances.has_pending(block["blocker"]):
logger.debug("Flushing updates for block[blocker]='%s' ...", block["blocker"])
instances.update_data(block["blocker"])
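For reference, here is a minimal standalone sketch of the fetch-and-validate pattern the hunk above converges on, assuming plain `requests` instead of the project's `utils.fetch_url()` wrapper; the `fetch_csv()` name, header dict and timeout tuple are placeholders, not part of the codebase:

```python
import logging

import requests

logger = logging.getLogger(__name__)


def fetch_csv(base_url: str, csv_path: str) -> bytes | None:
    """Fetch a CSV document and return its raw bytes, or None when the
    response is unusable (error/redirect status or empty body)."""
    headers = {"User-Agent": "example-fetcher/1.0"}  # placeholder headers
    timeouts = (5.0, 30.0)                           # (connect, read) - placeholder values

    response = requests.get(f"{base_url}/{csv_path}", headers=headers, timeout=timeouts)
    logger.debug("response.ok='%s',response.status_code=%d,len(response.content)=%d",
                 response.ok, response.status_code, len(response.content))

    # response.content is bytes, so an emptiness check must not compare it
    # to the str "" - that comparison is always False. Truthiness (or b"")
    # does the right thing.
    if not response.ok or response.status_code >= 300 or not response.content:
        logger.warning("Could not fetch csv_path='%s' - SKIPPED!", csv_path)
        return None

    return response.content
```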