+ logger.debug("peers()=%d - EXIT!", len(peers))
+ return peers
+
def parse_script(doc: bs4.BeautifulSoup, only: str = None) -> list:
    """Extract federated peer domains from Lemmy's inline `window.isoData` script.

    Scans all <script> tags in the parsed HTML document for the one starting
    with `window.isoData`, parses its JSON payload and collects the domains
    listed under routeData.federatedInstancesResponse.data.federated_instances.

    :param doc:  Parsed HTML document (bs4.BeautifulSoup)
    :param only: Optional element name (e.g. "linked", "allowed", "blocked")
                 to restrict extraction to; None means all elements
    :return:     List of unique, tidied-up, wanted peer domains
    :raises ValueError: If parameters are of the wrong type or `only` is empty
    """
    # BUGFIX: the original debug call had two %s placeholders but no arguments,
    # which makes the logging module raise a formatting error at runtime.
    logger.debug("doc[]='%s',only='%s' - CALLED!", type(doc), only)

    if not isinstance(doc, bs4.BeautifulSoup):
        # BUGFIX: message previously reported type(only) instead of type(doc)
        raise ValueError(f"Parameter doc[]='{type(doc)}' is not of type 'bs4.BeautifulSoup'")
    elif not isinstance(only, str) and only is not None:
        raise ValueError(f"Parameter only[]='{type(only)}' is not of type 'str'")
    elif isinstance(only, str) and only == "":
        raise ValueError("Parameter 'only' is empty")

    scripts = doc.find_all("script")
    peers = list()

    logger.debug("scripts()=%d", len(scripts))
    for script in scripts:
        logger.debug("script[%s].contents()=%d", type(script), len(script.contents))
        if len(script.contents) == 0:
            logger.debug("script has no contents - SKIPPED!")
            continue
        elif not script.contents[0].startswith("window.isoData"):
            logger.debug("script.contents[0]='%s' does not start with window.isoData - SKIPPED!", script.contents[0])
            continue

        logger.debug("script.contents[0][]='%s'", type(script.contents[0]))

        # Take everything after the first "=" and make bare `undefined`
        # tokens valid JSON by quoting them.
        iso_data = script.contents[0].split("=")[1].strip().replace(":undefined", ":\"undefined\"")
        logger.debug("iso_data[%s]='%s'", type(iso_data), iso_data)

        parsed = None
        try:
            parsed = json.loads(iso_data)
        except json.decoder.JSONDecodeError as exception:
            logger.warning("Exception '%s' during parsing %d Bytes: '%s' - EXIT!", type(exception), len(iso_data), str(exception))
            return list()

        logger.debug("parsed[%s]()=%d", type(parsed), len(parsed))

        # Walk down the expected JSON structure, skipping this script if any
        # intermediate key is missing.
        if "routeData" not in parsed:
            logger.warning("parsed[%s]()=%d does not contain element 'routeData'", type(parsed), len(parsed))
            continue
        elif "federatedInstancesResponse" not in parsed["routeData"]:
            logger.warning("parsed[routeData][%s]()=%d does not contain element 'federatedInstancesResponse'", type(parsed["routeData"]), len(parsed["routeData"]))
            continue
        elif "data" not in parsed["routeData"]["federatedInstancesResponse"]:
            logger.warning("parsed[routeData][federatedInstancesResponse][%s]()=%d does not contain element 'data'", type(parsed["routeData"]["federatedInstancesResponse"]), len(parsed["routeData"]["federatedInstancesResponse"]))
            continue
        elif "federated_instances" not in parsed["routeData"]["federatedInstancesResponse"]["data"]:
            # BUGFIX: message previously claimed the missing element was 'data'
            logger.warning("parsed[routeData][federatedInstancesResponse][data][%s]()=%d does not contain element 'federated_instances'", type(parsed["routeData"]["federatedInstancesResponse"]["data"]), len(parsed["routeData"]["federatedInstancesResponse"]["data"]))
            continue

        data = parsed["routeData"]["federatedInstancesResponse"]["data"]["federated_instances"]
        logger.debug("Checking %d data elements ...", len(data))
        for element in data:
            logger.debug("element='%s'", element)
            if isinstance(only, str) and only != element:
                logger.debug("Skipping unwanted element='%s',only='%s'", element, only)
                continue

            logger.debug("Checking data[%s]()=%d row(s) ...", element, len(data[element]))
            for row in data[element]:
                logger.debug("row[]='%s'", type(row))
                if "domain" not in row:
                    logger.warning("row()=%d has no element 'domain' - SKIPPED!", len(row))
                    continue

                logger.debug("row[domain]='%s' - BEFORE!", row["domain"])
                peer = tidyup.domain(row["domain"])
                logger.debug("peer='%s' - AFTER!", peer)

                if peer is None or peer == "":
                    logger.warning("peer='%s' is empty, row[domain]='%s' - SKIPPED!", peer, row["domain"])
                    continue
                elif not domain_helper.is_wanted(peer):
                    logger.debug("peer='%s' is not wanted - SKIPPED!", peer)
                    # BUGFIX: `continue` was missing here, so unwanted peers
                    # fell through and were appended despite being "SKIPPED!"
                    continue
                elif peer in peers:
                    logger.debug("peer='%s' already added - SKIPPED!", peer)
                    continue

                logger.debug("Appending peer='%s' ...", peer)
                peers.append(peer)

    logger.debug("peers()=%d - EXIT!", len(peers))
    return peers