chg: Cosmetic changes to support upcoming ete4

pull/753/head
Raphaël Vinot 2023-08-10 17:31:19 +02:00
parent 206e5957b5
commit 1f517b1562
3 changed files with 7 additions and 7 deletions
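
Note: every hunk below applies the same substitution: hasattr(node, '<feature>') becomes '<feature>' in node.features. On ete3, add_feature() both sets the value as an instance attribute and records the name in node.features, so either check works today; the membership check is the form expected to keep working with ete4, where features are not assumed to remain plain attributes. A minimal sketch of the pattern against ete3 follows (the tree nodes traversed in this commit are assumed to behave like standard ete3 TreeNode objects; the ete4 side is not shown and its exact API is not asserted here):

    # Sketch against ete3 only; ete4 behaviour is an assumption, not demonstrated.
    from ete3 import Tree

    t = Tree('(a,b);')
    node = t.search_nodes(name='a')[0]

    # add_feature() records the name in node.features and sets it as an attribute.
    node.add_feature('resolved_ips', {'1.2.3.4'})

    # Old check: relies on the feature being a plain instance attribute.
    print(hasattr(node, 'resolved_ips'))   # True on ete3

    # New check: the form used throughout this commit.
    if 'resolved_ips' in node.features:
        print(node.resolved_ips)           # {'1.2.3.4'}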

@@ -497,7 +497,7 @@ class CapturesIndex(Mapping):
         _all_ips = set()
         for node in ct.root_hartree.hostname_tree.traverse():
-            if hasattr(node, 'hostname_is_ip') and node.hostname_is_ip:
+            if 'hostname_is_ip' in node.features and node.hostname_is_ip:
                 continue
             if node.name not in host_cnames or node.name not in host_ips:
                 host_cnames[node.name] = ''
@@ -570,7 +570,7 @@ class CapturesIndex(Mapping):
         if ipasn or cflare_hits:
             # retraverse tree to populate it with the features
             for node in ct.root_hartree.hostname_tree.traverse():
-                if not hasattr(node, 'resolved_ips'):
+                if 'resolved_ips' not in node.features:
                     continue
                 ipasn_entries = {}
                 cflare_entries = {}

@@ -208,7 +208,7 @@ class Context():
             elif all(urlnode.empty_response for urlnode in hostnode.urls):
                 hostnode.add_feature('all_empty', True)
             else:
-                legit = [True for urlnode in hostnode.urls if hasattr(urlnode, 'legitimate')]
+                legit = [True for urlnode in hostnode.urls if 'legitimate' in urlnode.features]
                 if len(legit) == len(hostnode.urls):
                     hostnode.add_feature('legitimate', True)
         return tree

@@ -65,7 +65,7 @@ class Indexing():
         already_loaded: Set[Tuple[str, str]] = set()
         already_cleaned_up: Set[str] = set()
         for urlnode in crawled_tree.root_hartree.url_tree.traverse():
-            if not hasattr(urlnode, 'cookies_received'):
+            if 'cookies_received' not in urlnode.features:
                 continue
             for domain, cookie, _ in urlnode.cookies_received:
                 name, value = cookie.split('=', 1)
@@ -92,7 +92,7 @@ class Indexing():
         pipeline = self.redis.pipeline()
         already_loaded: Set[Tuple[str, str]] = set()
         for urlnode in crawled_tree.root_hartree.url_tree.traverse():
-            if not hasattr(urlnode, 'cookies_received'):
+            if 'cookies_received' not in urlnode.features:
                 continue
             for domain, cookie, _ in urlnode.cookies_received:
                 name, value = cookie.split('=', 1)
@@ -246,7 +246,7 @@ class Indexing():
         already_loaded: Set[str] = set()
         already_cleaned_up: Set[str] = set()
         for urlnode in crawled_tree.root_hartree.url_tree.traverse():
-            if not hasattr(urlnode, 'hhhash'):
+            if 'hhhash' not in urlnode.features:
                 continue
             if urlnode.hhhash in already_loaded:
                 # Only add cookie name once / capture
@@ -271,7 +271,7 @@ class Indexing():
         pipeline = self.redis.pipeline()
         already_loaded: Set[str] = set()
         for urlnode in crawled_tree.root_hartree.url_tree.traverse():
-            if not hasattr(urlnode, 'hhhash'):
+            if 'hhhash' not in urlnode.features:
                 continue
             if urlnode.hhhash in already_loaded:
                 # Only add cookie name once / capture