From 1f517b1562b2f4074d3f18fbaddb621e2b6d6449 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Thu, 10 Aug 2023 17:31:19 +0200 Subject: [PATCH] chg: Cosmetic changes to support upcoming ete4 --- lookyloo/capturecache.py | 4 ++-- lookyloo/context.py | 2 +- lookyloo/indexing.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lookyloo/capturecache.py b/lookyloo/capturecache.py index 149ad57..04ad709 100644 --- a/lookyloo/capturecache.py +++ b/lookyloo/capturecache.py @@ -497,7 +497,7 @@ class CapturesIndex(Mapping): _all_ips = set() for node in ct.root_hartree.hostname_tree.traverse(): - if hasattr(node, 'hostname_is_ip') and node.hostname_is_ip: + if 'hostname_is_ip' in node.features and node.hostname_is_ip: continue if node.name not in host_cnames or node.name not in host_ips: host_cnames[node.name] = '' @@ -570,7 +570,7 @@ class CapturesIndex(Mapping): if ipasn or cflare_hits: # retraverse tree to populate it with the features for node in ct.root_hartree.hostname_tree.traverse(): - if not hasattr(node, 'resolved_ips'): + if 'resolved_ips' not in node.features: continue ipasn_entries = {} cflare_entries = {} diff --git a/lookyloo/context.py b/lookyloo/context.py index 49676bb..e9b21b6 100644 --- a/lookyloo/context.py +++ b/lookyloo/context.py @@ -208,7 +208,7 @@ class Context(): elif all(urlnode.empty_response for urlnode in hostnode.urls): hostnode.add_feature('all_empty', True) else: - legit = [True for urlnode in hostnode.urls if hasattr(urlnode, 'legitimate')] + legit = [True for urlnode in hostnode.urls if 'legitimate' in urlnode.features] if len(legit) == len(hostnode.urls): hostnode.add_feature('legitimate', True) return tree diff --git a/lookyloo/indexing.py b/lookyloo/indexing.py index 80d916c..b712a8b 100644 --- a/lookyloo/indexing.py +++ b/lookyloo/indexing.py @@ -65,7 +65,7 @@ class Indexing(): already_loaded: Set[Tuple[str, str]] = set() already_cleaned_up: Set[str] = set() for urlnode in 
crawled_tree.root_hartree.url_tree.traverse(): - if not hasattr(urlnode, 'cookies_received'): + if 'cookies_received' not in urlnode.features: continue for domain, cookie, _ in urlnode.cookies_received: name, value = cookie.split('=', 1) @@ -92,7 +92,7 @@ class Indexing(): pipeline = self.redis.pipeline() already_loaded: Set[Tuple[str, str]] = set() for urlnode in crawled_tree.root_hartree.url_tree.traverse(): - if not hasattr(urlnode, 'cookies_received'): + if 'cookies_received' not in urlnode.features: continue for domain, cookie, _ in urlnode.cookies_received: name, value = cookie.split('=', 1) @@ -246,7 +246,7 @@ class Indexing(): already_loaded: Set[str] = set() already_cleaned_up: Set[str] = set() for urlnode in crawled_tree.root_hartree.url_tree.traverse(): - if not hasattr(urlnode, 'hhhash'): + if 'hhhash' not in urlnode.features: continue if urlnode.hhhash in already_loaded: # Only add cookie name once / capture @@ -271,7 +271,7 @@ class Indexing(): pipeline = self.redis.pipeline() already_loaded: Set[str] = set() for urlnode in crawled_tree.root_hartree.url_tree.traverse(): - if not hasattr(urlnode, 'hhhash'): + if 'hhhash' not in urlnode.features: continue if urlnode.hhhash in already_loaded: # Only add cookie name once / capture