chg: Use keyword-only parameters when relevant

pull/204/head
Raphaël Vinot 2021-05-17 17:08:43 -07:00
parent e6753a5a06
commit bb214d9e2b
3 changed files with 54 additions and 69 deletions
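
For context on the change itself: in Python 3.8+ a bare `/` in a signature makes every parameter before it positional-only, and a bare `*` makes every parameter after it keyword-only. The sketch below is a simplified, standalone version of the new add_context signature from the diff that follows (no `self`; the hash and description values are made up), just to show which call styles are accepted:

from typing import Dict


def add_context(capture_uuid: str, /, urlnode_uuid: str, *, ressource_hash: str,
                legitimate: bool, malicious: bool, details: Dict[str, Dict[str, str]]) -> None:
    # capture_uuid can only be passed positionally; everything after '*' only by keyword.
    print(capture_uuid, urlnode_uuid, ressource_hash, legitimate, malicious, details)


# Works: UUIDs passed positionally, the rest by keyword.
add_context('capture-uuid', 'urlnode-uuid', ressource_hash='deadbeef',
            legitimate=True, malicious=False,
            details={'legitimate': {'description': 'known CDN asset'}})

# TypeError: capture_uuid is positional-only.
# add_context(capture_uuid='capture-uuid', urlnode_uuid='urlnode-uuid', ...)

# TypeError: ressource_hash, legitimate, malicious and details are keyword-only.
# add_context('capture-uuid', 'urlnode-uuid', 'deadbeef', True, False, {})

Keeping the UUID positional-only means the parameter can later be renamed without breaking callers, while the keyword-only flags make call sites self-documenting.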


@@ -130,7 +130,7 @@ class Lookyloo():
         # Remove the UA / IP mapping.
         self.redis.delete(f'user_agents|{yesterday.isoformat()}')
 
-    def _cache_capture(self, capture_uuid: str) -> CrawledTree:
+    def _cache_capture(self, capture_uuid: str, /) -> CrawledTree:
         '''Generate the pickle, set the cache, add capture in the indexes'''
         capture_dir = self._get_capture_dir(capture_uuid)
         har_files = sorted(capture_dir.glob('*.har'))
@@ -231,7 +231,7 @@ class Lookyloo():
             json.dump(host_ips, f)
         return ct
 
-    def get_crawled_tree(self, capture_uuid: str) -> CrawledTree:
+    def get_crawled_tree(self, capture_uuid: str, /) -> CrawledTree:
         '''Get the generated tree in ETE Toolkit format.
         Loads the pickle if it exists, creates it otherwise.'''
         capture_dir = self._get_capture_dir(capture_uuid)
@@ -242,20 +242,21 @@ class Lookyloo():
             raise NoValidHarFile(f'Unable to get tree from {capture_dir}')
         return ct
 
-    def add_context(self, capture_uuid: str, urlnode_uuid: str, ressource_hash: str, legitimate: bool, malicious: bool, details: Dict[str, Dict[str, str]]):
+    def add_context(self, capture_uuid: str, /, urlnode_uuid: str, *, ressource_hash: str,
+                    legitimate: bool, malicious: bool, details: Dict[str, Dict[str, str]]):
         '''Adds context information to a capture or a URL node'''
         if malicious:
             self.context.add_malicious(ressource_hash, details['malicious'])
         if legitimate:
             self.context.add_legitimate(ressource_hash, details['legitimate'])
 
-    def add_to_legitimate(self, capture_uuid: str, hostnode_uuid: Optional[str]=None, urlnode_uuid: Optional[str]=None):
+    def add_to_legitimate(self, capture_uuid: str, /, hostnode_uuid: Optional[str]=None, urlnode_uuid: Optional[str]=None):
         '''Mark a full captyre as legitimate.
         Iterates over all the nodes and mark them all as legitimate too.'''
         ct = self.get_crawled_tree(capture_uuid)
         self.context.mark_as_legitimate(ct, hostnode_uuid, urlnode_uuid)
 
-    def remove_pickle(self, capture_uuid: str) -> None:
+    def remove_pickle(self, capture_uuid: str, /) -> None:
         '''Remove the pickle from a specific capture.'''
         capture_dir = self._get_capture_dir(capture_uuid)
         remove_pickle_tree(capture_dir)
@@ -270,22 +271,22 @@ class Lookyloo():
         [remove_pickle_tree(capture_dir) for capture_dir in self.capture_dirs] # type: ignore
         self.rebuild_cache()
 
-    def get_urlnode_from_tree(self, capture_uuid: str, node_uuid: str) -> URLNode:
+    def get_urlnode_from_tree(self, capture_uuid: str, /, node_uuid: str) -> URLNode:
         '''Get a URL node from a tree, by UUID'''
         ct = self.get_crawled_tree(capture_uuid)
         return ct.root_hartree.get_url_node_by_uuid(node_uuid)
 
-    def get_hostnode_from_tree(self, capture_uuid: str, node_uuid: str) -> HostNode:
+    def get_hostnode_from_tree(self, capture_uuid: str, /, node_uuid: str) -> HostNode:
         '''Get a host node from a tree, by UUID'''
         ct = self.get_crawled_tree(capture_uuid)
         return ct.root_hartree.get_host_node_by_uuid(node_uuid)
 
-    def get_statistics(self, capture_uuid: str) -> Dict[str, Any]:
+    def get_statistics(self, capture_uuid: str, /) -> Dict[str, Any]:
         '''Get the statistics of a capture.'''
         ct = self.get_crawled_tree(capture_uuid)
         return ct.root_hartree.stats
 
-    def get_meta(self, capture_uuid: str) -> Dict[str, str]:
+    def get_meta(self, capture_uuid: str, /) -> Dict[str, str]:
         '''Get the meta informations from a capture (mostly, details about the User Agent used.)'''
         capture_dir = self._get_capture_dir(capture_uuid)
         meta = {}
@@ -294,7 +295,7 @@ class Lookyloo():
             meta = json.load(f)
         return meta
 
-    def categories_capture(self, capture_uuid: str) -> Dict[str, Any]:
+    def categories_capture(self, capture_uuid: str, /) -> Dict[str, Any]:
         '''Get all the categories related to a capture, in MISP Taxonomies format'''
         capture_dir = self._get_capture_dir(capture_uuid)
         # get existing categories if possible
@@ -304,7 +305,7 @@ class Lookyloo():
             return {e: self.taxonomies.revert_machinetag(e) for e in current_categories}
         return {}
 
-    def categorize_capture(self, capture_uuid: str, category: str) -> None:
+    def categorize_capture(self, capture_uuid: str, /, category: str) -> None:
         '''Add a category (MISP Taxonomy tag) to a capture.'''
         if not get_config('generic', 'enable_categorization'):
             return
@@ -322,7 +323,7 @@ class Lookyloo():
         with (capture_dir / 'categories').open('w') as f:
             f.writelines(f'{t}\n' for t in current_categories)
 
-    def uncategorize_capture(self, capture_uuid: str, category: str) -> None:
+    def uncategorize_capture(self, capture_uuid: str, /, category: str) -> None:
         '''Remove a category (MISP Taxonomy tag) from a capture.'''
         if not get_config('generic', 'enable_categorization'):
             return
@@ -337,7 +338,7 @@ class Lookyloo():
         with (capture_dir / 'categories').open('w') as f:
             f.writelines(f'{t}\n' for t in current_categories)
 
-    def trigger_modules(self, capture_uuid: str, force: bool=False) -> None:
+    def trigger_modules(self, capture_uuid: str, /, force: bool=False) -> None:
         '''Launch the 3rd party modules on a capture.
         It uses the cached result *if* the module was triggered the same day.
         The `force` flag re-triggers the module regardless of the cache.'''
@@ -361,7 +362,7 @@ class Lookyloo():
         else:
             self.vt.url_lookup(ct.root_hartree.har.root_url, force)
 
-    def get_modules_responses(self, capture_uuid: str) -> Optional[Dict[str, Any]]:
+    def get_modules_responses(self, capture_uuid: str, /) -> Optional[Dict[str, Any]]:
         '''Get the responses of the modules from the cached responses on the disk'''
         try:
             ct = self.get_crawled_tree(capture_uuid)
@@ -472,7 +473,7 @@ class Lookyloo():
             # If the cache is re-created for some reason, pop from the local cache.
             self._captures_index.pop(uuid, None)
 
-    def hide_capture(self, capture_uuid: str) -> None:
+    def hide_capture(self, capture_uuid: str, /) -> None:
         """Add the capture in the hidden pool (not shown on the front page)
         NOTE: it won't remove the correlations until they are rebuilt.
         """
@@ -518,7 +519,7 @@ class Lookyloo():
         all_cache.sort(key=operator.attrgetter('timestamp'), reverse=True)
         return all_cache
 
-    def capture_cache(self, capture_uuid: str) -> Optional[CaptureCache]:
+    def capture_cache(self, capture_uuid: str, /) -> Optional[CaptureCache]:
         """Get the cache from redis.
         NOTE: Doesn't try to build the pickle"""
         if capture_uuid in self._captures_index:
@@ -556,7 +557,7 @@ class Lookyloo():
             f.write(str(uuid4()))
         return sorted(self.capture_dir.iterdir(), reverse=True)
 
-    def _get_capture_dir(self, capture_uuid: str) -> Path:
+    def _get_capture_dir(self, capture_uuid: str, /) -> Path:
         '''Use the cache to get a capture directory from a capture UUID'''
         capture_dir: str = self.redis.hget('lookup_dirs', capture_uuid) # type: ignore
         if not capture_dir:
@@ -569,7 +570,7 @@ class Lookyloo():
             raise NoValidHarFile(f'UUID ({capture_uuid}) linked to a missing directory ({capture_dir}). Removed now.')
         return to_return
 
-    def get_capture_status(self, capture_uuid: str) -> CaptureStatus:
+    def get_capture_status(self, capture_uuid: str, /) -> CaptureStatus:
         if self.redis.sismember('to_capture', capture_uuid):
             return CaptureStatus.QUEUED
         elif self.redis.hexists('lookup_dirs', capture_uuid):
@@ -622,7 +623,7 @@ class Lookyloo():
             self.logger.warning(f'Unable to capture {to_capture["url"]}')
         return False
 
-    def send_mail(self, capture_uuid: str, email: str='', comment: str='') -> None:
+    def send_mail(self, capture_uuid: str, /, email: str='', comment: str='') -> None:
         '''Send an email notification regarding a specific capture'''
         if not get_config('generic', 'enable_mail_notification'):
             return
@@ -688,7 +689,7 @@ class Lookyloo():
         with metafile.open('w') as f:
             json.dump(to_dump, f)
 
-    def _get_raw(self, capture_uuid: str, extension: str='*', all_files: bool=True) -> BytesIO:
+    def _get_raw(self, capture_uuid: str, /, extension: str='*', all_files: bool=True) -> BytesIO:
         '''Get file(s) from the capture directory'''
         try:
             capture_dir = self._get_capture_dir(capture_uuid)
@@ -710,19 +711,19 @@ class Lookyloo():
         to_return.seek(0)
         return to_return
 
-    def get_html(self, capture_uuid: str, all_html: bool=False) -> BytesIO:
+    def get_html(self, capture_uuid: str, /, all_html: bool=False) -> BytesIO:
         '''Get rendered HTML'''
         return self._get_raw(capture_uuid, 'html', all_html)
 
-    def get_cookies(self, capture_uuid: str, all_cookies: bool=False) -> BytesIO:
+    def get_cookies(self, capture_uuid: str, /, all_cookies: bool=False) -> BytesIO:
         '''Get the cookie(s)'''
         return self._get_raw(capture_uuid, 'cookies.json', all_cookies)
 
-    def get_screenshot(self, capture_uuid: str) -> BytesIO:
+    def get_screenshot(self, capture_uuid: str, /) -> BytesIO:
         '''Get the screenshot(s) of the rendered page'''
         return self._get_raw(capture_uuid, 'png', all_files=False)
 
-    def get_screenshot_thumbnail(self, capture_uuid: str, for_datauri: bool=False, width: int=64) -> Union[str, BytesIO]:
+    def get_screenshot_thumbnail(self, capture_uuid: str, /, for_datauri: bool=False, width: int=64) -> Union[str, BytesIO]:
         '''Get the thumbnail of the rendered page. Always crop to a square.'''
         to_return = BytesIO()
         size = width, width
@@ -745,11 +746,11 @@ class Lookyloo():
         else:
             return to_return
 
-    def get_capture(self, capture_uuid: str) -> BytesIO:
+    def get_capture(self, capture_uuid: str, /) -> BytesIO:
         '''Get all the files related to this capture.'''
         return self._get_raw(capture_uuid)
 
-    def get_urls_rendered_page(self, capture_uuid: str):
+    def get_urls_rendered_page(self, capture_uuid: str, /):
         ct = self.get_crawled_tree(capture_uuid)
         return sorted(set(ct.root_hartree.rendered_node.urls_in_rendered_page)
                       - set(ct.root_hartree.all_url_requests.keys()))
@@ -874,7 +875,7 @@ class Lookyloo():
         self._set_capture_cache(dirpath)
         return perma_uuid
 
-    def get_body_hash_investigator(self, body_hash: str) -> Tuple[List[Tuple[str, str]], List[Tuple[str, float]]]:
+    def get_body_hash_investigator(self, body_hash: str, /) -> Tuple[List[Tuple[str, str]], List[Tuple[str, float]]]:
         '''Returns all the captures related to a hash (sha512), used in the web interface.'''
         total_captures, details = self.indexing.get_body_hash_captures(body_hash, limit=-1)
         cached_captures = self.sorted_capture_cache([d[0] for d in details])
@@ -882,7 +883,7 @@ class Lookyloo():
         domains = self.indexing.get_body_hash_domains(body_hash)
         return captures, domains
 
-    def get_body_hash_full(self, body_hash: str) -> Tuple[Dict[str, List[Dict[str, str]]], BytesIO]:
+    def get_body_hash_full(self, body_hash: str, /) -> Tuple[Dict[str, List[Dict[str, str]]], BytesIO]:
         '''Returns a lot of information about the hash (sha512) and the hits in the instance.
         Also contains the data (base64 encoded)'''
         details = self.indexing.get_body_hash_urls(body_hash)
@@ -904,14 +905,14 @@ class Lookyloo():
                     break
         return details, body_content
 
-    def get_latest_url_capture(self, url: str) -> Optional[CaptureCache]:
+    def get_latest_url_capture(self, url: str, /) -> Optional[CaptureCache]:
         '''Get the most recent capture with this URL'''
         captures = self.sorted_capture_cache(self.indexing.get_captures_url(url))
         if captures:
             return captures[0]
         return None
 
-    def get_url_occurrences(self, url: str, limit: int=20) -> List[Dict]:
+    def get_url_occurrences(self, url: str, /, limit: int=20) -> List[Dict]:
         '''Get the most recent captures and URL nodes where the URL has been seen.'''
         captures = self.sorted_capture_cache(self.indexing.get_captures_url(url))
@@ -931,7 +932,7 @@ class Lookyloo():
             to_return.append(to_append)
         return to_return
 
-    def get_hostname_occurrences(self, hostname: str, with_urls_occurrences: bool=False, limit: int=20) -> List[Dict]:
+    def get_hostname_occurrences(self, hostname: str, /, with_urls_occurrences: bool=False, limit: int=20) -> List[Dict]:
         '''Get the most recent captures and URL nodes where the hostname has been seen.'''
         captures = self.sorted_capture_cache(self.indexing.get_captures_hostname(hostname))
@@ -959,7 +960,7 @@ class Lookyloo():
             to_return.append(to_append)
         return to_return
 
-    def get_cookie_name_investigator(self, cookie_name: str) -> Tuple[List[Tuple[str, str]], List[Tuple[str, float, List[Tuple[str, float]]]]]:
+    def get_cookie_name_investigator(self, cookie_name: str, /) -> Tuple[List[Tuple[str, str]], List[Tuple[str, float, List[Tuple[str, float]]]]]:
         '''Returns all the captures related to a cookie name entry, used in the web interface.'''
         cached_captures = self.sorted_capture_cache([entry[0] for entry in self.indexing.get_cookies_names_captures(cookie_name)])
         captures = [(cache.uuid, cache.title) for cache in cached_captures]
@@ -982,7 +983,7 @@ class Lookyloo():
                 captures_list['different_url'].append((h_capture_uuid, url_uuid, cache.title, cache.timestamp.isoformat(), url_hostname))
         return total_captures, captures_list
 
-    def _normalize_known_content(self, h: str, known_content: Dict[str, Any], url: URLNode) -> Tuple[Optional[Union[str, List[Any]]], Optional[Tuple[bool, Any]]]:
+    def _normalize_known_content(self, h: str, /, known_content: Dict[str, Any], url: URLNode) -> Tuple[Optional[Union[str, List[Any]]], Optional[Tuple[bool, Any]]]:
         ''' There are a few different sources to figure out known vs. legitimate content,
         this method normalize it for the web interface.'''
         known: Optional[Union[str, List[Any]]] = None
@@ -1002,7 +1003,7 @@ class Lookyloo():
         return known, legitimate
 
-    def get_ressource(self, tree_uuid: str, urlnode_uuid: str, h: Optional[str]) -> Optional[Tuple[str, BytesIO, str]]:
+    def get_ressource(self, tree_uuid: str, /, urlnode_uuid: str, h: Optional[str]) -> Optional[Tuple[str, BytesIO, str]]:
         '''Get a specific resource from a URL node. If a hash s also given, we want an embeded resource'''
         try:
             url = self.get_urlnode_from_tree(tree_uuid, urlnode_uuid)
@@ -1045,7 +1046,7 @@ class Lookyloo():
         obj.add_reference(vt_obj, 'analysed-with')
         return vt_obj
 
-    def misp_export(self, capture_uuid: str, with_parent: bool=False) -> Union[List[MISPEvent], Dict[str, str]]:
+    def misp_export(self, capture_uuid: str, /, with_parent: bool=False) -> Union[List[MISPEvent], Dict[str, str]]:
         '''Export a capture in MISP format. You can POST the return of this method
         directly to a MISP instance and it will create an event.'''
         cache = self.capture_cache(capture_uuid)
@@ -1127,7 +1128,7 @@ class Lookyloo():
         return [event]
 
-    def get_hashes(self, tree_uuid: str, hostnode_uuid: Optional[str]=None, urlnode_uuid: Optional[str]=None) -> Set[str]:
+    def get_hashes(self, tree_uuid: str, /, hostnode_uuid: Optional[str]=None, urlnode_uuid: Optional[str]=None) -> Set[str]:
         """Return hashes of resources.
         Only tree_uuid: All the hashes
         tree_uuid and hostnode_uuid: hashes of all the resources in that hostnode (including embedded ressources)
@@ -1142,7 +1143,7 @@ class Lookyloo():
             container = self.get_crawled_tree(tree_uuid)
         return get_resources_hashes(container)
 
-    def get_hostnode_investigator(self, capture_uuid: str, node_uuid: str) -> Tuple[HostNode, List[Dict[str, Any]]]:
+    def get_hostnode_investigator(self, capture_uuid: str, /, node_uuid: str) -> Tuple[HostNode, List[Dict[str, Any]]]:
         '''Gather all the informations needed to display the Hostnode investigator popup.'''
         ct = self.get_crawled_tree(capture_uuid)
         hostnode = ct.root_hartree.get_host_node_by_uuid(node_uuid)

poetry.lock generated

@@ -191,7 +191,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
 [[package]]
 name = "decorator"
-version = "5.0.8"
+version = "5.0.9"
 description = "Decorators for Humans"
 category = "dev"
 optional = false
@@ -1221,6 +1221,13 @@ bootstrap-flask = [
     {file = "Bootstrap_Flask-1.5.2-py2.py3-none-any.whl", hash = "sha256:e3291290320a1f6596325ad4006522a656d9e3cb688090af83d42a7d25bdc1e4"},
 ]
 cchardet = [
+    {file = "cchardet-2.1.7-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:58005cd94e4bbb25fb4cee3ad75a0fe121384cb2b99ac5619aa45aecedf43c7b"},
+    {file = "cchardet-2.1.7-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0ba0793737edc6f58e668edb9d096e5234ddac2326fba984118f8275d54ec484"},
+    {file = "cchardet-2.1.7-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:445162d42a7b53cb7ec5c9b0d390b3c72660c8cabfaeb778f547a6e9efc9462f"},
+    {file = "cchardet-2.1.7-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:8ff7db6482622255cd42a7052e63ab36122a8d6df2b883effc37e2096ce4ee8a"},
+    {file = "cchardet-2.1.7-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:b5f4f09246aacae6d9bf5b741a06141980bcd314392e535239bb22267bd9d8b4"},
+    {file = "cchardet-2.1.7-cp35-cp35m-win32.whl", hash = "sha256:fe50d449f50c0dcb62ccd364daa1185cf6b72813ecf1ae8109d24cb66672fc2e"},
+    {file = "cchardet-2.1.7-cp35-cp35m-win_amd64.whl", hash = "sha256:9a57db6ddadb0727c528ce8028a1f963635324d2a17355c8415812d62ab2dda8"},
     {file = "cchardet-2.1.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6f70139aaf47ffb94d89db603af849b82efdf756f187cdd3e566e30976c519f"},
     {file = "cchardet-2.1.7-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a25f9577e9bebe1a085eec2d6fdd72b7a9dd680811bba652ea6090fb2ff472f"},
     {file = "cchardet-2.1.7-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6b6397d8a32b976a333bdae060febd39ad5479817fabf489e5596a588ad05133"},
@@ -1242,13 +1249,6 @@ cchardet = [
     {file = "cchardet-2.1.7-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f16517f3697569822c6d09671217fdeab61dfebc7acb5068634d6b0728b86c0b"},
     {file = "cchardet-2.1.7-cp38-cp38-win32.whl", hash = "sha256:0b859069bbb9d27c78a2c9eb997e6f4b738db2d7039a03f8792b4058d61d1109"},
     {file = "cchardet-2.1.7-cp38-cp38-win_amd64.whl", hash = "sha256:273699c4e5cd75377776501b72a7b291a988c6eec259c29505094553ee505597"},
-    {file = "cchardet-2.1.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:48ba829badef61441e08805cfa474ccd2774be2ff44b34898f5854168c596d4d"},
-    {file = "cchardet-2.1.7-cp39-cp39-manylinux1_i686.whl", hash = "sha256:bd7f262f41fd9caf5a5f09207a55861a67af6ad5c66612043ed0f81c58cdf376"},
-    {file = "cchardet-2.1.7-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fdac1e4366d0579fff056d1280b8dc6348be964fda8ebb627c0269e097ab37fa"},
-    {file = "cchardet-2.1.7-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:80e6faae75ecb9be04a7b258dc4750d459529debb6b8dee024745b7b5a949a34"},
-    {file = "cchardet-2.1.7-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c96aee9ebd1147400e608a3eff97c44f49811f8904e5a43069d55603ac4d8c97"},
-    {file = "cchardet-2.1.7-cp39-cp39-win32.whl", hash = "sha256:2309ff8fc652b0fc3c0cff5dbb172530c7abb92fe9ba2417c9c0bcf688463c1c"},
-    {file = "cchardet-2.1.7-cp39-cp39-win_amd64.whl", hash = "sha256:24974b3e40fee9e7557bb352be625c39ec6f50bc2053f44a3d1191db70b51675"},
     {file = "cchardet-2.1.7.tar.gz", hash = "sha256:c428b6336545053c2589f6caf24ea32276c6664cb86db817e03a94c60afa0eaf"},
 ]
 certifi = [
@@ -1304,7 +1304,6 @@ click = [
 ]
 colorama = [
     {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
-    {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
 ]
 constantly = [
     {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"},
@@ -1329,8 +1328,8 @@ cssselect = [
     {file = "cssselect-1.1.0.tar.gz", hash = "sha256:f95f8dedd925fd8f54edb3d2dfb44c190d9d18512377d3c1e2388d16126879bc"},
 ]
 decorator = [
-    {file = "decorator-5.0.8-py3-none-any.whl", hash = "sha256:77a3141f7f5837b5de43569c35508ca4570022ba501db8c8a2a8b292bd35772a"},
-    {file = "decorator-5.0.8.tar.gz", hash = "sha256:bff00cfb18698f9a19fa6400451fd7ea894f3845cedd7b8b7b0ce9c53171fefb"},
+    {file = "decorator-5.0.9-py3-none-any.whl", hash = "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323"},
+    {file = "decorator-5.0.9.tar.gz", hash = "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5"},
 ]
 defang = [
     {file = "defang-0.5.3.tar.gz", hash = "sha256:86aeff658d7cd4c3b61d16089872e1c1f0a1b7b3c64d4ca9525c017caeb284d7"},
@@ -1359,7 +1358,6 @@ flask-login = [
     {file = "Flask_Login-0.5.0-py2.py3-none-any.whl", hash = "sha256:7451b5001e17837ba58945aead261ba425fdf7b4f0448777e597ddab39f4fba0"},
 ]
 gunicorn = [
-    {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
     {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
 ]
 h2 = [
@@ -1437,13 +1435,11 @@ lief = [
     {file = "lief-0.11.4-cp37-cp37m-win32.whl", hash = "sha256:846da4389a258f5525a147c7d29867ce0af59e1d81923f8dbed1ed83599f589f"},
     {file = "lief-0.11.4-cp37-cp37m-win_amd64.whl", hash = "sha256:626bf3a31644e5790736cebfc366f3227ed76979e3e6e47c68365bd47e73d76a"},
     {file = "lief-0.11.4-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5ca9e60eb55f4fa7fcd4e526273f56ef94d10995b50a681a1bba714098e9bccc"},
-    {file = "lief-0.11.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b1998784f8475259b71c1f3c78086829724f88da518897a19bcec3c9765581a3"},
     {file = "lief-0.11.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9abefaa0c2916bb7b15712dd2a85cc2d91beb5c380b819fe2b22156eb5b98546"},
     {file = "lief-0.11.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:599e8519398af84e1204791044f442b2d469ccc02196905cab8e378ee4d6da4d"},
     {file = "lief-0.11.4-cp38-cp38-win32.whl", hash = "sha256:bfe10417af317351a73babc7b7c82981ffe08efcd6fe6c79b816be1bcd52bab4"},
     {file = "lief-0.11.4-cp38-cp38-win_amd64.whl", hash = "sha256:d42072d75b61e5314a1223d9afe666b6a62cf030fd494fe90a55d8baf8343204"},
     {file = "lief-0.11.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:254e391d1f640cb89c8a4ce3ebdbfe239bc615e50931e226cbca64b22a63d3e9"},
-    {file = "lief-0.11.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d38beaeef0832826d0b92afea467820005824bf0ec4c9c431557a9ed003c356e"},
     {file = "lief-0.11.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:5266c387eec479663bab503d095c0c5ce1b13e69a81167cd6898215d07e001dc"},
     {file = "lief-0.11.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f864c1dd918c49611779eb1f487d74bc97613a0690ce7c17a18949fc7dc5e79e"},
     {file = "lief-0.11.4-cp39-cp39-win32.whl", hash = "sha256:ba79766fb63096f96bdba1de748f81670b4d545cc2f79d8217e3a42b81cef864"},
@@ -1458,45 +1454,32 @@ lxml = [
     {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"},
     {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"},
     {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"},
-    {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"},
-    {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"},
-    {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"},
-    {file = "lxml-4.6.3-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16"},
     {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"},
     {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"},
     {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"},
     {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"},
     {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"},
-    {file = "lxml-4.6.3-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24"},
     {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"},
-    {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617"},
     {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"},
     {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"},
     {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"},
     {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"},
     {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"},
-    {file = "lxml-4.6.3-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96"},
     {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"},
-    {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92"},
     {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"},
     {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"},
     {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"},
     {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"},
     {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"},
-    {file = "lxml-4.6.3-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e"},
     {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"},
-    {file = "lxml-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae"},
     {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"},
     {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"},
     {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"},
     {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"},
     {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"},
-    {file = "lxml-4.6.3-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59"},
     {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"},
-    {file = "lxml-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a"},
     {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"},
     {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"},
-    {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"},
 ]
 markupsafe = [
     {file = "MarkupSafe-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2efaeb1baff547063bad2b2893a8f5e9c459c4624e1a96644bbba08910ae34e0"},


@@ -741,13 +741,13 @@ def body_hash_details(body_hash: str):
 
 @app.route('/urls/<string:url>', methods=['GET'])
 def url_details(url: str):
     url = unquote_plus(url).strip()
-    hits = lookyloo.get_url_occurrences(url=url, limit=50)
+    hits = lookyloo.get_url_occurrences(url, limit=50)
     return render_template('url.html', url=url, hits=hits)
 
 
 @app.route('/hostnames/<string:hostname>', methods=['GET'])
 def hostname_details(hostname: str):
-    hits = lookyloo.get_hostname_occurrences(hostname=hostname.strip(), with_urls_occurrences=True, limit=50)
+    hits = lookyloo.get_hostname_occurrences(hostname.strip(), with_urls_occurrences=True, limit=50)
     return render_template('hostname.html', hostname=hostname, hits=hits)
@@ -900,7 +900,8 @@ def add_context(tree_uuid: str, node_uuid: str):
         if context_data.get('legitimate_description'):
             legitimate_details['description'] = context_data['legitimate_description']
         details['legitimate'] = legitimate_details
-    lookyloo.add_context(tree_uuid, node_uuid, ressource_hash, legitimate, malicious, details)
+    lookyloo.add_context(tree_uuid, urlnode_uuid=node_uuid, ressource_hash=ressource_hash,
+                         legitimate=legitimate, malicious=malicious, details=details)
     if callback_str == 'hostnode_popup':
         return redirect(url_for('hostnode_popup', tree_uuid=tree_uuid, node_uuid=hostnode_uuid))
     elif callback_str == 'ressources':
@@ -1066,14 +1067,14 @@ def json_hash_info(h: str):
 
 
 @app.route('/json/url_info', methods=['POST'])
 def json_url_info():
     to_query: Dict = request.get_json(force=True) # type: ignore
-    occurrences = lookyloo.get_url_occurrences(**to_query)
+    occurrences = lookyloo.get_url_occurrences(to_query.pop('url'), **to_query)
     return jsonify(occurrences)
 
 
 @app.route('/json/hostname_info', methods=['POST'])
 def json_hostname_info():
     to_query: Dict = request.get_json(force=True) # type: ignore
-    occurrences = lookyloo.get_hostname_occurrences(**to_query)
+    occurrences = lookyloo.get_hostname_occurrences(to_query.pop('hostname'), **to_query)
     return jsonify(occurrences)
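
The two JSON endpoints above also show the knock-on change for callers that expand a request payload straight into keyword arguments: once `url` and `hostname` become positional-only, they have to be popped out of the dict before the `**` expansion. A minimal sketch of that pattern with a stand-in function (not the actual Flask route or the real Lookyloo method body):

from typing import Any, Dict, List


def get_url_occurrences(url: str, /, limit: int = 20) -> List[Dict[str, Any]]:
    # Stand-in with the same signature shape as Lookyloo.get_url_occurrences.
    return [{'url': url, 'limit': limit}]


to_query: Dict[str, Any] = {'url': 'https://www.example.com', 'limit': 50}

# Before this commit, get_url_occurrences(**to_query) worked because 'url' could be a keyword.
# With the positional-only parameter it raises TypeError, so the argument is popped out first;
# the pop runs before **to_query is expanded, so 'url' is not passed twice.
occurrences = get_url_occurrences(to_query.pop('url'), **to_query)
print(occurrences)  # [{'url': 'https://www.example.com', 'limit': 50}]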