chg: move to lacus, WiP

pull/523/head
Raphaël Vinot 2022-09-14 16:47:38 +02:00
parent 2e079e70df
commit 318f554db3
4 changed files with 166 additions and 263 deletions

bin/async_capture.py

@@ -1,25 +1,19 @@
 #!/usr/bin/env python3

 import asyncio
-import ipaddress
 import json
 import logging
-import os
-import socket

 from datetime import datetime
-from io import BufferedIOBase
 from pathlib import Path
-from tempfile import NamedTemporaryFile
-from typing import Dict, List, Optional, Tuple, Union, Literal
-from urllib.parse import urlsplit
+from typing import Dict, List, Optional, Tuple

-from defang import refang  # type: ignore
+from lacuscore import LacusCore
 from redis.asyncio import Redis
-from playwrightcapture import Capture, PlaywrightCaptureException
+from redis import Redis as RedisSync

 from lookyloo.default import AbstractManager, get_config, get_socket_path, safe_create_dir
-from lookyloo.helpers import get_captures_dir, load_cookies, UserAgents, ParsedUserAgent
+from lookyloo.helpers import get_captures_dir, UserAgents, CaptureStatus

 from lookyloo.modules import FOX
@@ -35,6 +29,8 @@ class AsyncCapture(AbstractManager):
         self.only_global_lookups: bool = get_config('generic', 'only_global_lookups')
         self.capture_dir: Path = get_captures_dir()
         self.user_agents = UserAgents()
+        self.redis_sync: RedisSync = RedisSync(unix_socket_path=get_socket_path('cache'))
+        self.lacus = LacusCore(self.redis_sync)
         self.fox = FOX(get_config('modules', 'FOX'))
         if not self.fox.available:
@@ -63,174 +59,54 @@ class AsyncCapture(AbstractManager):
         # By default, the captures are not on the index, unless the user mark them as listed
         listing = True if (b'listing' in to_capture and to_capture[b'listing'].lower() in [b'true', b'1']) else False
-        # Turn the freetext for the headers into a dict
-        headers: Dict[str, str] = {}
-        if b'headers' in to_capture:
-            for header_line in to_capture[b'headers'].decode().splitlines():
-                if header_line and ':' in header_line:
-                    splitted = header_line.split(':', 1)
-                    if splitted and len(splitted) == 2:
-                        header, h_value = splitted
-                        if header and h_value:
-                            headers[header.strip()] = h_value.strip()
-        if to_capture.get(b'dnt'):
-            headers['DNT'] = to_capture[b'dnt'].decode()

-        if to_capture.get(b'document'):
-            # we do not have a URL yet.
-            document_name = Path(to_capture[b'document_name'].decode()).name
-            tmp_f = NamedTemporaryFile(suffix=document_name, delete=False)
-            with open(tmp_f.name, "wb") as f:
-                f.write(to_capture[b'document'])
-            url = f'file://{tmp_f.name}'
-        elif to_capture.get(b'url'):
-            url = to_capture[b'url'].decode()
-            self.thirdparty_submit(url)
-        else:
-            self.logger.warning(f'Invalid capture (no URL provided): {to_capture}.')
-            url = ''
+        await self.lacus.capture(uuid)

-        if url:
-            self.logger.info(f'Capturing {url} - {uuid}')
-            success, error_message = await self._capture(
-                url,
-                perma_uuid=uuid,
-                cookies_pseudofile=to_capture.get(b'cookies', None),
-                listing=listing,
-                user_agent=to_capture[b'user_agent'].decode() if to_capture.get(b'user_agent') else None,
-                referer=to_capture[b'referer'].decode() if to_capture.get(b'referer') else None,
-                headers=headers if headers else None,
-                proxy=to_capture[b'proxy'].decode() if to_capture.get(b'proxy') else None,
-                os=to_capture[b'os'].decode() if to_capture.get(b'os') else None,
-                browser=to_capture[b'browser'].decode() if to_capture.get(b'browser') else None,
-                browser_engine=to_capture[b'browser_engine'].decode() if to_capture.get(b'browser_engine') else None,  # type: ignore
-                device_name=to_capture[b'device_name'].decode() if to_capture.get(b'device_name') else None,
-                parent=to_capture[b'parent'].decode() if to_capture.get(b'parent') else None
-            )
-            if to_capture.get(b'document'):
-                os.unlink(tmp_f.name)
-            if success:
-                self.logger.info(f'Successfully captured {url} - {uuid}')
-            else:
-                self.logger.warning(f'Unable to capture {url} - {uuid}: {error_message}')
-                await self.redis.setex(f'error_{uuid}', 36000, f'{error_message} - {url} - {uuid}')
+        while True:
+            entries = self.lacus.get_capture(uuid, decode=True)
+            if entries['status'] == CaptureStatus.DONE.value:
+                break
+            elif entries['status'] == CaptureStatus.UNKNOWN.value:
+                self.logger.warning(f'Unable to find {uuid}.')
+                break
+            elif entries['status'] == CaptureStatus.QUEUED.value:
+                self.logger.info(f'{uuid} is in the queue.')
+                await asyncio.sleep(5)
+            elif entries['status'] == CaptureStatus.ONGOING.value:
+                self.logger.info(f'{uuid} is ongoing.')
+                await asyncio.sleep(5)
+            else:
+                self.logger.warning(f'{entries["status"]} is not a valid status')
+                break

-        async with self.redis.pipeline() as lazy_cleanup:
-            if queue and await self.redis.zscore('queues', queue):
-                await lazy_cleanup.zincrby('queues', -1, queue)
-            await lazy_cleanup.srem('ongoing', uuid)
-            await lazy_cleanup.delete(uuid)
-            # make sure to expire the key if nothing was processed for a while (= queues empty)
-            await lazy_cleanup.expire('queues', 600)
-            await lazy_cleanup.execute()
-
-    async def _capture(self, url: str, *, perma_uuid: str,
-                       cookies_pseudofile: Optional[Union[BufferedIOBase, str, bytes]]=None,
-                       listing: bool=True, user_agent: Optional[str]=None,
-                       referer: Optional[str]=None,
-                       headers: Optional[Dict[str, str]]=None,
-                       proxy: Optional[Union[str, Dict]]=None, os: Optional[str]=None,
-                       browser: Optional[str]=None, parent: Optional[str]=None,
-                       browser_engine: Optional[Literal['chromium', 'firefox', 'webkit']]=None,
-                       device_name: Optional[str]=None,
-                       viewport: Optional[Dict[str, int]]=None) -> Tuple[bool, str]:
-        '''Launch a capture'''
-        url = url.strip()
-        url = refang(url)
-        if not url.startswith('data') and not url.startswith('http') and not url.startswith('file'):
-            url = f'http://{url}'
-        splitted_url = urlsplit(url)
-        if self.only_global_lookups:
-            if url.startswith('data') or url.startswith('file'):
-                pass
-            elif splitted_url.netloc:
-                if splitted_url.hostname and splitted_url.hostname.split('.')[-1] != 'onion':
-                    try:
-                        ip = socket.gethostbyname(splitted_url.hostname)
-                    except socket.gaierror:
-                        self.logger.info('Name or service not known')
-                        return False, 'Name or service not known.'
-                    if not ipaddress.ip_address(ip).is_global:
-                        return False, 'Capturing ressources on private IPs is disabled.'
-            else:
-                return False, 'Unable to find hostname or IP in the query.'
-
-        # check if onion
-        if (not proxy and splitted_url.netloc and splitted_url.hostname
-                and splitted_url.hostname.split('.')[-1] == 'onion'):
-            proxy = get_config('generic', 'tor_proxy')
-
-        if not user_agent:
-            # Catch case where the UA is broken on the UI, and the async submission.
-            self.user_agents.user_agents  # triggers an update of the default UAs
-        capture_ua = user_agent if user_agent else self.user_agents.default['useragent']
-        if not browser_engine:
-            # Automatically pick a browser
-            parsed_ua = ParsedUserAgent(capture_ua)
-            if not parsed_ua.browser:
-                browser_engine = 'webkit'
-            elif parsed_ua.browser.lower().startswith('chrom'):
-                browser_engine = 'chromium'
-            elif parsed_ua.browser.lower().startswith('firefox'):
-                browser_engine = 'firefox'
-            else:
-                browser_engine = 'webkit'
-
-        self.logger.info(f'Capturing {url}')
-        try:
-            async with Capture(browser=browser_engine, device_name=device_name, proxy=proxy) as capture:
-                if headers:
-                    capture.headers = headers
-                if cookies_pseudofile:
-                    # required by Mypy: https://github.com/python/mypy/issues/3004
-                    capture.cookies = load_cookies(cookies_pseudofile)  # type: ignore
-                if viewport:
-                    # required by Mypy: https://github.com/python/mypy/issues/3004
-                    capture.viewport = viewport  # type: ignore
-                if not device_name:
-                    capture.user_agent = capture_ua
-                await capture.initialize_context()
-                entries = await capture.capture_page(url, referer=referer)
-        except PlaywrightCaptureException as e:
-            self.logger.exception(f'Invalid parameters for the capture of {url} - {e}')
-            return False, 'Invalid parameters for the capture of {url} - {e}'
-        except Exception as e:
-            self.logger.exception(f'Something went terribly wrong when capturing {url} - {e}')
-            return False, f'Something went terribly wrong when capturing {url}.'

         if not entries:
             # broken
-            self.logger.critical(f'Something went terribly wrong when capturing {url}.')
-            return False, f'Something went terribly wrong when capturing {url}.'
+            self.logger.critical(f'Something went terribly wrong when capturing {uuid}.')
+        else:
             now = datetime.now()
             dirpath = self.capture_dir / str(now.year) / f'{now.month:02}' / now.isoformat()
             safe_create_dir(dirpath)

-            if os or browser:
-                meta = {}
-                if os:
-                    meta['os'] = os
-                if browser:
-                    meta['browser'] = browser
+            if b'os' in to_capture or b'browser' in to_capture:
+                meta: Dict[str, str] = {}
+                if b'os' in to_capture:
+                    meta['os'] = to_capture[b'os'].decode()
+                if b'browser' in to_capture:
+                    meta['browser'] = to_capture[b'browser'].decode()
                 with (dirpath / 'meta').open('w') as _meta:
                     json.dump(meta, _meta)

             # Write UUID
             with (dirpath / 'uuid').open('w') as _uuid:
-                _uuid.write(perma_uuid)
+                _uuid.write(uuid)

             # Write no_index marker (optional)
             if not listing:
                 (dirpath / 'no_index').touch()

             # Write parent UUID (optional)
-            if parent:
+            if b'parent' in to_capture:
                 with (dirpath / 'parent').open('w') as _parent:
-                    _parent.write(parent)
+                    _parent.write(to_capture[b'parent'].decode())

             if 'downloaded_filename' in entries and entries['downloaded_filename']:
                 with (dirpath / '0.data.filename').open('w') as _downloaded_filename:
@@ -244,9 +120,6 @@ class AsyncCapture(AbstractManager):
                 with (dirpath / 'error.txt').open('w') as _error:
                     json.dump(entries['error'], _error)

-            if 'har' not in entries:
-                return False, entries['error'] if entries['error'] else "Unknown error"
-
             with (dirpath / '0.har').open('w') as _har:
                 json.dump(entries['har'], _har)
@@ -265,8 +138,16 @@ class AsyncCapture(AbstractManager):
             if 'cookies' in entries and entries['cookies']:
                 with (dirpath / '0.cookies.json').open('w') as _cookies:
                     json.dump(entries['cookies'], _cookies)
-        await self.redis.hset('lookup_dirs', perma_uuid, str(dirpath))
-        return True, 'All good!'
+            await self.redis.hset('lookup_dirs', uuid, str(dirpath))
+
+        async with self.redis.pipeline() as lazy_cleanup:
+            if queue and await self.redis.zscore('queues', queue):
+                await lazy_cleanup.zincrby('queues', -1, queue)
+            await lazy_cleanup.srem('ongoing', uuid)
+            await lazy_cleanup.delete(uuid)
+            # make sure to expire the key if nothing was processed for a while (= queues empty)
+            await lazy_cleanup.expire('queues', 600)
+            await lazy_cleanup.execute()

     async def _to_run_forever_async(self):
         self.redis: Redis = Redis(unix_socket_path=get_socket_path('cache'))
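Taken together, this file's side of the commit replaces the in-process Playwright logic with three lacuscore calls: enqueue a capture, trigger it, and poll its status by UUID. A minimal end-to-end sketch of that flow, assuming only what the diff shows (a Redis instance behind a unix socket; the socket path and target URL below are illustrative, and CaptureStatus comes from lookyloo.helpers exactly as this commit imports it):

import asyncio

from lacuscore import LacusCore
from redis import Redis

from lookyloo.helpers import CaptureStatus

redis = Redis(unix_socket_path='cache.sock')  # illustrative socket path
lacus = LacusCore(redis)


async def run_one_capture() -> dict:
    # Submission side (lookyloo.py): store the settings, get a capture UUID back.
    uuid = lacus.enqueue(url='https://www.circl.lu', priority=1)
    # Worker side (async_capture.py): run the capture, then poll until it settles.
    await lacus.capture(uuid)
    while True:
        entries = lacus.get_capture(uuid, decode=True)
        if entries['status'] == CaptureStatus.DONE.value:
            return entries  # 'har', 'cookies', 'downloaded_filename', ... as used above
        if entries['status'] in (CaptureStatus.QUEUED.value, CaptureStatus.ONGOING.value):
            await asyncio.sleep(5)
        else:
            raise RuntimeError(f'capture {uuid} ended with status {entries["status"]}')

entries = asyncio.run(run_one_capture())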

bin/start.py

@@ -13,20 +13,20 @@ def main():
     p.check_returncode()
     print('done.')
     print('Start archiving process...')
-    Popen(['archiver'])
+    #Popen(['archiver'])
     print('done.')
     print('Start asynchronous ingestor...')
-    for _ in range(get_config('generic', 'async_capture_processes')):
-        Popen(['async_capture'])
+    #for _ in range(get_config('generic', 'async_capture_processes')):
+    #    Popen(['async_capture'])
     print('done.')
     print('Start background indexer...')
-    Popen(['background_indexer'])
+    #Popen(['background_indexer'])
     print('done.')
     print('Start background processing...')
-    Popen(['processing'])
+    # Popen(['processing'])
     print('done.')
     print('Start website...')
-    Popen(['start_website'])
+    #Popen(['start_website'])
     print('done.')

lookyloo/lookyloo.py

@@ -15,17 +15,16 @@ from io import BytesIO
 from pathlib import Path
 from typing import (Any, Dict, Iterable, List, MutableMapping, Optional, Set,
                     Tuple, Union)
-from uuid import uuid4
 from zipfile import ZipFile

 from defang import defang  # type: ignore
 from har2tree import CrawledTree, HostNode, URLNode
+from lacuscore import LacusCore
 from PIL import Image, UnidentifiedImageError
 from playwrightcapture import get_devices
 from pymisp import MISPAttribute, MISPEvent, MISPObject
 from redis import ConnectionPool, Redis
 from redis.connection import UnixDomainSocketConnection
-from werkzeug.utils import secure_filename

 from .capturecache import CaptureCache, CapturesIndex
 from .context import Context
@@ -101,6 +100,8 @@ class Lookyloo():
         self._captures_index = CapturesIndex(self.redis, self.context)
         self.logger.info('Index initialized.')

+        self.lacus = LacusCore(self.redis, get_config('generic', 'tor_proxy'))
+
     @property
     def redis(self):
         return Redis(connection_pool=self.redis_pool)
@@ -407,9 +408,6 @@ class Lookyloo():
             elif isinstance(value, (list, dict)):
                 query[key] = json.dumps(value) if value else None

-        if 'document_name' in query:
-            query['document_name'] = secure_filename(query['document_name'])
-
         query = {k: v for k, v in query.items() if v is not None}  # Remove the none, it makes redis unhappy
         # dirty deduplicate
         hash_query = hashlib.sha512(pickle.dumps(query)).hexdigest()
@@ -418,16 +416,40 @@ class Lookyloo():
         if (existing_uuid := self.redis.get(f'query_hash:{hash_query}')):
             return existing_uuid
-        perma_uuid = str(uuid4())
-        self.redis.set(f'query_hash:{hash_query}', perma_uuid, nx=True, ex=300)

         priority = get_priority(source, user, authenticated)
+        # NOTE: Lookyloo's capture can pass a do-not-track header independently from the default headers, merging it here
+        headers = query.pop('headers', '')
+        if 'dnt' in query:
+            headers += f'\nDNT: {query.pop("dnt")}'
+        headers = headers.strip()
+        perma_uuid = self.lacus.enqueue(
+            url=query.pop('url', None),
+            document_name=query.pop('document_name', None),
+            document=query.pop('document', None),
+            depth=query.pop('depth', 0),
+            browser=query.pop('browser', None),
+            device_name=query.pop('device_name', None),
+            user_agent=query.pop('user_agent', None),
+            proxy=query.pop('proxy', None),
+            general_timeout_in_sec=query.pop('general_timeout_in_sec', None),
+            cookies=query.pop('cookies', None),
+            headers=headers if headers else None,
+            http_credentials=query.pop('http_credentials', None),
+            viewport=query.pop('viewport', None),
+            referer=query.pop('referer', None),
+            rendered_hostname_only=query.pop('rendered_hostname_only', True),
+            # force=query.pop('force', False),
+            # recapture_interval=query.pop('recapture_interval', 300),
+            priority=priority
+        )
+        self.redis.set(f'query_hash:{hash_query}', perma_uuid, nx=True, ex=300)

         p = self.redis.pipeline()
         if priority < -10:
             # Someone is probably abusing the system with useless URLs, remove them from the index
             query['listing'] = 0
-        p.hset(perma_uuid, mapping=query)
+        p.hset(perma_uuid, mapping=query)  # This will add the remaining entries that are lookyloo specific
         p.zadd('to_capture', {perma_uuid: priority})
         p.zincrby('queues', 1, f'{source}|{authenticated}|{user}')
         p.set(f'{perma_uuid}_mgmt', f'{source}|{authenticated}|{user}')
         p.execute()
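Two details in this hunk are easy to miss: a query is deduplicated by hashing its pickled settings, so an identical resubmission within the five-minute expiry window gets the existing UUID back, and the standalone DNT flag is folded into the free-text headers before anything is handed to lacus. A small sketch of both steps in isolation (dedup_key and merge_dnt are illustrative helper names, not from the codebase):

import hashlib
import pickle
from typing import Any, Dict


def dedup_key(query: Dict[str, Any]) -> str:
    # Same "dirty deduplicate" trick as above: drop the None values, then
    # hash the pickled dict. Identical queries produce identical digests,
    # which key the query_hash:<digest> entries in Redis.
    query = {k: v for k, v in query.items() if v is not None}
    return hashlib.sha512(pickle.dumps(query)).hexdigest()


def merge_dnt(query: Dict[str, Any]) -> str:
    # Fold the standalone DNT flag into the free-text headers blob,
    # mirroring the NOTE in the hunk above.
    headers = query.pop('headers', '')
    if 'dnt' in query:
        headers += f'\nDNT: {query.pop("dnt")}'
    return headers.strip()


query = {'url': 'https://www.circl.lu', 'dnt': '1', 'headers': 'Accept-Language: fr'}
print(merge_dnt(query))       # Accept-Language: fr\nDNT: 1
print(dedup_key(query)[:16])  # stable for identical settings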

poetry.lock (generated, 88 lines changed)

@@ -140,7 +140,7 @@ python-versions = "*"

 [[package]]
 name = "certifi"
-version = "2022.6.15"
+version = "2022.6.15.2"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false
@@ -547,7 +547,7 @@ i18n = ["Babel (>=2.7)"]

 [[package]]
 name = "jsonschema"
-version = "4.15.0"
+version = "4.16.0"
 description = "An implementation of JSON Schema validation for Python"
 category = "main"
 optional = false
@@ -640,7 +640,7 @@ python-versions = "*"

 [[package]]
 name = "numpy"
-version = "1.23.2"
+version = "1.23.3"
 description = "NumPy is the fundamental package for array computing with Python."
 category = "main"
 optional = false
@@ -1158,7 +1158,7 @@ python-versions = ">=3.7"

 [[package]]
 name = "traitlets"
-version = "5.3.0"
+version = "5.4.0"
 description = ""
 category = "dev"
 optional = false
@@ -1257,7 +1257,7 @@ python-versions = "*"

 [[package]]
 name = "types-requests"
-version = "2.28.9"
+version = "2.28.10"
 description = "Typing stubs for requests"
 category = "dev"
 optional = false
@@ -1268,7 +1268,7 @@ types-urllib3 = "<1.27"

 [[package]]
 name = "types-urllib3"
-version = "1.26.23"
+version = "1.26.24"
 description = "Typing stubs for urllib3"
 category = "dev"
 optional = false
@@ -1600,8 +1600,8 @@ cchardet = [
     {file = "cchardet-2.1.7.tar.gz", hash = "sha256:c428b6336545053c2589f6caf24ea32276c6664cb86db817e03a94c60afa0eaf"},
 ]
 certifi = [
-    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
-    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
+    {file = "certifi-2022.6.15.2-py3-none-any.whl", hash = "sha256:0aa1a42fbd57645fabeb6290a7687c21755b0344ecaeaa05f4e9f6207ae2e9a8"},
+    {file = "certifi-2022.6.15.2.tar.gz", hash = "sha256:aa08c101214127b9b0472ca6338315113c9487d45376fd3e669201b477c71003"},
 ]
 chardet = [
     {file = "chardet-5.0.0-py3-none-any.whl", hash = "sha256:d3e64f022d254183001eccc5db4040520c0f23b1a3f33d6413e099eb7f126557"},
@@ -1874,8 +1874,8 @@ Jinja2 = [
     {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
 ]
 jsonschema = [
-    {file = "jsonschema-4.15.0-py3-none-any.whl", hash = "sha256:2df0fab225abb3b41967bb3a46fd37dc74b1536b5296d0b1c2078cd072adf0f7"},
-    {file = "jsonschema-4.15.0.tar.gz", hash = "sha256:21f4979391bdceb044e502fd8e79e738c0cdfbdc8773f9a49b5769461e82fe1e"},
+    {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"},
+    {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"},
 ]
 lief = [
     {file = "lief-0.12.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4fbbc9d520de87ac22210c62d22a9b088e5460f9a028741311e6f68ef8877ddd"},
@@ -2112,34 +2112,34 @@ mypy-extensions = [
     {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
 ]
 numpy = [
-    {file = "numpy-1.23.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e603ca1fb47b913942f3e660a15e55a9ebca906857edfea476ae5f0fe9b457d5"},
-    {file = "numpy-1.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:633679a472934b1c20a12ed0c9a6c9eb167fbb4cb89031939bfd03dd9dbc62b8"},
-    {file = "numpy-1.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e5226674f6ea79e14e3b91bfbc153fdf3ac13f5cc54ee7bc8fdbe820a32da0"},
-    {file = "numpy-1.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdc02c0235b261925102b1bd586579b7158e9d0d07ecb61148a1799214a4afd5"},
-    {file = "numpy-1.23.2-cp310-cp310-win32.whl", hash = "sha256:df28dda02c9328e122661f399f7655cdcbcf22ea42daa3650a26bce08a187450"},
-    {file = "numpy-1.23.2-cp310-cp310-win_amd64.whl", hash = "sha256:8ebf7e194b89bc66b78475bd3624d92980fca4e5bb86dda08d677d786fefc414"},
-    {file = "numpy-1.23.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dc76bca1ca98f4b122114435f83f1fcf3c0fe48e4e6f660e07996abf2f53903c"},
-    {file = "numpy-1.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ecfdd68d334a6b97472ed032b5b37a30d8217c097acfff15e8452c710e775524"},
-    {file = "numpy-1.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5593f67e66dea4e237f5af998d31a43e447786b2154ba1ad833676c788f37cde"},
-    {file = "numpy-1.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac987b35df8c2a2eab495ee206658117e9ce867acf3ccb376a19e83070e69418"},
-    {file = "numpy-1.23.2-cp311-cp311-win32.whl", hash = "sha256:d98addfd3c8728ee8b2c49126f3c44c703e2b005d4a95998e2167af176a9e722"},
-    {file = "numpy-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ecb818231afe5f0f568c81f12ce50f2b828ff2b27487520d85eb44c71313b9e"},
-    {file = "numpy-1.23.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:909c56c4d4341ec8315291a105169d8aae732cfb4c250fbc375a1efb7a844f8f"},
-    {file = "numpy-1.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8247f01c4721479e482cc2f9f7d973f3f47810cbc8c65e38fd1bbd3141cc9842"},
-    {file = "numpy-1.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8b97a8a87cadcd3f94659b4ef6ec056261fa1e1c3317f4193ac231d4df70215"},
-    {file = "numpy-1.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5b7ccae24e3d8501ee5563e82febc1771e73bd268eef82a1e8d2b4d556ae66"},
-    {file = "numpy-1.23.2-cp38-cp38-win32.whl", hash = "sha256:9b83d48e464f393d46e8dd8171687394d39bc5abfe2978896b77dc2604e8635d"},
-    {file = "numpy-1.23.2-cp38-cp38-win_amd64.whl", hash = "sha256:dec198619b7dbd6db58603cd256e092bcadef22a796f778bf87f8592b468441d"},
-    {file = "numpy-1.23.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4f41f5bf20d9a521f8cab3a34557cd77b6f205ab2116651f12959714494268b0"},
-    {file = "numpy-1.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:806cc25d5c43e240db709875e947076b2826f47c2c340a5a2f36da5bb10c58d6"},
-    {file = "numpy-1.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f9d84a24889ebb4c641a9b99e54adb8cab50972f0166a3abc14c3b93163f074"},
-    {file = "numpy-1.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c403c81bb8ffb1c993d0165a11493fd4bf1353d258f6997b3ee288b0a48fce77"},
-    {file = "numpy-1.23.2-cp39-cp39-win32.whl", hash = "sha256:cf8c6aed12a935abf2e290860af8e77b26a042eb7f2582ff83dc7ed5f963340c"},
-    {file = "numpy-1.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:5e28cd64624dc2354a349152599e55308eb6ca95a13ce6a7d5679ebff2962913"},
-    {file = "numpy-1.23.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:806970e69106556d1dd200e26647e9bee5e2b3f1814f9da104a943e8d548ca38"},
-    {file = "numpy-1.23.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd879d3ca4b6f39b7770829f73278b7c5e248c91d538aab1e506c628353e47f"},
-    {file = "numpy-1.23.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:be6b350dfbc7f708d9d853663772a9310783ea58f6035eec649fb9c4371b5389"},
-    {file = "numpy-1.23.2.tar.gz", hash = "sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01"},
+    {file = "numpy-1.23.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9f707b5bb73bf277d812ded9896f9512a43edff72712f31667d0a8c2f8e71ee"},
+    {file = "numpy-1.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffcf105ecdd9396e05a8e58e81faaaf34d3f9875f137c7372450baa5d77c9a54"},
+    {file = "numpy-1.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ea3f98a0ffce3f8f57675eb9119f3f4edb81888b6874bc1953f91e0b1d4f440"},
+    {file = "numpy-1.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004f0efcb2fe1c0bd6ae1fcfc69cc8b6bf2407e0f18be308612007a0762b4089"},
+    {file = "numpy-1.23.3-cp310-cp310-win32.whl", hash = "sha256:98dcbc02e39b1658dc4b4508442a560fe3ca5ca0d989f0df062534e5ca3a5c1a"},
+    {file = "numpy-1.23.3-cp310-cp310-win_amd64.whl", hash = "sha256:39a664e3d26ea854211867d20ebcc8023257c1800ae89773cbba9f9e97bae036"},
+    {file = "numpy-1.23.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1f27b5322ac4067e67c8f9378b41c746d8feac8bdd0e0ffede5324667b8a075c"},
+    {file = "numpy-1.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ad3ec9a748a8943e6eb4358201f7e1c12ede35f510b1a2221b70af4bb64295c"},
+    {file = "numpy-1.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdc9febce3e68b697d931941b263c59e0c74e8f18861f4064c1f712562903411"},
+    {file = "numpy-1.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301c00cf5e60e08e04d842fc47df641d4a181e651c7135c50dc2762ffe293dbd"},
+    {file = "numpy-1.23.3-cp311-cp311-win32.whl", hash = "sha256:7cd1328e5bdf0dee621912f5833648e2daca72e3839ec1d6695e91089625f0b4"},
+    {file = "numpy-1.23.3-cp311-cp311-win_amd64.whl", hash = "sha256:8355fc10fd33a5a70981a5b8a0de51d10af3688d7a9e4a34fcc8fa0d7467bb7f"},
+    {file = "numpy-1.23.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc6e8da415f359b578b00bcfb1d08411c96e9a97f9e6c7adada554a0812a6cc6"},
+    {file = "numpy-1.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:22d43376ee0acd547f3149b9ec12eec2f0ca4a6ab2f61753c5b29bb3e795ac4d"},
+    {file = "numpy-1.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a64403f634e5ffdcd85e0b12c08f04b3080d3e840aef118721021f9b48fc1460"},
+    {file = "numpy-1.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd9d3abe5774404becdb0748178b48a218f1d8c44e0375475732211ea47c67e"},
+    {file = "numpy-1.23.3-cp38-cp38-win32.whl", hash = "sha256:f8c02ec3c4c4fcb718fdf89a6c6f709b14949408e8cf2a2be5bfa9c49548fd85"},
+    {file = "numpy-1.23.3-cp38-cp38-win_amd64.whl", hash = "sha256:e868b0389c5ccfc092031a861d4e158ea164d8b7fdbb10e3b5689b4fc6498df6"},
+    {file = "numpy-1.23.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09f6b7bdffe57fc61d869a22f506049825d707b288039d30f26a0d0d8ea05164"},
+    {file = "numpy-1.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8c79d7cf86d049d0c5089231a5bcd31edb03555bd93d81a16870aa98c6cfb79d"},
+    {file = "numpy-1.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d5420053bbb3dd64c30e58f9363d7a9c27444c3648e61460c1237f9ec3fa14"},
+    {file = "numpy-1.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5422d6a1ea9b15577a9432e26608c73a78faf0b9039437b075cf322c92e98e7"},
+    {file = "numpy-1.23.3-cp39-cp39-win32.whl", hash = "sha256:c1ba66c48b19cc9c2975c0d354f24058888cdc674bebadceb3cdc9ec403fb5d1"},
+    {file = "numpy-1.23.3-cp39-cp39-win_amd64.whl", hash = "sha256:78a63d2df1d947bd9d1b11d35564c2f9e4b57898aae4626638056ec1a231c40c"},
+    {file = "numpy-1.23.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:17c0e467ade9bda685d5ac7f5fa729d8d3e76b23195471adae2d6a6941bd2c18"},
+    {file = "numpy-1.23.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91b8d6768a75247026e951dce3b2aac79dc7e78622fc148329135ba189813584"},
+    {file = "numpy-1.23.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:94c15ca4e52671a59219146ff584488907b1f9b3fc232622b47e2cf832e94fb8"},
+    {file = "numpy-1.23.3.tar.gz", hash = "sha256:51bf49c0cd1d52be0a240aa66f3458afc4b95d8993d2d04f0d91fa60c10af6cd"},
 ]
 packaging = [
     {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
@@ -2475,8 +2475,8 @@ tomli = [
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]
 traitlets = [
-    {file = "traitlets-5.3.0-py3-none-any.whl", hash = "sha256:65fa18961659635933100db8ca120ef6220555286949774b9cfc106f941d1c7a"},
-    {file = "traitlets-5.3.0.tar.gz", hash = "sha256:0bb9f1f9f017aa8ec187d8b1b2a7a6626a2a1d877116baba52a129bfa124f8e2"},
+    {file = "traitlets-5.4.0-py3-none-any.whl", hash = "sha256:93663cc8236093d48150e2af5e2ed30fc7904a11a6195e21bab0408af4e6d6c8"},
+    {file = "traitlets-5.4.0.tar.gz", hash = "sha256:3f2c4e435e271592fe4390f1746ea56836e3a080f84e7833f0f801d9613fec39"},
 ]
 types-beautifulsoup4 = [
     {file = "types-beautifulsoup4-4.11.6.tar.gz", hash = "sha256:2670dd71995df464041e2941fa9bbb694795271e3dedd7262b4766649a1cbe82"},
@@ -2519,12 +2519,12 @@ types-redis = [
     {file = "types_redis-4.3.20-py3-none-any.whl", hash = "sha256:b22e0f5a18b98b6a197dd403daed52a22cb76f50e3cbd7ddc539196af52ec23e"},
 ]
 types-requests = [
-    {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"},
-    {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"},
+    {file = "types-requests-2.28.10.tar.gz", hash = "sha256:97d8f40aa1ffe1e58c3726c77d63c182daea9a72d9f1fa2cafdea756b2a19f2c"},
+    {file = "types_requests-2.28.10-py3-none-any.whl", hash = "sha256:45b485725ed58752f2b23461252f1c1ad9205b884a1e35f786bb295525a3e16a"},
 ]
 types-urllib3 = [
-    {file = "types-urllib3-1.26.23.tar.gz", hash = "sha256:b78e819f0e350221d0689a5666162e467ba3910737bafda14b5c2c85e9bb1e56"},
-    {file = "types_urllib3-1.26.23-py3-none-any.whl", hash = "sha256:333e675b188a1c1fd980b4b352f9e40572413a4c1ac689c23cd546e96310070a"},
+    {file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"},
+    {file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"},
 ]
 types-Werkzeug = [
     {file = "types-Werkzeug-1.0.9.tar.gz", hash = "sha256:5cc269604c400133d452a40cee6397655f878fc460e03fde291b9e3a5eaa518c"},