From c3fc91a86853afe0c1433345b611e63b1835f925 Mon Sep 17 00:00:00 2001 From: AntoniaBK Date: Mon, 10 Jun 2024 13:26:10 +0200 Subject: [PATCH 01/19] New: upload a capture via the API --- website/web/genericapi.py | 74 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/website/web/genericapi.py b/website/web/genericapi.py index 56332bd..8ab4e03 100644 --- a/website/web/genericapi.py +++ b/website/web/genericapi.py @@ -3,11 +3,14 @@ from __future__ import annotations import base64 +import gzip import hashlib +import io import json from io import BytesIO from typing import Any +from uuid import uuid4 from zipfile import ZipFile import flask_login # type: ignore[import-untyped] @@ -441,6 +444,77 @@ class CaptureReport(Resource): # type: ignore[misc] return lookyloo.send_mail(capture_uuid, parameters.get('email', ''), parameters.get('comment')) +@api.route('/json/upload') +@api.doc(description='Submits a capture from another instance') +class UploadCapture(Resource): # type: ignore[misc] + def post(self) -> str | tuple[dict[str, Any], int]: + parameters: dict[str, Any] = request.get_json(force=True) + uuid = str(uuid4()) # NOTE: new UUID, because we do not want duplicates + listing = True if parameters['listing'] else False + har: dict[str, Any] | None = None + html: str | None = None + last_redirected_url: str | None = None + screenshot: bytes | None = None + + if 'har_file' in parameters and parameters.get('har_file'): + try: + har_decoded = base64.b64decode(parameters['har_file']) + except Exception as e: + return {'error': "Invalid base64-encoding"}, 400 + har = json.loads(gzip.decompress(har_decoded)) + last_redirected_url = parameters.get('landing_page') + if 'screenshot_file' in parameters: + screenshot = base64.b64decode(parameters['screenshot_file']) + if 'html_file' in parameters: + html = base64.b64decode(parameters['html_file']).decode() + lookyloo.store_capture(uuid, is_public=listing, har=har, + 
last_redirected_url=last_redirected_url, + png=screenshot, html=html) + return uuid + + elif 'full_capture' in parameters and parameters.get('full_capture'): + try: + zipped_capture = base64.b64decode(parameters['full_capture'].encode()) + except Exception as e: + return {'error': "Invalid base64-encoding"}, 400 + # it *only* accepts a lookyloo export. + cookies: list[dict[str, str]] | None = None + has_error = False + with ZipFile(BytesIO(zipped_capture), 'r') as lookyloo_capture: + potential_favicons = set() + for filename in lookyloo_capture.namelist(): + if filename.endswith('0.har.gz'): + # new format + har = json.loads(gzip.decompress(lookyloo_capture.read(filename))) + elif filename.endswith('0.har'): + # old format + har = json.loads(lookyloo_capture.read(filename)) + elif filename.endswith('0.html'): + html = lookyloo_capture.read(filename).decode() + elif filename.endswith('0.last_redirect.txt'): + last_redirected_url = lookyloo_capture.read(filename).decode() + elif filename.endswith('0.png'): + screenshot = lookyloo_capture.read(filename) + elif filename.endswith('0.cookies.json'): + # Not required + cookies = json.loads(lookyloo_capture.read(filename)) + elif filename.endswith('potential_favicons.ico'): + # We may have more than one favicon + potential_favicons.add(lookyloo_capture.read(filename)) + if not har or not html or not last_redirected_url or not screenshot: + has_error = True + if not has_error: + lookyloo.store_capture(uuid, is_public=listing, har=har, + last_redirected_url=last_redirected_url, + png=screenshot, html=html, cookies=cookies, + potential_favicons=potential_favicons) + return uuid + return {'error': "Capture has error"}, 400 + + else: + return {'error': "Full capture or at least har-file is required"}, 400 + + auto_report_model = api.model('AutoReportModel', { 'email': fields.String(description="Email of the reporter, used by the analyst to get in touch.", example=''), 'comment': fields.String(description="Description of the 
URL, will be given to the analyst.", example='') From e2a8121898bd5b52edb7d6e9c58b8ded76849814 Mon Sep 17 00:00:00 2001 From: AntoniaBK Date: Mon, 10 Jun 2024 16:27:40 +0200 Subject: [PATCH 02/19] Fix: catch errors --- website/web/genericapi.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/website/web/genericapi.py b/website/web/genericapi.py index 8ab4e03..3fba63c 100644 --- a/website/web/genericapi.py +++ b/website/web/genericapi.py @@ -459,17 +459,24 @@ class UploadCapture(Resource): # type: ignore[misc] if 'har_file' in parameters and parameters.get('har_file'): try: har_decoded = base64.b64decode(parameters['har_file']) + try: + # new format + har_uncompressed = gzip.decompress(har_decoded) + except gzip.BadGzipFile: + # old format + har_uncompressed = har_decoded + + har = json.loads(har_uncompressed) + last_redirected_url = parameters.get('landing_page') + if 'screenshot_file' in parameters: + screenshot = base64.b64decode(parameters['screenshot_file']) + if 'html_file' in parameters: + html = base64.b64decode(parameters['html_file']).decode() + lookyloo.store_capture(uuid, is_public=listing, har=har, + last_redirected_url=last_redirected_url, + png=screenshot, html=html) except Exception as e: - return {'error': "Invalid base64-encoding"}, 400 - har = json.loads(gzip.decompress(har_decoded)) - last_redirected_url = parameters.get('landing_page') - if 'screenshot_file' in parameters: - screenshot = base64.b64decode(parameters['screenshot_file']) - if 'html_file' in parameters: - html = base64.b64decode(parameters['html_file']).decode() - lookyloo.store_capture(uuid, is_public=listing, har=har, - last_redirected_url=last_redirected_url, - png=screenshot, html=html) + return {'error': f"Invalid encodings"}, 400 return uuid elif 'full_capture' in parameters and parameters.get('full_capture'): @@ -501,8 +508,8 @@ class UploadCapture(Resource): # type: ignore[misc] elif 
filename.endswith('potential_favicons.ico'): # We may have more than one favicon potential_favicons.add(lookyloo_capture.read(filename)) - if not har or not html or not last_redirected_url or not screenshot: - has_error = True + if not har or not html or not last_redirected_url or not screenshot: + has_error = True if not has_error: lookyloo.store_capture(uuid, is_public=listing, har=har, last_redirected_url=last_redirected_url, From 0a973be5ddcd535d02e3f99e0a31ef38da1fc7a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Fri, 17 May 2024 17:29:11 +0200 Subject: [PATCH 03/19] fix: Avoid stashing the web interface when lacus becomes unavailable --- bin/async_capture.py | 40 ++++++++++++++++++-------------- bin/background_processing.py | 44 +++++++++++++++++++++--------------- lookyloo/exceptions.py | 4 ++++ lookyloo/lookyloo.py | 14 ++++++------ website/web/__init__.py | 30 +++++++++++++----------- 5 files changed, 77 insertions(+), 55 deletions(-) diff --git a/bin/async_capture.py b/bin/async_capture.py index 7e017c1..7b85337 100755 --- a/bin/async_capture.py +++ b/bin/async_capture.py @@ -14,6 +14,7 @@ from lacuscore import LacusCore, CaptureStatus as CaptureStatusCore, CaptureResp from pylacus import PyLacus, CaptureStatus as CaptureStatusPy, CaptureResponse as CaptureResponsePy from lookyloo import Lookyloo, CaptureSettings +from lookyloo.exceptions import LacusUnreachable from lookyloo.default import AbstractManager, get_config from lookyloo.helpers import get_captures_dir @@ -31,8 +32,7 @@ class AsyncCapture(AbstractManager): self.capture_dir: Path = get_captures_dir() self.lookyloo = Lookyloo() - if isinstance(self.lookyloo.lacus, LacusCore): - self.captures: set[asyncio.Task] = set() # type: ignore[type-arg] + self.captures: set[asyncio.Task] = set() # type: ignore[type-arg] self.fox = FOX(config_name='FOX') if not self.fox.available: @@ -135,24 +135,30 @@ class AsyncCapture(AbstractManager): if self.force_stop: return None - if 
isinstance(self.lookyloo.lacus, LacusCore): - await self._trigger_captures() - # NOTE: +1 because running this method also counts for one and will - # be decremented when it finishes - self.set_running(len(self.captures) + 1) + try: + if isinstance(self.lookyloo.lacus, LacusCore): + await self._trigger_captures() + # NOTE: +1 because running this method also counts for one and will + # be decremented when it finishes + self.set_running(len(self.captures) + 1) - self.process_capture_queue() + self.process_capture_queue() + except LacusUnreachable: + self.logger.error('Lacus is unreachable, retrying later.') async def _wait_to_finish_async(self) -> None: - if isinstance(self.lookyloo.lacus, LacusCore): - while self.captures: - self.logger.info(f'Waiting for {len(self.captures)} capture(s) to finish...') - await asyncio.sleep(5) - # NOTE: +1 so we don't quit before the final process capture queue - self.set_running(len(self.captures) + 1) - self.process_capture_queue() - self.unset_running() - self.logger.info('No more captures') + try: + if isinstance(self.lookyloo.lacus, LacusCore): + while self.captures: + self.logger.info(f'Waiting for {len(self.captures)} capture(s) to finish...') + await asyncio.sleep(5) + # NOTE: +1 so we don't quit before the final process capture queue + self.set_running(len(self.captures) + 1) + self.process_capture_queue() + self.unset_running() + self.logger.info('No more captures') + except LacusUnreachable: + self.logger.error('Lacus is unreachable, nothing to wait for') def main() -> None: diff --git a/bin/background_processing.py b/bin/background_processing.py index 788f82e..d67b338 100755 --- a/bin/background_processing.py +++ b/bin/background_processing.py @@ -12,6 +12,7 @@ from typing import Any from lacuscore import CaptureStatus as CaptureStatusCore from lookyloo import Lookyloo +from lookyloo.exceptions import LacusUnreachable from lookyloo.default import AbstractManager, get_config, get_homedir, safe_create_dir from 
lookyloo.helpers import ParsedUserAgent, serialize_to_json from pylacus import CaptureStatus as CaptureStatusPy @@ -78,25 +79,29 @@ class Processing(AbstractManager): def _retry_failed_enqueue(self) -> None: '''If enqueuing failed, the settings are added, with a UUID in the 'to_capture key', and they have a UUID''' to_requeue: list[str] = [] - for uuid, _ in self.lookyloo.redis.zscan_iter('to_capture'): - if self.lookyloo.redis.hexists(uuid, 'not_queued'): - # The capture is marked as not queued - to_requeue.append(uuid) - elif self.lookyloo.lacus.get_capture_status(uuid) in [CaptureStatusPy.UNKNOWN, CaptureStatusCore.UNKNOWN]: - # The capture is unknown on lacus side. It might be a race condition. - # Let's retry a few times. - retry = 3 - while retry > 0: - time.sleep(1) - if self.lookyloo.lacus.get_capture_status(uuid) not in [CaptureStatusPy.UNKNOWN, CaptureStatusCore.UNKNOWN]: - # Was a race condition, the UUID has been or is being processed by Lacus - self.logger.info(f'UUID {uuid} was only temporary unknown') - break - retry -= 1 - else: - # UUID is still unknown - self.logger.info(f'UUID {uuid} is still unknown') + try: + for uuid, _ in self.lookyloo.redis.zscan_iter('to_capture'): + if self.lookyloo.redis.hexists(uuid, 'not_queued'): + # The capture is marked as not queued to_requeue.append(uuid) + elif self.lookyloo.lacus.get_capture_status(uuid) in [CaptureStatusPy.UNKNOWN, CaptureStatusCore.UNKNOWN]: + # The capture is unknown on lacus side. It might be a race condition. + # Let's retry a few times. 
+ retry = 3 + while retry > 0: + time.sleep(1) + if self.lookyloo.lacus.get_capture_status(uuid) not in [CaptureStatusPy.UNKNOWN, CaptureStatusCore.UNKNOWN]: + # Was a race condition, the UUID has been or is being processed by Lacus + self.logger.info(f'UUID {uuid} was only temporary unknown') + break + retry -= 1 + else: + # UUID is still unknown + self.logger.info(f'UUID {uuid} is still unknown') + to_requeue.append(uuid) + except LacusUnreachable: + self.logger.warning('Lacus still unreachable, trying again later') + return None for uuid in to_requeue: if self.lookyloo.redis.zscore('to_capture', uuid) is None: @@ -130,6 +135,9 @@ class Processing(AbstractManager): if new_uuid != uuid: # somehow, between the check and queuing, the UUID isn't UNKNOWN anymore, just checking that self.logger.warning(f'Had to change the capture UUID (duplicate). Old: {uuid} / New: {new_uuid}') + except LacusUnreachable: + self.logger.warning('Lacus still unreachable.') + break except Exception as e: self.logger.warning(f'Still unable to enqueue capture: {e}') break diff --git a/lookyloo/exceptions.py b/lookyloo/exceptions.py index 0981d1b..b8283cb 100644 --- a/lookyloo/exceptions.py +++ b/lookyloo/exceptions.py @@ -21,3 +21,7 @@ class TreeNeedsRebuild(LookylooException): class ModuleError(LookylooException): pass + + +class LacusUnreachable(LookylooException): + pass diff --git a/lookyloo/lookyloo.py b/lookyloo/lookyloo.py index 0834878..01f3c19 100644 --- a/lookyloo/lookyloo.py +++ b/lookyloo/lookyloo.py @@ -54,7 +54,7 @@ from .capturecache import CaptureCache, CapturesIndex from .context import Context from .default import LookylooException, get_homedir, get_config, get_socket_path, safe_create_dir from .exceptions import (MissingCaptureDirectory, - MissingUUID, TreeNeedsRebuild, NoValidHarFile) + MissingUUID, TreeNeedsRebuild, NoValidHarFile, LacusUnreachable) from .helpers import (get_captures_dir, get_email_template, get_resources_hashes, get_taxonomies, uniq_domains, 
ParsedUserAgent, load_cookies, UserAgents, @@ -177,9 +177,6 @@ class Lookyloo(): self._captures_index = CapturesIndex(self.redis, self.context, maxsize=cache_max_size) self.logger.info('Index initialized.') - # init lacus - self.lacus - @property def redis(self) -> Redis: # type: ignore[type-arg] return Redis(connection_pool=self.redis_pool) @@ -192,7 +189,7 @@ class Lookyloo(): remote_lacus_config = get_config('generic', 'remote_lacus') if remote_lacus_config.get('enable'): self.logger.info("Remote lacus enabled, trying to set it up...") - lacus_retries = 10 + lacus_retries = 2 while lacus_retries > 0: remote_lacus_url = remote_lacus_config.get('url') self._lacus = PyLacus(remote_lacus_url) @@ -202,9 +199,9 @@ class Lookyloo(): break lacus_retries -= 1 self.logger.warning(f"Unable to setup remote lacus to {remote_lacus_url}, trying again {lacus_retries} more time(s).") - time.sleep(10) + time.sleep(3) else: - raise LookylooException('Remote lacus is enabled but unreachable.') + raise LacusUnreachable('Remote lacus is enabled but unreachable.') if not has_remote_lacus: # We need a redis connector that doesn't decode. 
@@ -544,6 +541,9 @@ class Lookyloo(): return CaptureStatusCore.ONGOING try: lacus_status = self.lacus.get_capture_status(capture_uuid) + except LacusUnreachable as e: + self.logger.warning(f'Unable to connect to lacus: {e}') + raise e except Exception as e: self.logger.warning(f'Unable to get the status for {capture_uuid} from lacus: {e}') if self.redis.zscore('to_capture', capture_uuid) is not None: diff --git a/website/web/__init__.py b/website/web/__init__.py index bd3357b..90c5bfa 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -41,7 +41,7 @@ from werkzeug.wrappers.response import Response as WerkzeugResponse from lookyloo import Lookyloo, CaptureSettings from lookyloo.default import get_config -from lookyloo.exceptions import MissingUUID, NoValidHarFile +from lookyloo.exceptions import MissingUUID, NoValidHarFile, LacusUnreachable from lookyloo.helpers import get_taxonomies, UserAgents, load_cookies if sys.version_info < (3, 9): @@ -1084,18 +1084,22 @@ def tree(tree_uuid: str, node_uuid: str | None=None) -> Response | str | Werkzeu if tree_uuid == 'False': flash("Unable to process your request.", 'warning') return redirect(url_for('index')) - cache = lookyloo.capture_cache(tree_uuid, force_update=True) - if not cache: - status = lookyloo.get_capture_status(tree_uuid) - if status == CaptureStatus.UNKNOWN: - flash(f'Unable to find this UUID ({tree_uuid}).', 'warning') - return index_generic() - elif status == CaptureStatus.QUEUED: - message = "The capture is queued, but didn't start yet." - elif status in [CaptureStatus.ONGOING, CaptureStatus.DONE]: - # If CaptureStatus.DONE, the capture finished between the query to the cache and - # the request for a status. Give it an extra few seconds. - message = "The capture is ongoing." 
+ try: + cache = lookyloo.capture_cache(tree_uuid, force_update=True) + if not cache: + status = lookyloo.get_capture_status(tree_uuid) + if status == CaptureStatus.UNKNOWN: + flash(f'Unable to find this UUID ({tree_uuid}).', 'warning') + return index_generic() + elif status == CaptureStatus.QUEUED: + message = "The capture is queued, but didn't start yet." + elif status in [CaptureStatus.ONGOING, CaptureStatus.DONE]: + # If CaptureStatus.DONE, the capture finished between the query to the cache and + # the request for a status. Give it an extra few seconds. + message = "The capture is ongoing." + return render_template('tree_wait.html', message=message, tree_uuid=tree_uuid) + except LacusUnreachable: + message = "Unable to connect to the Lacus backend, the capture will start as soon as the administrator wakes up." return render_template('tree_wait.html', message=message, tree_uuid=tree_uuid) try: From abbc242d8a478751cb1a1eaaa0819e1b84eba3aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 21 May 2024 01:12:19 +0200 Subject: [PATCH 04/19] chg: Bump deps --- poetry.lock | 343 +++++++++++++++++++++++++++++++++++++++++-------- pyproject.toml | 6 +- 2 files changed, 293 insertions(+), 56 deletions(-) diff --git a/poetry.lock b/poetry.lock index b2b4f88..e1e68f4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,24 +2,38 @@ [[package]] name = "aiobotocore" -version = "2.12.3" +version = "2.13.0" description = "Async client for aws services using botocore and aiohttp" optional = false python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.12.3-py3-none-any.whl", hash = "sha256:86737685f4625e8f05c4e7a608a07cc97607263279f66cf6b02b640c4eafd324"}, - {file = "aiobotocore-2.12.3.tar.gz", hash = "sha256:e2a2929207bc5d62eb556106c2224c1fd106d5c65be2eb69f15cc8c34c44c236"}, + {file = "aiobotocore-2.13.0-py3-none-any.whl", hash = "sha256:f812afc678d71b0038fd1ce712ff111ab7f47bab81ce5b4c7d222d4b83bc0cb2"}, + {file = "aiobotocore-2.13.0.tar.gz", hash = 
"sha256:4badf5cab6ad400216319d14278e2c99ad9b708e28a0f231605a412e632de401"}, ] [package.dependencies] -aiohttp = ">=3.7.4.post0,<4.0.0" +aiohttp = ">=3.9.2,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.34.41,<1.34.70" +botocore = ">=1.34.70,<1.34.107" wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.32.41,<1.32.70)"] -boto3 = ["boto3 (>=1.34.41,<1.34.70)"] +awscli = ["awscli (>=1.32.70,<1.32.107)"] +boto3 = ["boto3 (>=1.34.70,<1.34.107)"] + +[[package]] +name = "aiodns" +version = "3.2.0" +description = "Simple DNS resolver for asyncio" +optional = false +python-versions = "*" +files = [ + {file = "aiodns-3.2.0-py3-none-any.whl", hash = "sha256:e443c0c27b07da3174a109fd9e736d69058d808f144d3c9d56dbd1776964c5f5"}, + {file = "aiodns-3.2.0.tar.gz", hash = "sha256:62869b23409349c21b072883ec8998316b234c9a9e36675756e8e317e8768f72"}, +] + +[package.dependencies] +pycares = ">=4.0.0" [[package]] name = "aiohttp" @@ -107,9 +121,12 @@ files = [ ] [package.dependencies] +aiodns = {version = "*", optional = true, markers = "(sys_platform == \"linux\" or sys_platform == \"darwin\") and extra == \"speedups\""} aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" +Brotli = {version = "*", optional = true, markers = "platform_python_implementation == \"CPython\" and extra == \"speedups\""} +brotlicffi = {version = "*", optional = true, markers = "platform_python_implementation != \"CPython\" and extra == \"speedups\""} frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" @@ -117,6 +134,21 @@ yarl = ">=1.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns", "brotlicffi"] +[[package]] +name = "aiohttp-socks" +version = "0.8.4" +description = "Proxy connector for aiohttp" +optional = false +python-versions = "*" +files = [ + {file = "aiohttp_socks-0.8.4-py3-none-any.whl", hash = "sha256:74b21105634ed31d56ed6fee43701ca16218b53475e606d56950a4d17e8290ea"}, + {file = 
"aiohttp_socks-0.8.4.tar.gz", hash = "sha256:6b611d4ce838e9cf2c2fed5e0dba447cc84824a6cba95dc5747606201da46cb4"}, +] + +[package.dependencies] +aiohttp = ">=2.3.2" +python-socks = {version = ">=2.4.3,<3.0.0", extras = ["asyncio"]} + [[package]] name = "aioitertools" version = "0.11.0" @@ -308,13 +340,13 @@ WTForms = "*" [[package]] name = "botocore" -version = "1.34.69" +version = "1.34.106" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.69-py3-none-any.whl", hash = "sha256:d3802d076d4d507bf506f9845a6970ce43adc3d819dd57c2791f5c19ed6e5950"}, - {file = "botocore-1.34.69.tar.gz", hash = "sha256:d1ab2bff3c2fd51719c2021d9fa2f30fbb9ed0a308f69e9a774ac92c8091380a"}, + {file = "botocore-1.34.106-py3-none-any.whl", hash = "sha256:4baf0e27c2dfc4f4d0dee7c217c716e0782f9b30e8e1fff983fce237d88f73ae"}, + {file = "botocore-1.34.106.tar.gz", hash = "sha256:921fa5202f88c3e58fdcb4b3acffd56d65b24bca47092ee4b27aa988556c0be6"}, ] [package.dependencies] @@ -326,7 +358,138 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.19.19)"] +crt = ["awscrt (==0.20.9)"] + +[[package]] +name = "brotli" +version = "1.1.0" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, + {file = 
"Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, + {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, + {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, + {file = 
"Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, + {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, + {file = 
"Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, + {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, + {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, + {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, + {file = 
"Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, + {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, + {file = "Brotli-1.1.0.tar.gz", hash = 
"sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, +] + +[[package]] +name = "brotlicffi" +version = "1.1.0.0" +description = "Python CFFI bindings to the Brotli library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"}, + {file = 
"brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"}, + {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"}, +] + +[package.dependencies] +cffi = ">=1.0.0" [[package]] name = "cattrs" @@ -1067,13 +1230,13 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "har2tree" -version = "1.24.1" +version = "1.24.2" description = "HTTP Archive (HAR) to ETE Toolkit generator" optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "har2tree-1.24.1-py3-none-any.whl", hash = "sha256:49c5269772313bb7baf660e5993a3ef2039e2647cf3bc9ff58950cab08e30e4d"}, - {file = "har2tree-1.24.1.tar.gz", hash = "sha256:c5492cd5589dd25d3bad16bd774fd291ebdc499c19aad03a9ce8f97e61fed34a"}, + {file = "har2tree-1.24.2-py3-none-any.whl", hash = "sha256:2d7f3917b1fa2cd958b77d6f7dca2a3643d9bbef95200dc2b696a9194f8477e2"}, + {file = "har2tree-1.24.2.tar.gz", hash = "sha256:3fc0eeb4a57f21097396fffab000c9c1082e953446c74fc4d8446943ac23b3ac"}, ] [package.dependencies] @@ -2318,18 +2481,18 @@ type = ["mypy (>=1.8)"] [[package]] name = "playwright" -version = "1.43.0" +version = "1.44.0" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.8" files = [ - {file = 
"playwright-1.43.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b03b12bd4da9c2cfb78dff820deac8b52892fe3c2f89a4d95d6f08c59e41deb9"}, - {file = "playwright-1.43.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e9ec21b141727392f630761c7f4dec46d80c98243614257cc501b64ff636d337"}, - {file = "playwright-1.43.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:e05a8d8fb2040c630429cca07e843c8fa33059717837c8f50c01b7d1fc651ce1"}, - {file = "playwright-1.43.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:50d9a5c07c76456945a2296d63f78fdf6eb11aed3e8d39bb5ccbda760a8d6d41"}, - {file = "playwright-1.43.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87191272c40b4c282cf2c9449ca3acaf705f38ac6e2372270f1617ce16b661b8"}, - {file = "playwright-1.43.0-py3-none-win32.whl", hash = "sha256:bd8b818904b17e2914be23e7bc2a340b203f57fe81678520b10f908485b056ea"}, - {file = "playwright-1.43.0-py3-none-win_amd64.whl", hash = "sha256:9b7bd707eeeaebee47f656b2de90aa9bd85e9ca2c6af7a08efd73896299e4d50"}, + {file = "playwright-1.44.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:c2317a80896796fdeb03d60f06cc229e775ff2e19b80c64b1bb9b29c8a59d992"}, + {file = "playwright-1.44.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54d44fb634d870839301c2326e1e12a178a1be0de76d0caaec230ab075c2e077"}, + {file = "playwright-1.44.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:64b67194e73b47ae72acf25f1a9cfacfef38ca2b52e4bb8b0abd385c5deeaadf"}, + {file = "playwright-1.44.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:29161b1fae71f7c402df5b15f0bd3deaeecd8b3d1ecd9ff01271700c66210e7b"}, + {file = "playwright-1.44.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8c8a3bfea17576d3f94a2363eee195cbda8dbba86975588c7eaac7792b25eee"}, + {file = "playwright-1.44.0-py3-none-win32.whl", hash = "sha256:235e37832deaa9af8a629d09955396259ab757533cc1922f9b0308b4ee0d9cdf"}, + {file = "playwright-1.44.0-py3-none-win_amd64.whl", hash = 
"sha256:5b8a4a1d4d50f4ff99b47965576322a8c4e34631854b862a25c1feb824be22a8"}, ] [package.dependencies] @@ -2355,32 +2518,33 @@ test = ["pytest"] [[package]] name = "playwrightcapture" -version = "1.24.8" +version = "1.24.9" description = "A simple library to capture websites using playwright" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "playwrightcapture-1.24.8-py3-none-any.whl", hash = "sha256:8210cb1f94a36187da7ebc1cb5bd92d47bc065889a617644d7b42a2089365045"}, - {file = "playwrightcapture-1.24.8.tar.gz", hash = "sha256:8f0693e62599cdf593883dfd7d5bb7827e7e77c01062fa85a00b5f95293e6ee5"}, + {file = "playwrightcapture-1.24.9-py3-none-any.whl", hash = "sha256:f5dc3acbde2e474e495909e8b313ddb5b2a328c6fb5f9eb8ac8821348819e54b"}, + {file = "playwrightcapture-1.24.9.tar.gz", hash = "sha256:5dc02dbcdf79aef90573e35ea1f7c2110a25fba98439b2817888fcbd736ba6fa"}, ] [package.dependencies] +aiohttp = {version = ">=3.9.5,<4.0.0", extras = ["speedups"]} +aiohttp-socks = ">=0.8.4,<0.9.0" async-timeout = {version = ">=4.0.3,<5.0.0", markers = "python_version < \"3.11\""} beautifulsoup4 = {version = ">=4.12.3,<5.0.0", extras = ["charset-normalizer", "lxml"]} dateparser = ">=1.2.0,<2.0.0" playwright = ">=1.43.0,<2.0.0" playwright-stealth = ">=1.0.6,<2.0.0" -puremagic = ">=1.22,<2.0" +puremagic = ">=1.23,<2.0" pydub = {version = ">=0.25.1,<0.26.0", optional = true, markers = "extra == \"recaptcha\""} pytz = {version = ">=2024.1,<2025.0", markers = "python_version < \"3.9\""} -requests = {version = ">=2.31.0,<3.0.0", extras = ["socks"], optional = true, markers = "extra == \"recaptcha\""} setuptools = ">=69.5.1,<70.0.0" -SpeechRecognition = {version = ">=3.10.3,<4.0.0", optional = true, markers = "extra == \"recaptcha\""} +SpeechRecognition = {version = ">=3.10.4,<4.0.0", optional = true, markers = "extra == \"recaptcha\""} tzdata = ">=2024.1,<2025.0" w3lib = ">=2.1.2,<3.0.0" [package.extras] -recaptcha = ["SpeechRecognition (>=3.10.3,<4.0.0)", "pydub 
(>=0.25.1,<0.26.0)", "requests[socks] (>=2.31.0,<3.0.0)"] +recaptcha = ["SpeechRecognition (>=3.10.4,<4.0.0)", "pydub (>=0.25.1,<0.26.0)"] [[package]] name = "prompt-toolkit" @@ -2447,6 +2611,72 @@ files = [ {file = "puremagic-1.23.tar.gz", hash = "sha256:e0bb7dc814b9d606225b57d4d49175d27c24fb745de1a7b3506067f2be54438f"}, ] +[[package]] +name = "pycares" +version = "4.4.0" +description = "Python interface for c-ares" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycares-4.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:24da119850841d16996713d9c3374ca28a21deee056d609fbbed29065d17e1f6"}, + {file = "pycares-4.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8f64cb58729689d4d0e78f0bfb4c25ce2f851d0274c0273ac751795c04b8798a"}, + {file = "pycares-4.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33e2a1120887e89075f7f814ec144f66a6ce06a54f5722ccefc62fbeda83cff"}, + {file = "pycares-4.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c680fef1b502ee680f8f0b95a41af4ec2c234e50e16c0af5bbda31999d3584bd"}, + {file = "pycares-4.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fff16b09042ba077f7b8aa5868d1d22456f0002574d0ba43462b10a009331677"}, + {file = "pycares-4.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:229a1675eb33bc9afb1fc463e73ee334950ccc485bc83a43f6ae5839fb4d5fa3"}, + {file = "pycares-4.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3aebc73e5ad70464f998f77f2da2063aa617cbd8d3e8174dd7c5b4518f967153"}, + {file = "pycares-4.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ef64649eba56448f65e26546d85c860709844d2fc22ef14d324fe0b27f761a9"}, + {file = "pycares-4.4.0-cp310-cp310-win32.whl", hash = "sha256:4afc2644423f4eef97857a9fd61be9758ce5e336b4b0bd3d591238bb4b8b03e0"}, + {file = "pycares-4.4.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:5ed4e04af4012f875b78219d34434a6d08a67175150ac1b79eb70ab585d4ba8c"}, + {file = "pycares-4.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bce8db2fc6f3174bd39b81405210b9b88d7b607d33e56a970c34a0c190da0490"}, + {file = "pycares-4.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a0303428d013ccf5c51de59c83f9127aba6200adb7fd4be57eddb432a1edd2a"}, + {file = "pycares-4.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb91792f1556f97be7f7acb57dc7756d89c5a87bd8b90363a77dbf9ea653817"}, + {file = "pycares-4.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b61579cecf1f4d616e5ea31a6e423a16680ab0d3a24a2ffe7bb1d4ee162477ff"}, + {file = "pycares-4.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7af06968cbf6851566e806bf3e72825b0e6671832a2cbe840be1d2d65350710"}, + {file = "pycares-4.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ceb12974367b0a68a05d52f4162b29f575d241bd53de155efe632bf2c943c7f6"}, + {file = "pycares-4.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2eeec144bcf6a7b6f2d74d6e70cbba7886a84dd373c886f06cb137a07de4954c"}, + {file = "pycares-4.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3a6f7cfdfd11eb5493d6d632e582408c8f3b429f295f8799c584c108b28db6f"}, + {file = "pycares-4.4.0-cp311-cp311-win32.whl", hash = "sha256:34736a2ffaa9c08ca9c707011a2d7b69074bbf82d645d8138bba771479b2362f"}, + {file = "pycares-4.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:eb66c30eb11e877976b7ead13632082a8621df648c408b8e15cdb91a452dd502"}, + {file = "pycares-4.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fd644505a8cfd7f6584d33a9066d4e3d47700f050ef1490230c962de5dfb28c6"}, + {file = "pycares-4.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52084961262232ec04bd75f5043aed7e5d8d9695e542ff691dfef0110209f2d4"}, + {file = "pycares-4.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a0c5368206057884cde18602580083aeaad9b860e2eac14fd253543158ce1e93"}, + {file = "pycares-4.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:112a4979c695b1c86f6782163d7dec58d57a3b9510536dcf4826550f9053dd9a"}, + {file = "pycares-4.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d186dafccdaa3409194c0f94db93c1a5d191145a275f19da6591f9499b8e7b8"}, + {file = "pycares-4.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:64965dc19c578a683ea73487a215a8897276224e004d50eeb21f0bc7a0b63c88"}, + {file = "pycares-4.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ed2a38e34bec6f2586435f6ff0bc5fe11d14bebd7ed492cf739a424e81681540"}, + {file = "pycares-4.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:94d6962db81541eb0396d2f0dfcbb18cdb8c8b251d165efc2d974ae652c547d4"}, + {file = "pycares-4.4.0-cp312-cp312-win32.whl", hash = "sha256:1168a48a834813aa80f412be2df4abaf630528a58d15c704857448b20b1675c0"}, + {file = "pycares-4.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:db24c4e7fea4a052c6e869cbf387dd85d53b9736cfe1ef5d8d568d1ca925e977"}, + {file = "pycares-4.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:21a5a0468861ec7df7befa69050f952da13db5427ae41ffe4713bc96291d1d95"}, + {file = "pycares-4.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:22c00bf659a9fa44d7b405cf1cd69b68b9d37537899898d8cbe5dffa4016b273"}, + {file = "pycares-4.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23aa3993a352491a47fcf17867f61472f32f874df4adcbb486294bd9fbe8abee"}, + {file = "pycares-4.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:813d661cbe2e37d87da2d16b7110a6860e93ddb11735c6919c8a3545c7b9c8d8"}, + {file = "pycares-4.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77cf5a2fd5583c670de41a7f4a7b46e5cbabe7180d8029f728571f4d2e864084"}, + {file = 
"pycares-4.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3eaa6681c0a3e3f3868c77aca14b7760fed35fdfda2fe587e15c701950e7bc69"}, + {file = "pycares-4.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad58e284a658a8a6a84af2e0b62f2f961f303cedfe551854d7bd40c3cbb61912"}, + {file = "pycares-4.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bfb89ca9e3d0a9b5332deeb666b2ede9d3469107742158f4aeda5ce032d003f4"}, + {file = "pycares-4.4.0-cp38-cp38-win32.whl", hash = "sha256:f36bdc1562142e3695555d2f4ac0cb69af165eddcefa98efc1c79495b533481f"}, + {file = "pycares-4.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:902461a92b6a80fd5041a2ec5235680c7cc35e43615639ec2a40e63fca2dfb51"}, + {file = "pycares-4.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7bddc6adba8f699728f7fc1c9ce8cef359817ad78e2ed52b9502cb5f8dc7f741"}, + {file = "pycares-4.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cb49d5805cd347c404f928c5ae7c35e86ba0c58ffa701dbe905365e77ce7d641"}, + {file = "pycares-4.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56cf3349fa3a2e67ed387a7974c11d233734636fe19facfcda261b411af14d80"}, + {file = "pycares-4.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf2eaa83a5987e48fa63302f0fe7ce3275cfda87b34d40fef9ce703fb3ac002"}, + {file = "pycares-4.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82bba2ab77eb5addbf9758d514d9bdef3c1bfe7d1649a47bd9a0d55a23ef478b"}, + {file = "pycares-4.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c6a8bde63106f162fca736e842a916853cad3c8d9d137e11c9ffa37efa818b02"}, + {file = "pycares-4.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5f646eec041db6ffdbcaf3e0756fb92018f7af3266138c756bb09d2b5baadec"}, + {file = "pycares-4.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9dc04c54c6ea615210c1b9e803d0e2d2255f87a3d5d119b6482c8f0dfa15b26b"}, + {file = "pycares-4.4.0-cp39-cp39-win32.whl", hash = 
"sha256:97892cced5794d721fb4ff8765764aa4ea48fe8b2c3820677505b96b83d4ef47"}, + {file = "pycares-4.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:917f08f0b5d9324e9a34211e68d27447c552b50ab967044776bbab7e42a553a2"}, + {file = "pycares-4.4.0.tar.gz", hash = "sha256:f47579d508f2f56eddd16ce72045782ad3b1b3b678098699e2b6a1b30733e1c2"}, +] + +[package.dependencies] +cffi = ">=1.5.0" + +[package.extras] +idna = ["idna (>=2.1)"] + [[package]] name = "pycparser" version = "2.22" @@ -2752,18 +2982,6 @@ requests = ">=2.31.0,<3.0.0" [package.extras] docs = ["Sphinx (<7.2)", "Sphinx (>=7.2,<8.0)"] -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] - [[package]] name = "pytaxonomies" version = "1.5.0" @@ -2804,6 +3022,26 @@ files = [ {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, ] +[[package]] +name = "python-socks" +version = "2.4.4" +description = "Core proxy (SOCKS4, SOCKS5, HTTP tunneling) functionality for Python" +optional = false +python-versions = "*" +files = [ + {file = "python-socks-2.4.4.tar.gz", hash = "sha256:e5a8e4f78203612c813946feacd87b98943965a04389fe221fa1e9ab263ad22e"}, + {file = "python_socks-2.4.4-py3-none-any.whl", hash = "sha256:fda465d3ef229119ee614eb85f2b7c0ad28be6dd40e0ef8dd317c49e8725e514"}, +] + +[package.dependencies] +async-timeout = {version = ">=3.0.1", optional = true, markers = "extra == \"asyncio\""} 
+ +[package.extras] +anyio = ["anyio (>=3.3.4,<5.0.0)"] +asyncio = ["async-timeout (>=3.0.1)"] +curio = ["curio (>=1.4)"] +trio = ["trio (>=0.16.0)"] + [[package]] name = "pytz" version = "2024.1" @@ -2939,20 +3177,19 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.1" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.1-py3-none-any.whl", hash = "sha256:21ac9465cdf8c1650fe1ecde8a71669a93d4e6f147550483a2967d08396a56a5"}, + {file = "requests-2.32.1.tar.gz", hash = "sha256:eb97e87e64c79e64e5b8ac75cee9dd1f97f49e289b083ee6be96268930725685"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} urllib3 = ">=1.21.1,<3" [package.extras] @@ -3326,13 +3563,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.2.0.20240511" +version = "10.2.0.20240520" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240511.tar.gz", hash = "sha256:b2fcc27b8e15ae3741941e43b4f39eba6fce6bcb152af90bbb07b387d2585783"}, - {file = "types_Pillow-10.2.0.20240511-py3-none-any.whl", hash = "sha256:ef87a19ea0a02a89c784cbc1b99dfff6c00dd0d5796a8ac868cf7ec69c5f88ff"}, + {file = "types-Pillow-10.2.0.20240520.tar.gz", hash = "sha256:130b979195465fa1e1676d8e81c9c7c30319e8e95b12fae945e8f0d525213107"}, + {file = "types_Pillow-10.2.0.20240520-py3-none-any.whl", hash = "sha256:33c36494b380e2a269bb742181bea5d9b00820367822dbd3760f07210a1da23d"}, ] [[package]] @@ -3428,13 +3665,13 @@ urllib3 = ">=2" [[package]] name = 
"types-setuptools" -version = "69.5.0.20240513" +version = "69.5.0.20240519" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-69.5.0.20240513.tar.gz", hash = "sha256:3a8ccea3e3f1f639856a1dd622be282f74e94e00fdc364630240f999cc9594fc"}, - {file = "types_setuptools-69.5.0.20240513-py3-none-any.whl", hash = "sha256:bd3964c08cffd5a057d9cabe61641c86a41a1b5dd2b652b8d371eed64d89d726"}, + {file = "types-setuptools-69.5.0.20240519.tar.gz", hash = "sha256:275fb72048b0203d3fbef268298ea78a0913cd114a74872d93f8638ccc5b7c63"}, + {file = "types_setuptools-69.5.0.20240519-py3-none-any.whl", hash = "sha256:52b264eff8913b5d85848d83bd98efea935fc6129d681d370eb957783880b720"}, ] [[package]] @@ -3819,4 +4056,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.13" -content-hash = "505254f10aaedfe5b7b7d7cc116cf31b05126e2fa93004354af893e7c9d99873" +content-hash = "23db3d1c50aae4e70413d5868dcd02e51baf54da3b3454959a86711195725fc5" diff --git a/pyproject.toml b/pyproject.toml index 14b2896..e758d18 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ start_website = "bin.start_website:main" [tool.poetry.dependencies] python = ">=3.8.1,<3.13" -requests = "^2.31.0" +requests = "^2.32.1" flask = "^3.0.3" gunicorn = "^22.0.0" charset-normalizer = "^3.3.2" @@ -62,7 +62,7 @@ pyhashlookup = "^1.2.3" lief = "^0.14" ua-parser = "^0.18.0" Flask-Login = "^0.6.3" -har2tree = "^1.24.1" +har2tree = "^1.24.2" passivetotal = "^2.5.9" werkzeug = "^3.0.3" filetype = "^1.2.0" @@ -101,7 +101,7 @@ types-pkg-resources = "^0.1.3" types-Deprecated = "^1.2.9.20240311" types-python-dateutil = "^2.9.0.20240316" types-beautifulsoup4 = "^4.12.0.20240511" -types-Pillow = "^10.2.0.20240511" +types-Pillow = "^10.2.0.20240520" types-pytz = "^2024.1.0.20240417" [build-system] From 2840351a9f222f30b63536d674fc628f2429143f Mon Sep 17 00:00:00 2001 
From: Christophe Vandeplas Date: Mon, 20 May 2024 10:41:15 +0200 Subject: [PATCH 05/19] fix: [modules] Gracefully accept no hashlookup fixes #916 --- cache/run_redis.sh | 4 +++- indexing/run_redis.sh | 5 ++++- website/web/__init__.py | 12 ++++++++---- website/web/templates/hashlookup.html | 5 +++++ website/web/templates/tree.html | 2 +- 5 files changed, 21 insertions(+), 7 deletions(-) diff --git a/cache/run_redis.sh b/cache/run_redis.sh index c7e47b5..50a49c7 100755 --- a/cache/run_redis.sh +++ b/cache/run_redis.sh @@ -5,6 +5,8 @@ set -x if [ -f ../../valkey/src/valkey-server ]; then ../../valkey/src/valkey-server ./cache.conf -else +elif [ -f ../../redis/src/redis-server ]; then ../../redis/src/redis-server ./cache.conf +else + /usr/bin/redis-server ./cache.conf fi diff --git a/indexing/run_redis.sh b/indexing/run_redis.sh index 056d33c..46c4a00 100755 --- a/indexing/run_redis.sh +++ b/indexing/run_redis.sh @@ -5,6 +5,9 @@ set -x if [ -f ../../valkey/src/valkey-server ]; then ../../valkey/src/valkey-server ./indexing.conf -else +elif [ -f ../../redis/src/redis-server ]; then ../../redis/src/redis-server ./indexing.conf +else + /usr/bin/redis-server ./indexing.conf + fi diff --git a/website/web/__init__.py b/website/web/__init__.py index 90c5bfa..ee26554 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -962,10 +962,14 @@ def urls_rendered_page(tree_uuid: str) -> WerkzeugResponse | str | Response: @app.route('/tree//hashlookup', methods=['GET']) def hashlookup(tree_uuid: str) -> str | WerkzeugResponse | Response: - merged, total_ressources = lookyloo.merge_hashlookup_tree(tree_uuid) - # We only want unique URLs for the template - for sha1, entries in merged.items(): - entries['nodes'] = {node.name for node in entries['nodes']} + try: + merged, total_ressources = lookyloo.merge_hashlookup_tree(tree_uuid) + # We only want unique URLs for the template + for sha1, entries in merged.items(): + entries['nodes'] = {node.name for node in 
entries['nodes']} + except Exception: # error or module not enabled + merged = False + total_ressources = False return render_template('hashlookup.html', base_tree_uuid=tree_uuid, merged=merged, total_ressources=total_ressources) diff --git a/website/web/templates/hashlookup.html b/website/web/templates/hashlookup.html index 95e56f0..3d31f4a 100644 --- a/website/web/templates/hashlookup.html +++ b/website/web/templates/hashlookup.html @@ -1,4 +1,8 @@
+{% if not merged %} + No result data available or hashlookup module not enabled. +{%else%} + Total Hits: {{ merged|length }}
Total ressources: {{total_ressources}}

{% for sha1, entries in merged.items() %} @@ -25,4 +29,5 @@ {% endfor %} +{%endif%}
diff --git a/website/web/templates/tree.html b/website/web/templates/tree.html index c292897..c48d279 100644 --- a/website/web/templates/tree.html +++ b/website/web/templates/tree.html @@ -434,7 +434,7 @@ {% endif %} - ? + ? From daec76231fcd9edd4d0e33a7c1072a9edb981b4f Mon Sep 17 00:00:00 2001 From: Christophe Vandeplas Date: Mon, 20 May 2024 14:56:36 +0200 Subject: [PATCH 06/19] fix: [modules] corect variable type --- cache/run_redis.sh | 1 + indexing/run_redis.sh | 2 +- website/web/__init__.py | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cache/run_redis.sh b/cache/run_redis.sh index 50a49c7..8daf6d8 100755 --- a/cache/run_redis.sh +++ b/cache/run_redis.sh @@ -8,5 +8,6 @@ if [ -f ../../valkey/src/valkey-server ]; then elif [ -f ../../redis/src/redis-server ]; then ../../redis/src/redis-server ./cache.conf else + echo "Warning: using system redis-server. Valkey-server or redis-server from source is recommended." >&2 /usr/bin/redis-server ./cache.conf fi diff --git a/indexing/run_redis.sh b/indexing/run_redis.sh index 46c4a00..d6924a0 100755 --- a/indexing/run_redis.sh +++ b/indexing/run_redis.sh @@ -8,6 +8,6 @@ if [ -f ../../valkey/src/valkey-server ]; then elif [ -f ../../redis/src/redis-server ]; then ../../redis/src/redis-server ./indexing.conf else + echo "Warning: using system redis-server. Valkey-server or redis-server from source is recommended." 
>&2 /usr/bin/redis-server ./indexing.conf - fi diff --git a/website/web/__init__.py b/website/web/__init__.py index ee26554..46d75fa 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -968,8 +968,8 @@ def hashlookup(tree_uuid: str) -> str | WerkzeugResponse | Response: for sha1, entries in merged.items(): entries['nodes'] = {node.name for node in entries['nodes']} except Exception: # error or module not enabled - merged = False - total_ressources = False + merged = [] + total_ressources = [] return render_template('hashlookup.html', base_tree_uuid=tree_uuid, merged=merged, total_ressources=total_ressources) From 48b398c6492b397f5a9281a7083e1995c918295a Mon Sep 17 00:00:00 2001 From: Christophe Vandeplas Date: Tue, 21 May 2024 14:54:31 +0200 Subject: [PATCH 07/19] fix: fixes type issue --- website/web/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/web/__init__.py b/website/web/__init__.py index 46d75fa..a542380 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -968,8 +968,8 @@ def hashlookup(tree_uuid: str) -> str | WerkzeugResponse | Response: for sha1, entries in merged.items(): entries['nodes'] = {node.name for node in entries['nodes']} except Exception: # error or module not enabled - merged = [] - total_ressources = [] + merged = {} + total_ressources = 0 return render_template('hashlookup.html', base_tree_uuid=tree_uuid, merged=merged, total_ressources=total_ressources) From 5cbfbe26cda27e57c6b637ce8ed620dbc5978bc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 21 May 2024 16:45:50 +0200 Subject: [PATCH 08/19] new: Optionally make the capture page the default landing page --- config/generic.json.sample | 2 ++ website/web/__init__.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/config/generic.json.sample b/config/generic.json.sample index 97bfaf2..6ac9402 100644 --- a/config/generic.json.sample +++ 
b/config/generic.json.sample @@ -7,6 +7,7 @@ "website_listen_port": 5100, "systemd_service_name": "lookyloo", "default_public": true, + "index_is_capture": false, "users": {}, "time_delta_on_index": { "weeks": 1, @@ -90,6 +91,7 @@ "website_listen_port": "Port Flask will listen on.", "systemd_service_name": "(Optional) Name of the systemd service if your project has one.", "default_public": "If true, the capture is public and will be visible on the index page by default (can be unticked on the capture page).", + "index_is_capture": "If true, the capture page is the default landing page (faster for big instances).", "users": "It is some kind of an admin accounts. Format: {username: password}", "time_delta_on_index": "Time interval of the capture displayed on the index", "async_capture_processes": "Number of async_capture processes to start. This should not be higher than the number of splash instances you have running. A very high number will use *a lot* of ram.", diff --git a/website/web/__init__.py b/website/web/__init__.py index a542380..6eef395 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -78,6 +78,15 @@ login_manager.init_app(app) # User agents manager user_agents = UserAgents() +if get_config('generic', 'index_is_capture'): + @app.route('/', methods=['GET']) + def landing_page() -> WerkzeugResponse: + return redirect(url_for('capture_web')) +else: + @app.route('/', methods=['GET']) + def landing_page() -> WerkzeugResponse: + return redirect(url_for('index')) + @login_manager.user_loader # type: ignore[misc] def user_loader(username: str) -> User | None: @@ -1317,7 +1326,7 @@ def get_index_params(request: Request) -> tuple[bool, str]: # ##### Index level methods ##### -@app.route('/', methods=['GET']) +@app.route('/index', methods=['GET']) def index() -> str: if request.method == 'HEAD': # Just returns ack if the webserver is running From e9150c50fb87dfa465bb0df7e8e758f06cb27771 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Wed, 22 May 2024 00:38:35 +0200 Subject: [PATCH 09/19] new: fast internal cache for index --- lookyloo/capturecache.py | 6 +++++- lookyloo/lookyloo.py | 6 +++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/lookyloo/capturecache.py b/lookyloo/capturecache.py index ab59029..5202982 100644 --- a/lookyloo/capturecache.py +++ b/lookyloo/capturecache.py @@ -28,7 +28,7 @@ from pyipasnhistory import IPASNHistory # type: ignore[attr-defined] from redis import Redis from .context import Context -from .helpers import get_captures_dir, is_locked +from .helpers import get_captures_dir, is_locked, make_ts_from_dirname from .indexing import Indexing from .default import LookylooException, try_make_file, get_config from .exceptions import MissingCaptureDirectory, NoValidHarFile, MissingUUID, TreeNeedsRebuild @@ -260,11 +260,13 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] return None p = self.redis.pipeline() has_new_cached_captures = False + recent_captures = {} for uuid, directory in self.redis.hscan_iter('lookup_dirs'): if uuid in self.__cache: continue has_new_cached_captures = True p.hgetall(directory) + recent_captures[uuid] = make_ts_from_dirname(directory.rsplit('/', 1)[-1]).timestamp() if not has_new_cached_captures: return for cache in p.execute(): @@ -276,6 +278,7 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] self.logger.warning(f'Unable to initialize the cache: {e}') continue self.__cache[cc.uuid] = cc + self.redis.zadd('recent_captures', recent_captures) def _get_capture_dir(self, uuid: str) -> str: # Try to get from the recent captures cache in redis @@ -285,6 +288,7 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] return capture_dir # The capture was either removed or archived, cleaning up self.redis.hdel('lookup_dirs', uuid) + self.redis.zrem('recent_captures', uuid) self.redis.delete(capture_dir) # Try to get from the archived captures cache in redis diff --git 
a/lookyloo/lookyloo.py b/lookyloo/lookyloo.py index 01f3c19..ebfeac7 100644 --- a/lookyloo/lookyloo.py +++ b/lookyloo/lookyloo.py @@ -58,7 +58,7 @@ from .exceptions import (MissingCaptureDirectory, from .helpers import (get_captures_dir, get_email_template, get_resources_hashes, get_taxonomies, uniq_domains, ParsedUserAgent, load_cookies, UserAgents, - get_useragent_for_requests, make_ts_from_dirname, load_takedown_filters + get_useragent_for_requests, load_takedown_filters ) from .modules import (MISPs, PhishingInitiative, UniversalWhois, UrlScan, VirusTotal, Phishtank, Hashlookup, @@ -510,8 +510,7 @@ class Lookyloo(): index_cut_time = cut_time if capture_uuids is None: - capture_uuids = {uuid for uuid, directory in self.redis.hscan_iter('lookup_dirs') - if make_ts_from_dirname(directory.rsplit('/', 1)[-1]) > index_cut_time} + capture_uuids = self.redis.zrevrangebyscore('recent_captures', '+inf', index_cut_time.timestamp()) # NOTE: we absolutely have to respect the cached_captures_only setting and # never overwrite it. 
This method is called to display the index # and if we try to display everything, including the non-cached entries, @@ -1503,3 +1502,4 @@ class Lookyloo(): _fw.write(favicon) self.redis.hset('lookup_dirs', uuid, str(dirpath)) + self.redis.zadd('recent_captures', {uuid: now.timestamp()}) From 0fef5241053c9348f67d9ce84f7ddcf09b573861 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Wed, 22 May 2024 01:06:19 +0200 Subject: [PATCH 10/19] fix: Make sure id_up in the API still works --- website/web/__init__.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/website/web/__init__.py b/website/web/__init__.py index 6eef395..965c532 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -81,10 +81,16 @@ user_agents = UserAgents() if get_config('generic', 'index_is_capture'): @app.route('/', methods=['GET']) def landing_page() -> WerkzeugResponse: + if request.method == 'HEAD': + # Just returns ack if the webserver is running + return 'Ack' return redirect(url_for('capture_web')) else: @app.route('/', methods=['GET']) def landing_page() -> WerkzeugResponse: + if request.method == 'HEAD': + # Just returns ack if the webserver is running + return 'Ack' return redirect(url_for('index')) @@ -1328,9 +1334,6 @@ def get_index_params(request: Request) -> tuple[bool, str]: @app.route('/index', methods=['GET']) def index() -> str: - if request.method == 'HEAD': - # Just returns ack if the webserver is running - return 'Ack' show_error, category = get_index_params(request) return index_generic(show_error=show_error) From 0f6d5c164954ef83973d0e188aa67e2b8ea17ce6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Sat, 25 May 2024 12:29:01 +0200 Subject: [PATCH 11/19] fix: Speedup quick cache init --- bin/async_capture.py | 2 +- lookyloo/capturecache.py | 10 +++++++--- website/web/__init__.py | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/bin/async_capture.py b/bin/async_capture.py
index 7b85337..2b741a5 100755 --- a/bin/async_capture.py +++ b/bin/async_capture.py @@ -30,7 +30,7 @@ class AsyncCapture(AbstractManager): self.script_name = 'async_capture' self.only_global_lookups: bool = get_config('generic', 'only_global_lookups') self.capture_dir: Path = get_captures_dir() - self.lookyloo = Lookyloo() + self.lookyloo = Lookyloo(cache_max_size=1) self.captures: set[asyncio.Task] = set() # type: ignore[type-arg] diff --git a/lookyloo/capturecache.py b/lookyloo/capturecache.py index 5202982..c4b7a55 100644 --- a/lookyloo/capturecache.py +++ b/lookyloo/capturecache.py @@ -28,7 +28,7 @@ from pyipasnhistory import IPASNHistory # type: ignore[attr-defined] from redis import Redis from .context import Context -from .helpers import get_captures_dir, is_locked, make_ts_from_dirname +from .helpers import get_captures_dir, is_locked from .indexing import Indexing from .default import LookylooException, try_make_file, get_config from .exceptions import MissingCaptureDirectory, NoValidHarFile, MissingUUID, TreeNeedsRebuild @@ -187,6 +187,7 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] self.ipasnhistory: IPASNHistory | None = IPASNHistory() if not self.ipasnhistory.is_up: self.ipasnhistory = None + self.logger.info('IPASN History ready') except Exception as e: # Unable to setup IPASN History self.logger.warning(f'Unable to setup IPASN History: {e}') @@ -195,6 +196,7 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] self.cloudflare: Cloudflare | None = Cloudflare() if not self.cloudflare.available: self.cloudflare = None + self.logger.info('Cloudflare ready') except Exception as e: self.logger.warning(f'Unable to setup Cloudflare: {e}') self.cloudflare = None @@ -266,7 +268,6 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] continue has_new_cached_captures = True p.hgetall(directory) - recent_captures[uuid] = make_ts_from_dirname(directory.rsplit('/', 1)[-1]).timestamp() if not has_new_cached_captures: return for cache in 
p.execute(): @@ -278,7 +279,10 @@ class CapturesIndex(Mapping): # type: ignore[type-arg] self.logger.warning(f'Unable to initialize the cache: {e}') continue self.__cache[cc.uuid] = cc - self.redis.zadd('recent_captures', recent_captures) + if hasattr(cc, 'timestamp'): + recent_captures[uuid] = cc.timestamp.timestamp() + if recent_captures: + self.redis.zadd('recent_captures', recent_captures) def _get_capture_dir(self, uuid: str) -> str: # Try to get from the recent captures cache in redis diff --git a/website/web/__init__.py b/website/web/__init__.py index 965c532..08448f1 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -80,14 +80,14 @@ user_agents = UserAgents() if get_config('generic', 'index_is_capture'): @app.route('/', methods=['GET']) - def landing_page() -> WerkzeugResponse: + def landing_page() -> WerkzeugResponse | str: if request.method == 'HEAD': # Just returns ack if the webserver is running return 'Ack' return redirect(url_for('capture_web')) else: @app.route('/', methods=['GET']) - def landing_page() -> WerkzeugResponse: + def landing_page() -> WerkzeugResponse | str: if request.method == 'HEAD': # Just returns ack if the webserver is running return 'Ack' From c687d929484dcb07de149a1f7e0757cbfb73fa7b Mon Sep 17 00:00:00 2001 From: Adrian Maraj <119583904+adrima01@users.noreply.github.com> Date: Tue, 28 May 2024 09:39:25 +0200 Subject: [PATCH 12/19] Fixing typo --- website/web/templates/tree.html | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/website/web/templates/tree.html b/website/web/templates/tree.html index c48d279..df52a8c 100644 --- a/website/web/templates/tree.html +++ b/website/web/templates/tree.html @@ -357,7 +357,7 @@ Ressources + title="All resources contained in the tree">Resources