From 80b5c045acd991f3d032492b7cba2da10de23d07 Mon Sep 17 00:00:00 2001
From: Raphaël Vinot
Date: Mon, 6 Dec 2021 14:30:08 +0100
Subject: [PATCH] chg: Migrate to new framework

---
 .gitignore | 40 +-
 Pipfile | 32 -
 Pipfile.lock | 627 --------
 bgpranking/abstractmanager.py | 33 -
 bgpranking/archive.py | 61 -
 bgpranking/asn_descriptions.py | 69 -
 bgpranking/{querying.py => bgpranking.py} | 171 +-
 bgpranking/config/bgpranking.json | 3 -
 bgpranking/config/modules/NothinkSNMP.json | 7 -
 bgpranking/config/modules/NothinkSSH.json | 7 -
 bgpranking/config/modules/NothinkTelnet.json | 7 -
 .../modules/PalevotrackerIPBlockList.json | 6 -
 .../config/modules/SSHBlackListBase.json | 6 -
 .../modules/ZeustrackerIPBlockList.json | 6 -
 bgpranking/dbinsert.py | 113 --
 bgpranking/default/__init__.py | 16 +
 bgpranking/default/abstractmanager.py | 168 ++
 bgpranking/{libs => default}/exceptions.py | 4 +
 bgpranking/default/helpers.py | 102 ++
 bgpranking/helpers.py | 67 +
 bgpranking/libs/helpers.py | 142 --
 bgpranking/modulesfetcher.py | 150 --
 bgpranking/parser.py | 97 --
 bgpranking/parsers/bambenekconsulting.py | 16 -
 bgpranking/ranking.py | 107 --
 bgpranking/sanitizer.py | 85 -
 bgpranking/shadowserverfetcher.py | 171 --
 bgpranking/{libs => }/statsripe.py | 41 +-
 {bgpranking/libs => bin}/__init__.py | 0
 bin/archiver.py | 83 +-
 bin/asn_descriptions.py | 76 +-
 bin/dbinsert.py | 114 +-
 bin/fetcher.py | 181 ++-
 bin/manual_ranking.py | 10 +-
 bin/monitor.py | 22 -
 bin/parser.py | 115 +-
 bin/ranking.py | 125 +-
 bin/run_backend.py | 100 +-
 bin/sanitizer.py | 100 +-
 bin/shutdown.py | 25 +-
 bin/ssfetcher.py | 209 ++-
 bin/start.py | 38 +-
 bin/start_website.py | 42 +-
 bin/stop.py | 30 +-
 bin/update.py | 113 ++
 client/bin/bgpranking | 55 -
 client/pybgpranking/__init__.py | 1 -
 client/pybgpranking/api.py | 49 -
 client/setup.py | 29 -
 config/generic.json.sample | 22 +
 .../config => config}/modules/Alienvault.json | 0
 .../modules/BlocklistDeApache.json | 0
 .../modules/BlocklistDeBots.json | 0
 .../modules/BlocklistDeFTP.json | 0
 .../modules/BlocklistDeIMAP.json | 0
 .../modules/BlocklistDeMail.json | 0
 .../modules/BlocklistDeSIP.json | 0
 .../modules/BlocklistDeSSH.json | 0
 .../modules/BlocklistDeStrong.json | 0
 .../config => config}/modules/CIArmy.json | 0
 .../modules/CleanMXMalwares.json | 0
 .../modules/CleanMXPhishing.json | 0
 .../modules/CleanMXPortals.json | 0
 .../modules/CoinBlockerLists.json | 0
 .../modules/DshieldDaily.json | 0
 .../modules/DshieldTopIPs.json | 0
 .../modules/EmergingThreatsCompromized.json | 0
 .../modules/FeodotrackerIPBlockList.json | 0
 .../config => config}/modules/Malc0de.json | 0
 .../modules/MalwareDomainListIP.json | 0
 .../modules/RansomwareIPBlockList.json | 0
 .../config => config}/modules/greensnow.json | 0
 .../modules/jq_all_the_things.sh | 0
 .../config => config}/modules/module.schema | 0
 .../modules/pop3gropers.json | 0
 .../modules/shadowserver_only.sh | 0
 .../config => config}/modules/validate_all.sh | 0
 .../shadowserver.json.sample | 0
 poetry.lock | 1418 +++++++++++++++++
 pyproject.toml | 64 +
 ranking/kvrocks.conf | 497 ++++++
 ranking/run_kvrocks.sh | 6 +
 requirements.txt | 39 -
 storage/ardb.conf | 468 ------
 storage/kvrocks.conf | 497 ++++++
 storage/run_ardb.sh | 6 -
 storage/run_kvrocks.sh | 6 +
 storage/shutdown_ardb.sh | 6 -
 tools/3rdparty.py | 30 +
 tools/migrate.py | 68 +
 bgpranking/monitor.py => tools/monitoring.py | 24 +-
 tools/validate_config_files.py | 94 ++
 website/3drparty.sh | 13 -
 website/web/__init__.py | 133 +-
 website/web/genericapi.py | 32 +
 website/web/helpers.py
| 27 + website/web/proxied.py | 17 + website/web/static/linegraph_country.js | 16 +- website/web/templates/main.html | 49 +- 99 files changed, 4552 insertions(+), 2851 deletions(-) delete mode 100644 Pipfile delete mode 100644 Pipfile.lock delete mode 100644 bgpranking/abstractmanager.py delete mode 100644 bgpranking/archive.py delete mode 100644 bgpranking/asn_descriptions.py rename bgpranking/{querying.py => bgpranking.py} (61%) delete mode 100644 bgpranking/config/bgpranking.json delete mode 100644 bgpranking/config/modules/NothinkSNMP.json delete mode 100644 bgpranking/config/modules/NothinkSSH.json delete mode 100644 bgpranking/config/modules/NothinkTelnet.json delete mode 100644 bgpranking/config/modules/PalevotrackerIPBlockList.json delete mode 100644 bgpranking/config/modules/SSHBlackListBase.json delete mode 100644 bgpranking/config/modules/ZeustrackerIPBlockList.json delete mode 100644 bgpranking/dbinsert.py create mode 100644 bgpranking/default/__init__.py create mode 100644 bgpranking/default/abstractmanager.py rename bgpranking/{libs => default}/exceptions.py (91%) create mode 100644 bgpranking/default/helpers.py create mode 100644 bgpranking/helpers.py delete mode 100644 bgpranking/libs/helpers.py delete mode 100644 bgpranking/modulesfetcher.py delete mode 100644 bgpranking/parser.py delete mode 100644 bgpranking/parsers/bambenekconsulting.py delete mode 100644 bgpranking/ranking.py delete mode 100644 bgpranking/sanitizer.py delete mode 100644 bgpranking/shadowserverfetcher.py rename bgpranking/{libs => }/statsripe.py (88%) rename {bgpranking/libs => bin}/__init__.py (100%) delete mode 100755 bin/monitor.py create mode 100755 bin/update.py delete mode 100755 client/bin/bgpranking delete mode 100644 client/pybgpranking/__init__.py delete mode 100644 client/pybgpranking/api.py delete mode 100644 client/setup.py create mode 100644 config/generic.json.sample rename {bgpranking/config => config}/modules/Alienvault.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeApache.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeBots.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeFTP.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeIMAP.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeMail.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeSIP.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeSSH.json (100%) rename {bgpranking/config => config}/modules/BlocklistDeStrong.json (100%) rename {bgpranking/config => config}/modules/CIArmy.json (100%) rename {bgpranking/config => config}/modules/CleanMXMalwares.json (100%) rename {bgpranking/config => config}/modules/CleanMXPhishing.json (100%) rename {bgpranking/config => config}/modules/CleanMXPortals.json (100%) rename {bgpranking/config => config}/modules/CoinBlockerLists.json (100%) rename {bgpranking/config => config}/modules/DshieldDaily.json (100%) rename {bgpranking/config => config}/modules/DshieldTopIPs.json (100%) rename {bgpranking/config => config}/modules/EmergingThreatsCompromized.json (100%) rename {bgpranking/config => config}/modules/FeodotrackerIPBlockList.json (100%) rename {bgpranking/config => config}/modules/Malc0de.json (100%) rename {bgpranking/config => config}/modules/MalwareDomainListIP.json (100%) rename {bgpranking/config => config}/modules/RansomwareIPBlockList.json (100%) rename {bgpranking/config => config}/modules/greensnow.json (100%) rename {bgpranking/config => 
config}/modules/jq_all_the_things.sh (100%) rename {bgpranking/config => config}/modules/module.schema (100%) rename {bgpranking/config => config}/modules/pop3gropers.json (100%) rename {bgpranking/config => config}/modules/shadowserver_only.sh (100%) rename {bgpranking/config => config}/modules/validate_all.sh (100%) rename bgpranking/config/shadowserver.json.example => config/shadowserver.json.sample (100%) create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 ranking/kvrocks.conf create mode 100755 ranking/run_kvrocks.sh delete mode 100644 requirements.txt delete mode 100644 storage/ardb.conf create mode 100644 storage/kvrocks.conf delete mode 100755 storage/run_ardb.sh create mode 100755 storage/run_kvrocks.sh delete mode 100755 storage/shutdown_ardb.sh create mode 100755 tools/3rdparty.py create mode 100644 tools/migrate.py rename bgpranking/monitor.py => tools/monitoring.py (52%) mode change 100644 => 100755 create mode 100755 tools/validate_config_files.py delete mode 100755 website/3drparty.sh create mode 100644 website/web/genericapi.py create mode 100644 website/web/helpers.py create mode 100644 website/web/proxied.py diff --git a/.gitignore b/.gitignore index 7da19a1..123d5a4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ +# Local exclude +scraped/ +*.swp +lookyloo/ete3_webserver/webapi.py + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -100,29 +105,20 @@ ENV/ # mypy .mypy_cache/ -# Redis + +# web +secret_key + +cache.pid *.rdb -# Storage +# Local config files +config/*.json +config/*.json.bkp + rawdata -# ardb -storage/ardb.pid -storage/data -storage/repl - -# Config file shadow server with password -bgpranking/config/shadowserver.json -# Ths shadow server config files are dynamically generated -bgpranking/config/modules/shadowserver_*.json - -# Do not store the d3 lib in the repo -website/web/static/d3*.js - -# Same got bootstrap-select -website/web/static/bootstrap-select* - -# Session key -website/secret_key - -*.swp +storage/db/ +storage/kvrocks* +website/web/static/d3.v5.js +website/web/static/bootstrap-select.min.* diff --git a/Pipfile b/Pipfile deleted file mode 100644 index c59e45d..0000000 --- a/Pipfile +++ /dev/null @@ -1,32 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.org/simple" -verify_ssl = true - -[dev-packages] - -[packages] -pybgpranking = {editable = true,path = "./client"} -bgpranking = {editable = true,path = "."} -redis = ">=3" -hiredis = "*" -python-dateutil = "*" -aiohttp = "*" -requests = "*" -simplejson = "*" -beautifulsoup4 = "*" -flask = "*" -flask-bootstrap = "*" -gunicorn = {extras = ["gevent"],version = "<20"} -pyipasnhistory = {editable = true,git = "https://github.com/D4-project/IPASN-History.git/",subdirectory = "client"} -pycountry = "*" -pid = {editable = true,git = "https://github.com/trbs/pid.git/"} -pytaxonomies = {editable = true,git = "https://github.com/MISP/PyTaxonomies.git"} -pymispgalaxies = {editable = true,git = "https://github.com/MISP/PyMISPGalaxies.git"} -Jinja2 = ">=2.10.1" # CVE-2019-10906 -idna-ssl = {markers = "python_version < '3.7'"} -typing-extensions = {markers = "python_version < '3.7'"} -werkzeug = ">=0.15.3" # CVE-2019-14806 - -[requires] -python_version = "3" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 126314b..0000000 --- a/Pipfile.lock +++ /dev/null @@ -1,627 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "8408df42fa1da47b55611862c09307fd4e2cb77b9dd45aef412c6b99b99f6b10" - }, - "pipfile-spec": 6, - "requires": { - 
"python_version": "3" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "aiohttp": { - "hashes": [ - "sha256:119feb2bd551e58d83d1b38bfa4cb921af8ddedec9fad7183132db334c3133e0", - "sha256:16d0683ef8a6d803207f02b899c928223eb219111bd52420ef3d7a8aa76227b6", - "sha256:2eb3efe243e0f4ecbb654b08444ae6ffab37ac0ef8f69d3a2ffb958905379daf", - "sha256:2ffea7904e70350da429568113ae422c88d2234ae776519549513c8f217f58a9", - "sha256:40bd1b101b71a18a528ffce812cc14ff77d4a2a1272dfb8b11b200967489ef3e", - "sha256:418597633b5cd9639e514b1d748f358832c08cd5d9ef0870026535bd5eaefdd0", - "sha256:481d4b96969fbfdcc3ff35eea5305d8565a8300410d3d269ccac69e7256b1329", - "sha256:4c1bdbfdd231a20eee3e56bd0ac1cd88c4ff41b64ab679ed65b75c9c74b6c5c2", - "sha256:5563ad7fde451b1986d42b9bb9140e2599ecf4f8e42241f6da0d3d624b776f40", - "sha256:58c62152c4c8731a3152e7e650b29ace18304d086cb5552d317a54ff2749d32a", - "sha256:5b50e0b9460100fe05d7472264d1975f21ac007b35dcd6fd50279b72925a27f4", - "sha256:5d84ecc73141d0a0d61ece0742bb7ff5751b0657dab8405f899d3ceb104cc7de", - "sha256:5dde6d24bacac480be03f4f864e9a67faac5032e28841b00533cd168ab39cad9", - "sha256:5e91e927003d1ed9283dee9abcb989334fc8e72cf89ebe94dc3e07e3ff0b11e9", - "sha256:62bc216eafac3204877241569209d9ba6226185aa6d561c19159f2e1cbb6abfb", - "sha256:6c8200abc9dc5f27203986100579fc19ccad7a832c07d2bc151ce4ff17190076", - "sha256:6ca56bdfaf825f4439e9e3673775e1032d8b6ea63b8953d3812c71bd6a8b81de", - "sha256:71680321a8a7176a58dfbc230789790639db78dad61a6e120b39f314f43f1907", - "sha256:7c7820099e8b3171e54e7eedc33e9450afe7cd08172632d32128bd527f8cb77d", - "sha256:7dbd087ff2f4046b9b37ba28ed73f15fd0bc9f4fdc8ef6781913da7f808d9536", - "sha256:822bd4fd21abaa7b28d65fc9871ecabaddc42767884a626317ef5b75c20e8a2d", - "sha256:8ec1a38074f68d66ccb467ed9a673a726bb397142c273f90d4ba954666e87d54", - "sha256:950b7ef08b2afdab2488ee2edaff92a03ca500a48f1e1aaa5900e73d6cf992bc", - "sha256:99c5a5bf7135607959441b7d720d96c8e5c46a1f96e9d6d4c9498be8d5f24212", - "sha256:b84ad94868e1e6a5e30d30ec419956042815dfaea1b1df1cef623e4564c374d9", - "sha256:bc3d14bf71a3fb94e5acf5bbf67331ab335467129af6416a437bd6024e4f743d", - "sha256:c2a80fd9a8d7e41b4e38ea9fe149deed0d6aaede255c497e66b8213274d6d61b", - "sha256:c44d3c82a933c6cbc21039326767e778eface44fca55c65719921c4b9661a3f7", - "sha256:cc31e906be1cc121ee201adbdf844522ea3349600dd0a40366611ca18cd40e81", - "sha256:d5d102e945ecca93bcd9801a7bb2fa703e37ad188a2f81b1e65e4abe4b51b00c", - "sha256:dd7936f2a6daa861143e376b3a1fb56e9b802f4980923594edd9ca5670974895", - "sha256:dee68ec462ff10c1d836c0ea2642116aba6151c6880b688e56b4c0246770f297", - "sha256:e76e78863a4eaec3aee5722d85d04dcbd9844bc6cd3bfa6aa880ff46ad16bfcb", - "sha256:eab51036cac2da8a50d7ff0ea30be47750547c9aa1aa2cf1a1b710a1827e7dbe", - "sha256:f4496d8d04da2e98cc9133e238ccebf6a13ef39a93da2e87146c8c8ac9768242", - "sha256:fbd3b5e18d34683decc00d9a360179ac1e7a320a5fee10ab8053ffd6deab76e0", - "sha256:feb24ff1226beeb056e247cf2e24bba5232519efb5645121c4aea5b6ad74c1f2" - ], - "index": "pypi", - "version": "==3.7.4" - }, - "async-timeout": { - "hashes": [ - "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f", - "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3" - ], - "version": "==3.0.1" - }, - "attrs": { - "hashes": [ - "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", - "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" - ], - "version": "==20.3.0" - }, - "beautifulsoup4": { - "hashes": [ - 
"sha256:5279c36b4b2ec2cb4298d723791467e3000e5384a43ea0cdf5d45207c7e97169", - "sha256:6135db2ba678168c07950f9a16c4031822c6f4aec75a65e0a97bc5ca09789931", - "sha256:dcdef580e18a76d54002088602eba453eec38ebbcafafeaabd8cab12b6155d57" - ], - "index": "pypi", - "version": "==4.8.1" - }, - "bgpranking": { - "editable": true, - "path": "." - }, - "certifi": { - "hashes": [ - "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", - "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" - ], - "version": "==2020.12.5" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "click": { - "hashes": [ - "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", - "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" - ], - "version": "==7.1.2" - }, - "dominate": { - "hashes": [ - "sha256:76ec2cde23700a6fc4fee098168b9dee43b99c2f1dd0ca6a711f683e8eb7e1e4", - "sha256:84b5f71ed30021193cb0faa45d7776e1083f392cfe67a49f44e98cb2ed76c036" - ], - "version": "==2.6.0" - }, - "flask": { - "hashes": [ - "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", - "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6" - ], - "index": "pypi", - "version": "==1.1.1" - }, - "flask-bootstrap": { - "hashes": [ - "sha256:cb08ed940183f6343a64e465e83b3a3f13c53e1baabb8d72b5da4545ef123ac8" - ], - "index": "pypi", - "version": "==3.3.7.1" - }, - "gevent": { - "hashes": [ - "sha256:16574e4aa902ebc7bad564e25aa9740a82620fdeb61e0bbf5cbc32e84c13cb6a", - "sha256:188c3c6da67e17ffa28f960fc80f8b7e4ba0f4efdc7519822c9d3a1784ca78ea", - "sha256:1e5af63e452cc1758924528a2ba6d3e472f5338e1534b7233cd01d3429fc1082", - "sha256:242e32cc011ad7127525ca9181aef3379ce4ad9c733aefe311ecf90248ad9a6f", - "sha256:2a9ae0a0fd956cbbc9c326b8f290dcad2b58acfb2e2732855fe1155fb110a04d", - "sha256:33741e3cd51b90483b14f73b6a3b32b779acf965aeb91d22770c0c8e0c937b73", - "sha256:3694f393ab08372bd337b9bc8eebef3ccab3c1623ef94536762a1eee68821449", - "sha256:464ec84001ba5108a9022aded4c5e69ea4d13ef11a2386d3ec37c1d08f3074c9", - "sha256:520cc2a029a9eef436e4e56b007af7859315cafa21937d43c1d5269f12f2c981", - "sha256:77b65a68c83e1c680f52dc39d5e5406763dd10a18ce08420665504b6f047962e", - "sha256:7bdfee07be5eee4f687bf90c54c2a65c909bcf2b6c4878faee51218ffa5d5d3e", - "sha256:969743debf89d6409423aaeae978437cc042247f91f5801e946a07a0a3b59148", - "sha256:96f704561a9dd9a817c67f2e279e23bfad6166cf95d63d35c501317e17f68bcf", - "sha256:9f99c3ec61daed54dc074fbcf1a86bcf795b9dfac2f6d4cdae6dfdb8a9125692", - "sha256:a130a1885603eabd8cea11b3e1c3c7333d4341b537eca7f0c4794cb5c7120db1", - "sha256:a54b9c7516c211045d7897a73a4ccdc116b3720c9ad3c591ef9592b735202a3b", - "sha256:ac98570649d9c276e39501a1d1cbf6c652b78f57a0eb1445c5ff25ff80336b63", - "sha256:afaeda9a7e8e93d0d86bf1d65affe912366294913fe43f0d107145dc32cd9545", - "sha256:b6ffc1131e017aafa70d7ec19cc24010b19daa2f11d5dc2dc191a79c3c9ea147", - "sha256:ba0c6ad94614e9af4240affbe1b4839c54da5a0a7e60806c6f7f69c1a7f5426e", - "sha256:bdb3677e77ab4ebf20c4752ac49f3b1e47445678dd69f82f9905362c68196456", - "sha256:c2c4326bb507754ef354635c05f560a217c171d80f26ca65bea81aa59b1ac179", - "sha256:cfb2878c2ecf27baea436bb9c4d8ab8c2fa7763c3916386d5602992b6a056ff3", - "sha256:e370e0a861db6f63c75e74b6ee56a40f5cdac90212ec404621445afa12bfc94b", - "sha256:e8a5d9fcf5d031f2e4c499f5f4b53262face416e22e8769078354f641255a663", - 
"sha256:ecff28416c99e0f73137f35849c3027cc3edde9dc13b7707825ebbf728623928", - "sha256:f0498df97a303da77e180a9368c9228b0fc94d10dd2ce79fc5ebb63fec0d2fc9", - "sha256:f91fd07b9cf642f24e58ed381e19ec33e28b8eee8726c19b026ea24fcc9ff897" - ], - "version": "==21.1.2" - }, - "greenlet": { - "hashes": [ - "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196", - "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85", - "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683", - "sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd", - "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7", - "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476", - "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c", - "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2", - "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c", - "sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6", - "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d", - "sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f", - "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664", - "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c", - "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0", - "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139", - "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef", - "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7", - "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8", - "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c", - "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce", - "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7", - "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36", - "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5", - "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a", - "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee", - "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70", - "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c", - "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128", - "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2", - "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218", - "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df", - "sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e", - "sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be", - "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770", - "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203", - "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06", - "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f", - "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379", - "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7", - "sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b", - "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243", - 
"sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378" - ], - "markers": "platform_python_implementation == 'CPython'", - "version": "==1.0.0" - }, - "gunicorn": { - "extras": [ - "gevent" - ], - "hashes": [ - "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", - "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" - ], - "index": "pypi", - "version": "==19.9.0" - }, - "hiredis": { - "hashes": [ - "sha256:01b577f84c20ecc9c07fc4c184231b08e3c3942de096fa99978e053de231c423", - "sha256:01ff0900134166961c9e339df77c33b72f7edc5cb41739f0babcd9faa345926e", - "sha256:03ed34a13316d0c34213c4fd46e0fa3a5299073f4d4f08e93fed8c2108b399b3", - "sha256:040436e91df5143aff9e0debb49530d0b17a6bd52200ce568621c31ef581b10d", - "sha256:091eb38fbf968d1c5b703e412bbbd25f43a7967d8400842cee33a5a07b33c27b", - "sha256:102f9b9dc6ed57feb3a7c9bdf7e71cb7c278fe8df1edfcfe896bc3e0c2be9447", - "sha256:2b4b392c7e3082860c8371fab3ae762139090f9115819e12d9f56060f9ede05d", - "sha256:2c9cc0b986397b833073f466e6b9e9c70d1d4dc2c2c1b3e9cae3a23102ff296c", - "sha256:2fa65a9df683bca72073cd77709ddeb289ea2b114d3775d225fbbcc5faf808c5", - "sha256:38437a681f17c975fd22349e72c29bc643f8e7eb2d6dc5df419eac59afa4d7ce", - "sha256:3b3428fa3cf1ee178807b52c9bee8950ab94cd4eaa9bfae8c1bbae3c49501d34", - "sha256:3dd8c2fae7f5494978facb0e93297dd627b1a3f536f3b070cf0a7d9157a07dcb", - "sha256:4414a96c212e732723b5c3d7c04d386ebbb2ec359e1de646322cbc3f875cbd0d", - "sha256:48c627581ad4ef60adbac980981407939acf13a0e18f093502c7b542223c4f19", - "sha256:4a60e71625a2d78d8ab84dfb2fa2cfd9458c964b6e6c04fea76d9ade153fb371", - "sha256:585ace09f434e43d8a8dbeb366865b1a044d7c06319b3c7372a0a00e63b860f4", - "sha256:74b364b3f06c9cf0a53f7df611045bc9437ed972a283fa1f0b12537236d23ddc", - "sha256:75c65c3850e89e9daa68d1b9bedd5806f177d60aa5a7b0953b4829481cfc1f72", - "sha256:7f052de8bf744730a9120dbdc67bfeb7605a01f69fb8e7ba5c475af33c24e145", - "sha256:8113a7d5e87ecf57cd4ae263cc9e429adb9a3e59f5a7768da5d3312a8d0a051a", - "sha256:84857ce239eb8ed191ac78e77ff65d52902f00f30f4ee83bf80eb71da73b70e6", - "sha256:8644a48ddc4a40b3e3a6b9443f396c2ee353afb2d45656c4fc68d04a82e8e3f7", - "sha256:936aa565e673536e8a211e43ec43197406f24cd1f290138bd143765079c8ba00", - "sha256:9afeb88c67bbc663b9f27385c496da056d06ad87f55df6e393e1516cfecb0461", - "sha256:9d62cc7880110e4f83b0a51d218f465d3095e2751fbddd34e553dbd106a929ff", - "sha256:a1fadd062fc8d647ff39220c57ea2b48c99bb73f18223828ec97f88fc27e7898", - "sha256:a7754a783b1e5d6f627c19d099b178059c62f782ab62b4d8ba165b9fbc2ee34c", - "sha256:aa59dd63bb3f736de4fc2d080114429d5d369dfb3265f771778e8349d67a97a4", - "sha256:ae2ee0992f8de249715435942137843a93db204dd7db1e7cc9bdc5a8436443e8", - "sha256:b36842d7cf32929d568f37ec5b3173b72b2ec6572dec4d6be6ce774762215aee", - "sha256:bcbf9379c553b5facc6c04c1e5569b44b38ff16bcbf354676287698d61ee0c92", - "sha256:cbccbda6f1c62ab460449d9c85fdf24d0d32a6bf45176581151e53cc26a5d910", - "sha256:d0caf98dfb8af395d6732bd16561c0a2458851bea522e39f12f04802dbf6f502", - "sha256:d6456afeddba036def1a36d8a2758eca53202308d83db20ab5d0b66590919627", - "sha256:dbaef9a21a4f10bc281684ee4124f169e62bb533c2a92b55f8c06f64f9af7b8f", - "sha256:dce84916c09aaece006272b37234ae84a8ed13abb3a4d341a23933b8701abfb5", - "sha256:eb8c9c8b9869539d58d60ff4a28373a22514d40495911451343971cb4835b7a9", - "sha256:efc98b14ee3a8595e40b1425e8d42f5fd26f11a7b215a81ef9259068931754f4", - "sha256:fa2dc05b87d97acc1c6ae63f3e0f39eae5246565232484b08db6bf2dc1580678", - "sha256:fe7d6ce9f6a5fbe24f09d95ea93e9c7271abc4e1565da511e1449b107b4d7848" - ], - 
"index": "pypi", - "version": "==1.0.1" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "idna-ssl": { - "hashes": [ - "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c" - ], - "index": "pypi", - "markers": "python_version < '3.7'", - "version": "==1.1.0" - }, - "itsdangerous": { - "hashes": [ - "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", - "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" - ], - "version": "==1.1.0" - }, - "jinja2": { - "hashes": [ - "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419", - "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6" - ], - "index": "pypi", - "version": "==2.11.3" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", - "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f", - "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014", - "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85", - "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850", - "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1", - 
"sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5", - "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c", - "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be", - "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621" - ], - "version": "==1.1.1" - }, - "multidict": { - "hashes": [ - "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a", - "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93", - "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632", - "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656", - "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79", - "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7", - "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d", - "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5", - "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224", - "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26", - "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea", - "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348", - "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6", - "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76", - "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1", - "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f", - "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952", - "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a", - "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37", - "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9", - "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359", - "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8", - "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da", - "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3", - "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d", - "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf", - 
"sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841", - "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d", - "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93", - "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f", - "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647", - "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635", - "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456", - "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda", - "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5", - "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281", - "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80" - ], - "version": "==5.1.0" - }, - "pid": { - "editable": true, - "git": "https://github.com/trbs/pid.git/", - "ref": "6a5d43a57149f6d70c61ad10f5622f88a3ae663c" - }, - "pybgpranking": { - "editable": true, - "path": "./client" - }, - "pycountry": { - "hashes": [ - "sha256:3c57aa40adcf293d59bebaffbe60d8c39976fba78d846a018dc0c2ec9c6cb3cb" - ], - "index": "pypi", - "version": "==19.8.18" - }, - "pyipasnhistory": { - "editable": true, - "git": "https://github.com/D4-project/IPASN-History.git/", - "ref": "98ed95eeb33dff69b350bbab638da5634614d685", - "subdirectory": "client" - }, - "pymispgalaxies": { - "editable": true, - "git": "https://github.com/MISP/PyMISPGalaxies.git", - "ref": "a43655ba85f00ba3a212a7f38e89380a2480adac" - }, - "pytaxonomies": { - "editable": true, - "git": "https://github.com/MISP/PyTaxonomies.git", - "ref": "01d18a50fd786d359df0a448200f10d64c06d175" - }, - "python-dateutil": { - "hashes": [ - "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", - "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" - ], - "index": "pypi", - "version": "==2.8.1" - }, - "redis": { - "hashes": [ - "sha256:3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62", - "sha256:8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2" - ], - "index": "pypi", - "version": "==3.3.11" - }, - "requests": { - "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" - ], - "index": "pypi", - "version": "==2.22.0" - }, - "simplejson": { - "hashes": [ - "sha256:067a7177ddfa32e1483ba5169ebea1bc2ea27f224853211ca669325648ca5642", - "sha256:2fc546e6af49fb45b93bbe878dea4c48edc34083729c0abd09981fe55bdf7f91", - "sha256:354fa32b02885e6dae925f1b5bbf842c333c1e11ea5453ddd67309dc31fdb40a", - "sha256:37e685986cf6f8144607f90340cff72d36acf654f3653a6c47b84c5c38d00df7", - "sha256:3af610ee72efbe644e19d5eaad575c73fb83026192114e5f6719f4901097fce2", - "sha256:3b919fc9cf508f13b929a9b274c40786036b31ad28657819b3b9ba44ba651f50", - "sha256:3dd289368bbd064974d9a5961101f080e939cbe051e6689a193c99fb6e9ac89b", - "sha256:6c3258ffff58712818a233b9737fe4be943d306c40cf63d14ddc82ba563f483a", - "sha256:75e3f0b12c28945c08f54350d91e624f8dd580ab74fd4f1bbea54bc6b0165610", - "sha256:b1f329139ba647a9548aa05fb95d046b4a677643070dc2afc05fa2e975d09ca5", - "sha256:ee9625fc8ee164902dfbb0ff932b26df112da9f871c32f0f9c1bcf20c350fe2a", - "sha256:fb2530b53c28f0d4d84990e945c2ebb470edb469d63e389bf02ff409012fe7c5" - ], - "index": "pypi", - "version": "==3.16.0" - }, - "six": { - "hashes": [ - "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", - 
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" - ], - "version": "==1.15.0" - }, - "soupsieve": { - "hashes": [ - "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc", - "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b" - ], - "version": "==2.2.1" - }, - "typing-extensions": { - "hashes": [ - "sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2", - "sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d", - "sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575" - ], - "index": "pypi", - "markers": "python_version < '3.7'", - "version": "==3.7.4.1" - }, - "urllib3": { - "hashes": [ - "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2", - "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e" - ], - "version": "==1.25.11" - }, - "visitor": { - "hashes": [ - "sha256:2c737903b2b6864ebc6167eef7cf3b997126f1aa94bdf590f90f1436d23e480a" - ], - "version": "==0.1.3" - }, - "werkzeug": { - "hashes": [ - "sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7", - "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4" - ], - "index": "pypi", - "version": "==0.16.0" - }, - "yarl": { - "hashes": [ - "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e", - "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434", - "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366", - "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3", - "sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec", - "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959", - "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e", - "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c", - "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6", - "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a", - "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6", - "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424", - "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e", - "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f", - "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50", - "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2", - "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc", - "sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4", - "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970", - "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10", - "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0", - "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406", - "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896", - "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643", - "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721", - "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478", - "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724", - "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e", - "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8", - 
"sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96", - "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25", - "sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76", - "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2", - "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2", - "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c", - "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a", - "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71" - ], - "version": "==1.6.3" - }, - "zope.event": { - "hashes": [ - "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42", - "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330" - ], - "version": "==4.5.0" - }, - "zope.interface": { - "hashes": [ - "sha256:05a97ba92c1c7c26f25c9f671aa1ef85ffead6cdad13770e5b689cf983adc7e1", - "sha256:07d61722dd7d85547b7c6b0f5486b4338001fab349f2ac5cabc0b7182eb3425d", - "sha256:0a990dcc97806e5980bbb54b2e46b9cde9e48932d8e6984daf71ef1745516123", - "sha256:150e8bcb7253a34a4535aeea3de36c0bb3b1a6a47a183a95d65a194b3e07f232", - "sha256:1743bcfe45af8846b775086471c28258f4c6e9ee8ef37484de4495f15a98b549", - "sha256:1b5f6c8fff4ed32aa2dd43e84061bc8346f32d3ba6ad6e58f088fe109608f102", - "sha256:21e49123f375703cf824214939d39df0af62c47d122d955b2a8d9153ea08cfd5", - "sha256:21f579134a47083ffb5ddd1307f0405c91aa8b61ad4be6fd5af0171474fe0c45", - "sha256:27c267dc38a0f0079e96a2945ee65786d38ef111e413c702fbaaacbab6361d00", - "sha256:299bde0ab9e5c4a92f01a152b7fbabb460f31343f1416f9b7b983167ab1e33bc", - "sha256:2ab88d8f228f803fcb8cb7d222c579d13dab2d3622c51e8cf321280da01102a7", - "sha256:2ced4c35061eea623bc84c7711eedce8ecc3c2c51cd9c6afa6290df3bae9e104", - "sha256:2dcab01c660983ba5e5a612e0c935141ccbee67d2e2e14b833e01c2354bd8034", - "sha256:32546af61a9a9b141ca38d971aa6eb9800450fa6620ce6323cc30eec447861f3", - "sha256:32b40a4c46d199827d79c86bb8cb88b1bbb764f127876f2cb6f3a47f63dbada3", - "sha256:3cc94c69f6bd48ed86e8e24f358cb75095c8129827df1298518ab860115269a4", - "sha256:42b278ac0989d6f5cf58d7e0828ea6b5951464e3cf2ff229dd09a96cb6ba0c86", - "sha256:495b63fd0302f282ee6c1e6ea0f1c12cb3d1a49c8292d27287f01845ff252a96", - "sha256:4af87cdc0d4b14e600e6d3d09793dce3b7171348a094ba818e2a68ae7ee67546", - "sha256:4b94df9f2fdde7b9314321bab8448e6ad5a23b80542dcab53e329527d4099dcb", - "sha256:4c48ddb63e2b20fba4c6a2bf81b4d49e99b6d4587fb67a6cd33a2c1f003af3e3", - "sha256:4df9afd17bd5477e9f8c8b6bb8507e18dd0f8b4efe73bb99729ff203279e9e3b", - "sha256:518950fe6a5d56f94ba125107895f938a4f34f704c658986eae8255edb41163b", - "sha256:538298e4e113ccb8b41658d5a4b605bebe75e46a30ceca22a5a289cf02c80bec", - "sha256:55465121e72e208a7b69b53de791402affe6165083b2ea71b892728bd19ba9ae", - "sha256:588384d70a0f19b47409cfdb10e0c27c20e4293b74fc891df3d8eb47782b8b3e", - "sha256:6278c080d4afffc9016e14325f8734456831124e8c12caa754fd544435c08386", - "sha256:64ea6c221aeee4796860405e1aedec63424cda4202a7ad27a5066876db5b0fd2", - "sha256:681dbb33e2b40262b33fd383bae63c36d33fd79fa1a8e4092945430744ffd34a", - "sha256:6936aa9da390402d646a32a6a38d5409c2d2afb2950f045a7d02ab25a4e7d08d", - "sha256:778d0ec38bbd288b150a3ae363c8ffd88d2207a756842495e9bffd8a8afbc89a", - "sha256:8251f06a77985a2729a8bdbefbae79ee78567dddc3acbd499b87e705ca59fe24", - "sha256:83b4aa5344cce005a9cff5d0321b2e318e871cc1dfc793b66c32dd4f59e9770d", - "sha256:844fad925ac5c2ad4faaceb3b2520ad016b5280105c6e16e79838cf951903a7b", - 
"sha256:8ceb3667dd13b8133f2e4d637b5b00f240f066448e2aa89a41f4c2d78a26ce50", - "sha256:92dc0fb79675882d0b6138be4bf0cec7ea7c7eede60aaca78303d8e8dbdaa523", - "sha256:9789bd945e9f5bd026ed3f5b453d640befb8b1fc33a779c1fe8d3eb21fe3fb4a", - "sha256:a2b6d6eb693bc2fc6c484f2e5d93bd0b0da803fa77bf974f160533e555e4d095", - "sha256:aab9f1e34d810feb00bf841993552b8fcc6ae71d473c505381627143d0018a6a", - "sha256:abb61afd84f23099ac6099d804cdba9bd3b902aaaded3ffff47e490b0a495520", - "sha256:adf9ee115ae8ff8b6da4b854b4152f253b390ba64407a22d75456fe07dcbda65", - "sha256:aedc6c672b351afe6dfe17ff83ee5e7eb6ed44718f879a9328a68bdb20b57e11", - "sha256:b7a00ecb1434f8183395fac5366a21ee73d14900082ca37cf74993cf46baa56c", - "sha256:ba32f4a91c1cb7314c429b03afbf87b1fff4fb1c8db32260e7310104bd77f0c7", - "sha256:cbd0f2cbd8689861209cd89141371d3a22a11613304d1f0736492590aa0ab332", - "sha256:e4bc372b953bf6cec65a8d48482ba574f6e051621d157cf224227dbb55486b1e", - "sha256:eccac3d9aadc68e994b6d228cb0c8919fc47a5350d85a1b4d3d81d1e98baf40c", - "sha256:efd550b3da28195746bb43bd1d815058181a7ca6d9d6aa89dd37f5eefe2cacb7", - "sha256:efef581c8ba4d990770875e1a2218e856849d32ada2680e53aebc5d154a17e20", - "sha256:f057897711a630a0b7a6a03f1acf379b6ba25d37dc5dc217a97191984ba7f2fc", - "sha256:f37d45fab14ffef9d33a0dc3bc59ce0c5313e2253323312d47739192da94f5fd", - "sha256:f44906f70205d456d503105023041f1e63aece7623b31c390a0103db4de17537" - ], - "version": "==5.2.0" - } - }, - "develop": {} -} diff --git a/bgpranking/abstractmanager.py b/bgpranking/abstractmanager.py deleted file mode 100644 index 2840eb2..0000000 --- a/bgpranking/abstractmanager.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from abc import ABC, abstractmethod -import logging - -from .libs.helpers import long_sleep, shutdown_requested - - -class AbstractManager(ABC): - - def __init__(self, loglevel: int=logging.DEBUG): - self.loglevel = loglevel - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - self.logger.info(f'Initializing {self.__class__.__name__}') - - @abstractmethod - def _to_run_forever(self): - pass - - def run(self, sleep_in_sec: int): - self.logger.info(f'Launching {self.__class__.__name__}') - while True: - if shutdown_requested(): - break - try: - self._to_run_forever() - except Exception: - self.logger.exception(f'Something went wrong in {self.__class__.__name__}.') - if not long_sleep(sleep_in_sec): - break - self.logger.info(f'Shutting down {self.__class__.__name__}') diff --git a/bgpranking/archive.py b/bgpranking/archive.py deleted file mode 100644 index 5e75643..0000000 --- a/bgpranking/archive.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from dateutil import parser -from datetime import date -from pathlib import Path -from dateutil.relativedelta import relativedelta -from collections import defaultdict -import zipfile -import logging -try: - import simplejson as json -except ImportError: - import json - -from .libs.helpers import safe_create_dir, set_running, unset_running - - -class DeepArchive(): - - def __init__(self, config_file: Path, storage_directory: Path, - loglevel: int=logging.DEBUG): - '''Archive everyfile older than 2 month.''' - with open(config_file, 'r') as f: - module_parameters = json.load(f) - self.vendor = module_parameters['vendor'] - self.listname = module_parameters['name'] - self.directory = storage_directory / self.vendor / self.listname / 'archive' - safe_create_dir(self.directory) - self.deep_archive = self.directory / 'deep' - 
safe_create_dir(self.deep_archive) - self.__init_logger(loglevel) - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - self.logger.setLevel(loglevel) - - def archive(self): - set_running(self.__class__.__name__) - - to_archive = defaultdict(list) - today = date.today() - last_day_to_keep = date(today.year, today.month, 1) - relativedelta(months=2) - for p in self.directory.iterdir(): - if not p.is_file(): - continue - filedate = parser.parse(p.name.split('.')[0]).date() - if filedate >= last_day_to_keep: - continue - to_archive['{}.zip'.format(filedate.strftime('%Y%m'))].append(p) - if to_archive: - self.logger.info('Found old files. Archiving: {}'.format(', '.join(to_archive.keys()))) - else: - self.logger.debug('No old files.') - for archivename, path_list in to_archive.items(): - with zipfile.ZipFile(self.deep_archive / archivename, 'x', zipfile.ZIP_DEFLATED) as z: - for f in path_list: - z.write(f, f.name) - # Delete all the files if the archiving worked out properly - [f.unlink() for f in path_list] - unset_running(self.__class__.__name__) diff --git a/bgpranking/asn_descriptions.py b/bgpranking/asn_descriptions.py deleted file mode 100644 index 19637d6..0000000 --- a/bgpranking/asn_descriptions.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import logging -from pathlib import Path -import requests -from redis import StrictRedis -from dateutil.parser import parse -import re - -from .libs.helpers import set_running, unset_running, get_socket_path, safe_create_dir - - -class ASNDescriptions(): - - def __init__(self, storage_directory: Path, loglevel: int=logging.DEBUG): - self.__init_logger(loglevel) - self.asn_meta = StrictRedis(unix_socket_path=get_socket_path('storage'), db=2, decode_responses=True) - self.logger.debug('Starting ASN History') - self.directory = storage_directory / 'asn_descriptions' - safe_create_dir(self.directory) - self.archives = self.directory / 'archive' - safe_create_dir(self.archives) - self.url = 'https://www.cidr-report.org/as2.0/autnums.html' - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - - def __update_available(self): - r = requests.head(self.url) - current_last_modified = parse(r.headers['Last-Modified']) - if not self.asn_meta.exists('ans_description_last_update'): - return True - last_update = parse(self.asn_meta.get('ans_description_last_update')) - if last_update < current_last_modified: - return True - return False - - def load_descriptions(self): - if not self.__update_available(): - self.logger.debug('No new file to import.') - return - set_running(self.__class__.__name__) - self.logger.info('Importing new ASN descriptions.') - r = requests.get(self.url) - last_modified = parse(r.headers['Last-Modified']).isoformat() - p = self.asn_meta.pipeline() - new_asn = 0 - new_description = 0 - for asn, descr in re.findall('as=AS(.*)&.* (.*)\n', r.text): - existing_descriptions = self.asn_meta.hgetall(f'{asn}|descriptions') - if not existing_descriptions: - self.logger.debug(f'New ASN: {asn} - {descr}') - p.hset(f'{asn}|descriptions', last_modified, descr) - new_asn += 1 - else: - last_descr = sorted(existing_descriptions.keys(), reverse=True)[0] - if descr != existing_descriptions[last_descr]: - self.logger.debug(f'New description for {asn}: {existing_descriptions[last_descr]} -> {descr}') - p.hset(f'{asn}|descriptions', last_modified, descr) - 
new_description += 1 - p.set('ans_description_last_update', last_modified) - p.execute() - self.logger.info(f'Done with import. New ASNs: {new_asn}, new descriptions: {new_description}') - if new_asn or new_description: - with open(self.archives / f'{last_modified}.html', 'w') as f: - f.write(r.text) - unset_running(self.__class__.__name__) diff --git a/bgpranking/querying.py b/bgpranking/bgpranking.py similarity index 61% rename from bgpranking/querying.py rename to bgpranking/bgpranking.py index 6c4fd17..a5588d7 100644 --- a/bgpranking/querying.py +++ b/bgpranking/bgpranking.py @@ -1,37 +1,52 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from typing import TypeVar, Union +import logging +import re + +from redis import ConnectionPool, Redis +from redis.connection import UnixDomainSocketConnection + +from .default import get_config, get_socket_path + +from typing import TypeVar, Union, Optional, Dict, Any, List, Tuple import datetime from datetime import timedelta from dateutil.parser import parse from collections import defaultdict -import logging import json -from redis import StrictRedis -from .libs.helpers import get_socket_path, get_config_path -from .libs.exceptions import InvalidDateFormat -from .libs.statsripe import StatsRIPE +from .default import InvalidDateFormat +from .helpers import get_modules +from .statsripe import StatsRIPE Dates = TypeVar('Dates', datetime.datetime, datetime.date, str) -class Querying(): +class BGPRanking(): - def __init__(self, loglevel: int=logging.DEBUG): - self.__init_logger(loglevel) - self.storage = StrictRedis(unix_socket_path=get_socket_path('storage'), decode_responses=True) - self.ranking = StrictRedis(unix_socket_path=get_socket_path('storage'), db=1) - self.asn_meta = StrictRedis(unix_socket_path=get_socket_path('storage'), db=2, decode_responses=True) - self.cache = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - - def __init_logger(self, loglevel: int): + def __init__(self) -> None: self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) + self.logger.setLevel(get_config('generic', 'loglevel')) - def __normalize_date(self, date: Dates): + self.cache_pool: ConnectionPool = ConnectionPool(connection_class=UnixDomainSocketConnection, + path=get_socket_path('cache'), decode_responses=True) + + self.storage = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port'), decode_responses=True) + self.asn_meta = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port'), decode_responses=True) + self.ranking = Redis(get_config('generic', 'ranking_db_hostname'), get_config('generic', 'ranking_db_port')) + + @property + def cache(self): + return Redis(connection_pool=self.cache_pool, db=1) + + def check_redis_up(self) -> bool: + return self.cache.ping() + + def __normalize_date(self, date: Optional[Dates]) -> str: + if not date: + return datetime.date.today().isoformat() if isinstance(date, datetime.datetime): return date.date().isoformat() elif isinstance(date, datetime.date): @@ -45,16 +60,19 @@ class Querying(): def _ranking_cache_wrapper(self, key): if not self.cache.exists(key): if self.ranking.exists(key): - key_dump = self.ranking.dump(key) + content: List[Tuple[bytes, float]] = self.ranking.zrangebyscore(key, '-Inf', '+Inf', withscores=True) # Cache for 10 hours - self.cache.restore(key, 36000, key_dump, True) + self.cache.zadd(key, {value: rank for value, rank in content}) + 
self.cache.expire(key, 36000) - def asns_global_ranking(self, date: Dates=datetime.date.today(), source: Union[list, str]='', + def asns_global_ranking(self, date: Optional[Dates]=None, source: Union[list, str]='', ipversion: str='v4', limit: int=100): '''Aggregated ranking of all the ASNs known in the system, weighted by source.''' - to_return = {'meta': {'ipversion': ipversion, 'limit': limit}, 'source': source, - 'response': set()} - + to_return: Dict[str, Any] = { + 'meta': {'ipversion': ipversion, 'limit': limit}, + 'source': source, + 'response': set() + } d = self.__normalize_date(date) to_return['meta']['date'] = d if source: @@ -76,11 +94,13 @@ class Querying(): to_return['response'] = self.cache.zrevrange(key, start=0, end=limit, withscores=True) return to_return - def asn_details(self, asn: int, date: Dates= datetime.date.today(), source: Union[list, str]='', + def asn_details(self, asn: int, date: Optional[Dates]=None, source: Union[list, str]='', ipversion: str='v4'): '''Aggregated ranking of all the prefixes anounced by the given ASN, weighted by source.''' - to_return = {'meta': {'asn': asn, 'ipversion': ipversion, 'source': source}, - 'response': set()} + to_return: Dict[str, Any] = { + 'meta': {'asn': asn, 'ipversion': ipversion, 'source': source}, + 'response': set() + } d = self.__normalize_date(date) to_return['meta']['date'] = d @@ -103,12 +123,14 @@ class Querying(): to_return['response'] = self.cache.zrevrange(key, start=0, end=-1, withscores=True) return to_return - def asn_rank(self, asn: int, date: Dates=datetime.date.today(), source: Union[list, str]='', + def asn_rank(self, asn: int, date: Optional[Dates]=None, source: Union[list, str]='', ipversion: str='v4', with_position: bool=False): '''Get the rank of a single ASN, weighted by source.''' - to_return = {'meta': {'asn': asn, 'ipversion': ipversion, - 'source': source, 'with_position': with_position}, - 'response': 0.0} + to_return: Dict[str, Any] = { + 'meta': {'asn': asn, 'ipversion': ipversion, + 'source': source, 'with_position': with_position}, + 'response': 0.0 + } d = self.__normalize_date(date) to_return['meta']['date'] = d @@ -141,9 +163,9 @@ class Querying(): to_return['response'] = float(r) return to_return - def get_sources(self, date: Dates=datetime.date.today()): + def get_sources(self, date: Optional[Dates]=None): '''Get the sources availables for a specific day (default: today).''' - to_return = {'meta': {}, 'response': set()} + to_return: Dict[str, Any] = {'meta': {}, 'response': set()} d = self.__normalize_date(date) to_return['meta']['date'] = d @@ -151,9 +173,11 @@ class Querying(): to_return['response'] = self.storage.smembers(key) return to_return - def get_asn_descriptions(self, asn: int, all_descriptions=False): - to_return = {'meta': {'asn': asn, 'all_descriptions': all_descriptions}, - 'response': []} + def get_asn_descriptions(self, asn: int, all_descriptions=False) -> Dict[str, Any]: + to_return: Dict[str, Union[Dict, List, str]] = { + 'meta': {'asn': asn, 'all_descriptions': all_descriptions}, + 'response': [] + } descriptions = self.asn_meta.hgetall(f'{asn}|descriptions') if all_descriptions or not descriptions: to_return['response'] = descriptions @@ -161,11 +185,13 @@ class Querying(): to_return['response'] = descriptions[sorted(descriptions.keys(), reverse=True)[0]] return to_return - def get_prefix_ips(self, asn: int, prefix: str, date: Dates=datetime.date.today(), + def get_prefix_ips(self, asn: int, prefix: str, date: Optional[Dates]=None, source: Union[list, str]='', 
ipversion: str='v4'): - to_return = {'meta': {'asn': asn, 'prefix': prefix, 'ipversion': ipversion, - 'source': source}, - 'response': defaultdict(list)} + to_return: Dict[str, Any] = { + 'meta': {'asn': asn, 'prefix': prefix, 'ipversion': ipversion, + 'source': source}, + 'response': defaultdict(list) + } d = self.__normalize_date(date) to_return['meta']['date'] = d @@ -186,33 +212,37 @@ class Querying(): return to_return def get_asn_history(self, asn: int, period: int=100, source: Union[list, str]='', - ipversion: str='v4', date: Dates=datetime.date.today()): - to_return = {'meta': {'asn': asn, 'period': period, 'ipversion': ipversion, - 'source': source}, - 'response': []} + ipversion: str='v4', date: Optional[Dates]=None): + to_return: Dict[str, Any] = { + 'meta': {'asn': asn, 'period': period, 'ipversion': ipversion, 'source': source}, + 'response': [] + } - if isinstance(date, str): - date = parse(date).date() - if date + timedelta(days=period / 3) > datetime.date.today(): - # the period to display will be around the date passed at least 2/3 before the date, at most 1/3 after - # FIXME: That is not doing what it is supposed to... - date = datetime.date.today() + if date is None: + python_date: datetime.date = datetime.date.today() + elif isinstance(date, str): + python_date = parse(date).date() + elif isinstance(date, datetime.datetime): + python_date = date.date() + else: + python_date = date - to_return['meta']['date'] = date.isoformat() + to_return['meta']['date'] = python_date.isoformat() for i in range(period): - d = date - timedelta(days=i) + d = python_date - timedelta(days=i) rank = self.asn_rank(asn, d, source, ipversion) if 'response' not in rank: rank = 0 to_return['response'].insert(0, (d.isoformat(), rank['response'])) return to_return - def country_rank(self, country: str, date: Dates=datetime.date.today(), source: Union[list, str]='', + def country_rank(self, country: str, date: Optional[Dates]=None, source: Union[list, str]='', ipversion: str='v4'): - to_return = {'meta': {'country': country, 'ipversion': ipversion, - 'source': source}, - 'response': []} + to_return: Dict[str, Any] = { + 'meta': {'country': country, 'ipversion': ipversion, 'source': source}, + 'response': [] + } d = self.__normalize_date(date) to_return['meta']['date'] = d @@ -224,29 +254,31 @@ class Querying(): logging.warning(f'Invalid response: {response}') # FIXME: return something return 0, [(0, 0)] - routed_asns = response['data']['countries'][0]['routed'] + routed_asns = re.findall(r"AsnSingle\(([\d]*)\)", response['data']['countries'][0]['routed']) ranks = [self.asn_rank(asn, d, source, ipversion)['response'] for asn in routed_asns] to_return['response'] = [sum(ranks), zip(routed_asns, ranks)] return to_return def country_history(self, country: Union[list, str], period: int=30, source: Union[list, str]='', - ipversion: str='v4', date: Dates=datetime.date.today()): - to_return = {} - to_return = {'meta': {'country': country, 'ipversion': ipversion, - 'source': source}, - 'response': defaultdict(list)} - - if isinstance(date, str): - date = parse(date).date() - if date + timedelta(days=period / 3) > datetime.date.today(): - # the period to display will be around the date passed at least 2/3 before the date, at most 1/3 after - date = datetime.date.today() + ipversion: str='v4', date: Optional[Dates]=None): + to_return: Dict[str, Any] = { + 'meta': {'country': country, 'ipversion': ipversion, 'source': source}, + 'response': defaultdict(list) + } + if date is None: + python_date: datetime.date = 
datetime.date.today() + elif isinstance(date, str): + python_date = parse(date).date() + elif isinstance(date, datetime.datetime): + python_date = date.date() + else: + python_date = date if isinstance(country, str): country = [country] for c in country: for i in range(period): - d = date - timedelta(days=i) + d = python_date - timedelta(days=i) rank, details = self.country_rank(c, d, source, ipversion)['response'] if rank is None: rank = 0 @@ -257,9 +289,8 @@ class Querying(): pass def get_sources_configs(self): - config_dir = get_config_path() / 'modules' loaded = [] - for modulepath in config_dir.glob('*.json'): + for modulepath in get_modules(): with open(modulepath) as f: loaded.append(json.load(f)) return {'{}-{}'.format(config['vendor'], config['name']): config for config in loaded} diff --git a/bgpranking/config/bgpranking.json b/bgpranking/config/bgpranking.json deleted file mode 100644 index ea5e28e..0000000 --- a/bgpranking/config/bgpranking.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "ipasnhistory_url": "https://ipasnhistory.circl.lu/" -} diff --git a/bgpranking/config/modules/NothinkSNMP.json b/bgpranking/config/modules/NothinkSNMP.json deleted file mode 100644 index 80cf8b6..0000000 --- a/bgpranking/config/modules/NothinkSNMP.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "url": "http://www.nothink.org/blacklist/blacklist_snmp_day.txt", - "vendor": "nothink", - "name": "snmp", - "impact": 5, - "parser": ".parsers.nothink" -} diff --git a/bgpranking/config/modules/NothinkSSH.json b/bgpranking/config/modules/NothinkSSH.json deleted file mode 100644 index 002ff94..0000000 --- a/bgpranking/config/modules/NothinkSSH.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "url": "http://www.nothink.org/blacklist/blacklist_ssh_day.txt", - "vendor": "nothink", - "name": "ssh", - "impact": 5, - "parser": ".parsers.nothink" -} diff --git a/bgpranking/config/modules/NothinkTelnet.json b/bgpranking/config/modules/NothinkTelnet.json deleted file mode 100644 index d8a1411..0000000 --- a/bgpranking/config/modules/NothinkTelnet.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "url": "http://www.nothink.org/blacklist/blacklist_telnet_day.txt", - "vendor": "nothink", - "name": "telnet", - "impact": 5, - "parser": ".parsers.nothink" -} diff --git a/bgpranking/config/modules/PalevotrackerIPBlockList.json b/bgpranking/config/modules/PalevotrackerIPBlockList.json deleted file mode 100644 index 89c1574..0000000 --- a/bgpranking/config/modules/PalevotrackerIPBlockList.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "url": "https://palevotracker.abuse.ch/blocklists.php?download=ipblocklist", - "vendor": "palevotracker", - "name": "ipblocklist", - "impact": 5 -} diff --git a/bgpranking/config/modules/SSHBlackListBase.json b/bgpranking/config/modules/SSHBlackListBase.json deleted file mode 100644 index a46d110..0000000 --- a/bgpranking/config/modules/SSHBlackListBase.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "url": "https://www.openbl.org/lists/base.txt", - "vendor": "sshbl", - "name": "base", - "impact": 5 -} diff --git a/bgpranking/config/modules/ZeustrackerIPBlockList.json b/bgpranking/config/modules/ZeustrackerIPBlockList.json deleted file mode 100644 index 3c4f65b..0000000 --- a/bgpranking/config/modules/ZeustrackerIPBlockList.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "url": "https://zeustracker.abuse.ch/blocklist.php?download=ipblocklist", - "vendor": "zeustracker", - "name": "ipblocklist", - "impact": 5 -} diff --git a/bgpranking/dbinsert.py b/bgpranking/dbinsert.py deleted file mode 100644 index 2541fce..0000000 --- a/bgpranking/dbinsert.py 
+++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import logging -import time -from redis import StrictRedis -from .libs.helpers import shutdown_requested, set_running, unset_running, get_socket_path, get_ipasn, sanity_check_ipasn - - -class DatabaseInsert(): - - def __init__(self, loglevel: int=logging.DEBUG): - self.__init_logger(loglevel) - self.ardb_storage = StrictRedis(unix_socket_path=get_socket_path('storage'), decode_responses=True) - self.redis_sanitized = StrictRedis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True) - self.ipasn = get_ipasn() - self.logger.debug('Starting import') - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - - def insert(self): - ready, message = sanity_check_ipasn(self.ipasn) - if not ready: - # Try again later. - self.logger.warning(message) - return - self.logger.debug(message) - - set_running(self.__class__.__name__) - while True: - if shutdown_requested(): - break - try: - if not self.ipasn.is_up: - break - except Exception: - self.logger.warning('Unable to query ipasnhistory') - time.sleep(10) - continue - uuids = self.redis_sanitized.spop('to_insert', 100) - if not uuids: - break - p = self.redis_sanitized.pipeline(transaction=False) - [p.hgetall(uuid) for uuid in uuids] - sanitized_data = p.execute() - - for_query = [] - for i, uuid in enumerate(uuids): - data = sanitized_data[i] - if not data: - self.logger.warning(f'No data for UUID {uuid}. This should not happen, but lets move on.') - continue - for_query.append({'ip': data['ip'], 'address_family': data['address_family'], 'source': 'caida', - 'date': data['datetime'], 'precision_delta': {'days': 3}}) - try: - responses = self.ipasn.mass_query(for_query) - except Exception: - self.logger.exception('Mass query in IPASN History failed, trying again later.') - # Rollback the spop - self.redis_sanitized.sadd('to_insert', *uuids) - time.sleep(10) - continue - retry = [] - done = [] - ardb_pipeline = self.ardb_storage.pipeline(transaction=False) - for i, uuid in enumerate(uuids): - data = sanitized_data[i] - if not data: - self.logger.warning(f'No data for UUID {uuid}. This should not happen, but lets move on.') - continue - routing_info = responses['responses'][i]['response'] # our queries are on one single date, not a range - # Data gathered from IPASN History: - # * IP Block of the IP - # * AS number - if 'error' in routing_info: - self.logger.warning(f"Unable to find routing information for {data['ip']} - {data['datetime']}: {routing_info['error']}") - continue - # Single date query, getting from the object - datetime_routing = list(routing_info.keys())[0] - entry = routing_info[datetime_routing] - if not entry: - # routing info is missing, need to try again later. - retry.append(uuid) - continue - if 'asn' in entry and entry['asn'] is None: - self.logger.warning(f"Unable to find the AS number associated to {data['ip']} - {data['datetime']} (got None). This should not happen...") - continue - if 'prefix' in entry and entry['prefix'] is None: - self.logger.warning(f"Unable to find the prefix associated to {data['ip']} - {data['datetime']} (got None). 
This should not happen...") - continue - - # Format: |sources -> set([, ...]) - ardb_pipeline.sadd(f"{data['date']}|sources", data['source']) - - # Format: | -> set([, ...]) - ardb_pipeline.sadd(f"{data['date']}|{data['source']}", entry['asn']) - # Format: || -> set([, ...]) - ardb_pipeline.sadd(f"{data['date']}|{data['source']}|{entry['asn']}", entry['prefix']) - - # Format: ||| -> set([|, ...]) - ardb_pipeline.sadd(f"{data['date']}|{data['source']}|{entry['asn']}|{entry['prefix']}", - f"{data['ip']}|{data['datetime']}") - done.append(uuid) - ardb_pipeline.execute() - p = self.redis_sanitized.pipeline(transaction=False) - if done: - p.delete(*done) - if retry: - p.sadd('to_insert', *retry) - p.execute() - unset_running(self.__class__.__name__) diff --git a/bgpranking/default/__init__.py b/bgpranking/default/__init__.py new file mode 100644 index 0000000..da77683 --- /dev/null +++ b/bgpranking/default/__init__.py @@ -0,0 +1,16 @@ +env_global_name: str = 'BGPRANKING_HOME' + +from .exceptions import (BGPRankingException, FetcherException, ArchiveException, # noqa + CreateDirectoryException, MissingEnv, InvalidDateFormat, # noqa + MissingConfigFile, MissingConfigEntry, ThirdPartyUnreachable) # noqa + +# NOTE: the imports below are there to avoid too long paths when importing the +# classes/methods in the rest of the project while keeping all that in a subdirectory +# and allow to update them easily. +# You should not have to change anything in this file below this line. + +from .abstractmanager import AbstractManager # noqa + +from .exceptions import MissingEnv, CreateDirectoryException, ConfigError # noqa + +from .helpers import get_homedir, load_configs, get_config, safe_create_dir, get_socket_path, try_make_file # noqa diff --git a/bgpranking/default/abstractmanager.py b/bgpranking/default/abstractmanager.py new file mode 100644 index 0000000..c15fa92 --- /dev/null +++ b/bgpranking/default/abstractmanager.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import asyncio +import logging +import signal +import time +from abc import ABC +from datetime import datetime, timedelta +from subprocess import Popen +from typing import List, Optional, Tuple + +from redis import Redis +from redis.exceptions import ConnectionError + +from .helpers import get_socket_path + + +class AbstractManager(ABC): + + script_name: str + + def __init__(self, loglevel: int=logging.DEBUG): + self.loglevel = loglevel + self.logger = logging.getLogger(f'{self.__class__.__name__}') + self.logger.setLevel(loglevel) + self.logger.info(f'Initializing {self.__class__.__name__}') + self.process: Optional[Popen] = None + self.__redis = Redis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) + + @staticmethod + def is_running() -> List[Tuple[str, float]]: + try: + r = Redis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) + return r.zrangebyscore('running', '-inf', '+inf', withscores=True) + except ConnectionError: + print('Unable to connect to redis, the system is down.') + return [] + + @staticmethod + def force_shutdown(): + try: + r = Redis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) + r.set('shutdown', 1) + except ConnectionError: + print('Unable to connect to redis, the system is down.') + + def set_running(self) -> None: + self.__redis.zincrby('running', 1, self.script_name) + + def unset_running(self) -> None: + current_running = self.__redis.zincrby('running', -1, self.script_name) + if int(current_running) <= 0: + 
self.__redis.zrem('running', self.script_name) + + def long_sleep(self, sleep_in_sec: int, shutdown_check: int=10) -> bool: + if shutdown_check > sleep_in_sec: + shutdown_check = sleep_in_sec + sleep_until = datetime.now() + timedelta(seconds=sleep_in_sec) + while sleep_until > datetime.now(): + time.sleep(shutdown_check) + if self.shutdown_requested(): + return False + return True + + async def long_sleep_async(self, sleep_in_sec: int, shutdown_check: int=10) -> bool: + if shutdown_check > sleep_in_sec: + shutdown_check = sleep_in_sec + sleep_until = datetime.now() + timedelta(seconds=sleep_in_sec) + while sleep_until > datetime.now(): + await asyncio.sleep(shutdown_check) + if self.shutdown_requested(): + return False + return True + + def shutdown_requested(self) -> bool: + try: + return True if self.__redis.exists('shutdown') else False + except ConnectionRefusedError: + return True + except ConnectionError: + return True + + def _to_run_forever(self) -> None: + pass + + def run(self, sleep_in_sec: int) -> None: + self.logger.info(f'Launching {self.__class__.__name__}') + try: + while True: + if self.shutdown_requested(): + break + try: + if self.process: + if self.process.poll() is not None: + self.logger.critical(f'Unable to start {self.script_name}.') + break + else: + self.set_running() + self._to_run_forever() + except Exception: + self.logger.exception(f'Something went terribly wrong in {self.__class__.__name__}.') + finally: + if not self.process: + # self.process means we run an external script, all the time, + # do not unset between sleep. + self.unset_running() + if not self.long_sleep(sleep_in_sec): + break + except KeyboardInterrupt: + self.logger.warning(f'{self.script_name} killed by user.') + finally: + if self.process: + try: + # Killing everything if possible. + self.process.send_signal(signal.SIGWINCH) + self.process.send_signal(signal.SIGTERM) + except Exception: + pass + try: + self.unset_running() + except Exception: + # the services can already be down at that point. + pass + self.logger.info(f'Shutting down {self.__class__.__name__}') + + async def _to_run_forever_async(self) -> None: + pass + + async def run_async(self, sleep_in_sec: int) -> None: + self.logger.info(f'Launching {self.__class__.__name__}') + try: + while True: + if self.shutdown_requested(): + break + try: + if self.process: + if self.process.poll() is not None: + self.logger.critical(f'Unable to start {self.script_name}.') + break + else: + self.set_running() + await self._to_run_forever_async() + except Exception: + self.logger.exception(f'Something went terribly wrong in {self.__class__.__name__}.') + finally: + if not self.process: + # self.process means we run an external script, all the time, + # do not unset between sleep. + self.unset_running() + if not await self.long_sleep_async(sleep_in_sec): + break + except KeyboardInterrupt: + self.logger.warning(f'{self.script_name} killed by user.') + finally: + if self.process: + try: + # Killing everything if possible. + self.process.send_signal(signal.SIGWINCH) + self.process.send_signal(signal.SIGTERM) + except Exception: + pass + try: + self.unset_running() + except Exception: + # the services can already be down at that point. 
+ pass + self.logger.info(f'Shutting down {self.__class__.__name__}') diff --git a/bgpranking/libs/exceptions.py b/bgpranking/default/exceptions.py similarity index 91% rename from bgpranking/libs/exceptions.py rename to bgpranking/default/exceptions.py index 68805f1..4433072 100644 --- a/bgpranking/libs/exceptions.py +++ b/bgpranking/default/exceptions.py @@ -36,3 +36,7 @@ class MissingConfigEntry(BGPRankingException): class ThirdPartyUnreachable(BGPRankingException): pass + + +class ConfigError(BGPRankingException): + pass diff --git a/bgpranking/default/helpers.py b/bgpranking/default/helpers.py new file mode 100644 index 0000000..4a54e6e --- /dev/null +++ b/bgpranking/default/helpers.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import json +import logging +import os +from functools import lru_cache +from pathlib import Path +from typing import Any, Dict, Optional, Union + +from . import env_global_name +from .exceptions import ConfigError, CreateDirectoryException, MissingEnv + +configs: Dict[str, Dict[str, Any]] = {} +logger = logging.getLogger('Helpers') + + +@lru_cache(64) +def get_homedir() -> Path: + if not os.environ.get(env_global_name): + # Try to open a .env file in the home directory if it exists. + if (Path(__file__).resolve().parent.parent.parent / '.env').exists(): + with (Path(__file__).resolve().parent.parent.parent / '.env').open() as f: + for line in f: + key, value = line.strip().split('=', 1) + if value[0] in ['"', "'"]: + value = value[1:-1] + os.environ[key] = value + + if not os.environ.get(env_global_name): + guessed_home = Path(__file__).resolve().parent.parent.parent + raise MissingEnv(f"{env_global_name} is missing. \ +Run the following command (assuming you run the code from the cloned repository):\ + export {env_global_name}='{guessed_home}'") + return Path(os.environ[env_global_name]) + + +@lru_cache(64) +def load_configs(path_to_config_files: Optional[Union[str, Path]]=None): + global configs + if configs: + return + if path_to_config_files: + if isinstance(path_to_config_files, str): + config_path = Path(path_to_config_files) + else: + config_path = path_to_config_files + else: + config_path = get_homedir() / 'config' + if not config_path.exists(): + raise ConfigError(f'Configuration directory {config_path} does not exist.') + elif not config_path.is_dir(): + raise ConfigError(f'Configuration directory {config_path} is not a directory.') + + configs = {} + for path in config_path.glob('*.json'): + with path.open() as _c: + configs[path.stem] = json.load(_c) + + +@lru_cache(64) +def get_config(config_type: str, entry: str, quiet: bool=False) -> Any: + """Get an entry from the given config_type file.
Automatic fallback to the sample file""" + global configs + if not configs: + load_configs() + if config_type in configs: + if entry in configs[config_type]: + return configs[config_type][entry] + else: + if not quiet: + logger.warning(f'Unable to find {entry} in config file.') + else: + if not quiet: + logger.warning(f'No {config_type} config file available.') + if not quiet: + logger.warning(f'Falling back on sample config, please initialize the {config_type} config file.') + with (get_homedir() / 'config' / f'{config_type}.json.sample').open() as _c: + sample_config = json.load(_c) + return sample_config[entry] + + +def safe_create_dir(to_create: Path) -> None: + if to_create.exists() and not to_create.is_dir(): + raise CreateDirectoryException(f'The path {to_create} already exists and is not a directory') + to_create.mkdir(parents=True, exist_ok=True) + + +def get_socket_path(name: str) -> str: + mapping = { + 'cache': Path('cache', 'cache.sock'), + 'intake': Path('temp', 'intake.sock'), + 'prepare': Path('temp', 'prepare.sock') + } + return str(get_homedir() / mapping[name]) + + +def try_make_file(filename: Path): + try: + filename.touch(exist_ok=False) + return True + except FileExistsError: + return False diff --git a/bgpranking/helpers.py b/bgpranking/helpers.py new file mode 100644 index 0000000..fd0c260 --- /dev/null +++ b/bgpranking/helpers.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import json +from functools import lru_cache +from pathlib import Path +from typing import Dict, List + +import requests + +from pyipasnhistory import IPASNHistory + +from .default import get_homedir, get_config, ThirdPartyUnreachable, safe_create_dir + + +@lru_cache(64) +def get_data_dir() -> Path: + capture_dir = get_homedir() / 'rawdata' + safe_create_dir(capture_dir) + return capture_dir + + +@lru_cache(64) +def get_modules_dir() -> Path: + modules_dir = get_homedir() / 'config' / 'modules' + safe_create_dir(modules_dir) + return modules_dir + + +@lru_cache(64) +def get_modules() -> List[Path]: + return [modulepath for modulepath in get_modules_dir().glob('*.json')] + + +@lru_cache(64) +def load_all_modules_configs() -> Dict[str, Dict]: + configs = {} + for p in get_modules(): + with p.open() as f: + j = json.load(f) + configs[f"{j['vendor']}-{j['name']}"] = j + return configs + + +def get_ipasn(): + ipasnhistory_url = get_config('generic', 'ipasnhistory_url') + ipasn = IPASNHistory(ipasnhistory_url) + if not ipasn.is_up: + raise ThirdPartyUnreachable(f"Unable to reach IPASNHistory on {ipasnhistory_url}") + return ipasn + + +def sanity_check_ipasn(ipasn): + try: + meta = ipasn.meta() + except requests.exceptions.ConnectionError: + return False, "IP ASN History is not reachable, try again later." + + if 'error' in meta: + raise ThirdPartyUnreachable(f'IP ASN History has a problem: {meta["error"]}') + + v4_percent = meta['cached_dates']['caida']['v4']['percent'] + v6_percent = meta['cached_dates']['caida']['v6']['percent'] + if v4_percent < 90 or v6_percent < 90: # (this way it works if we only load 10 days) + # Try again later. 
+ return False, f"IP ASN History is not ready: v4 {v4_percent}% / v6 {v6_percent}% loaded" + return True, f"IP ASN History is ready: v4 {v4_percent}% / v6 {v6_percent}% loaded" diff --git a/bgpranking/libs/helpers.py b/bgpranking/libs/helpers.py deleted file mode 100644 index 067aecd..0000000 --- a/bgpranking/libs/helpers.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import os -import sys -from pathlib import Path -from .exceptions import CreateDirectoryException, MissingEnv, MissingConfigFile, MissingConfigEntry, ThirdPartyUnreachable -from redis import StrictRedis -from redis.exceptions import ConnectionError -from datetime import datetime, timedelta -import time -try: - import simplejson as json -except ImportError: - import json -from pyipasnhistory import IPASNHistory - - -def load_config_files(config_dir: Path=None) -> dict: - if not config_dir: - config_dir = get_config_path() - modules_config = config_dir / 'modules' - modules_paths = [modulepath for modulepath in modules_config.glob('*.json')] - configs = {} - for p in modules_paths: - with open(p, 'r') as f: - j = json.load(f) - configs[f"{j['vendor']}-{j['name']}"] = j - return configs - - -def get_config_path(): - if Path('bgpranking', 'config').exists(): - # Running from the repository - return Path('bgpranking', 'config') - return Path(sys.modules['bgpranking'].__file__).parent / 'config' - - -def get_list_storage_path(): - if not os.environ.get('VIRTUAL_ENV'): - raise MissingEnv("VIRTUAL_ENV is missing. This project really wants to run from a virtual envoronment.") - return Path(os.environ['VIRTUAL_ENV']) - - -def get_homedir(): - if not os.environ.get('BGPRANKING_HOME'): - guessed_home = Path(__file__).resolve().parent.parent.parent - raise MissingEnv(f"BGPRANKING_HOME is missing. 
\ -Run the following command (assuming you run the code from the clonned repository):\ - export BGPRANKING_HOME='{guessed_home}'") - return Path(os.environ['BGPRANKING_HOME']) - - -def safe_create_dir(to_create: Path): - if to_create.exists() and not to_create.is_dir(): - raise CreateDirectoryException(f'The path {to_create} already exists and is not a directory') - os.makedirs(to_create, exist_ok=True) - - -def set_running(name: str): - r = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - r.hset('running', name, 1) - - -def unset_running(name: str): - r = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - r.hdel('running', name) - - -def is_running(): - r = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - return r.hgetall('running') - - -def get_socket_path(name: str): - mapping = { - 'cache': Path('cache', 'cache.sock'), - 'storage': Path('storage', 'storage.sock'), - 'intake': Path('temp', 'intake.sock'), - 'prepare': Path('temp', 'prepare.sock'), - } - return str(get_homedir() / mapping[name]) - - -def load_general_config(): - general_config_file = get_config_path() / 'bgpranking.json' - if not general_config_file.exists(): - raise MissingConfigFile(f'The general configuration file ({general_config_file}) does not exists.') - with open(general_config_file) as f: - config = json.load(f) - return config, general_config_file - - -def get_ipasn(): - config, general_config_file = load_general_config() - if 'ipasnhistory_url' not in config: - raise MissingConfigEntry(f'"ipasnhistory_url" is missing in {general_config_file}.') - ipasn = IPASNHistory(config['ipasnhistory_url']) - if not ipasn.is_up: - raise ThirdPartyUnreachable(f"Unable to reach IPASNHistory on {config['ipasnhistory_url']}") - return ipasn - - -def sanity_check_ipasn(ipasn): - meta = ipasn.meta() - if 'error' in meta: - raise ThirdPartyUnreachable(f'IP ASN History has a problem: meta["error"]') - - v4_percent = meta['cached_dates']['caida']['v4']['percent'] - v6_percent = meta['cached_dates']['caida']['v6']['percent'] - if v4_percent < 90 or v6_percent < 90: # (this way it works if we only load 10 days) - # Try again later. - return False, f"IP ASN History is not ready: v4 {v4_percent}% / v6 {v6_percent}% loaded" - return True, f"IP ASN History is ready: v4 {v4_percent}% / v6 {v6_percent}% loaded" - - -def check_running(name: str): - socket_path = get_socket_path(name) - try: - r = StrictRedis(unix_socket_path=socket_path) - return r.ping() - except ConnectionError: - return False - - -def shutdown_requested(): - try: - r = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - return r.exists('shutdown') - except ConnectionRefusedError: - return True - except ConnectionError: - return True - - -def long_sleep(sleep_in_sec: int, shutdown_check: int=10): - sleep_until = datetime.now() + timedelta(seconds=sleep_in_sec) - while sleep_until > datetime.now(): - time.sleep(shutdown_check) - if shutdown_requested(): - return False - return True diff --git a/bgpranking/modulesfetcher.py b/bgpranking/modulesfetcher.py deleted file mode 100644 index 4f6212c..0000000 --- a/bgpranking/modulesfetcher.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import aiohttp -from dateutil import parser -from datetime import datetime, date -from hashlib import sha512 # Faster than sha256 on 64b machines. 
-from pathlib import Path -import logging -from pid import PidFile, PidFileError -try: - import simplejson as json -except ImportError: - import json - -from .libs.helpers import safe_create_dir, set_running, unset_running - - -class Fetcher(): - - def __init__(self, config_file: Path, storage_directory: Path, - loglevel: int=logging.DEBUG): - '''Load `config_file`, and store the fetched data into `storage_directory` - Note: if the `config_file` does not provide a URL (the file is - gathered by some oter mean), the fetcher is automatically stoped.''' - with open(config_file, 'r') as f: - module_parameters = json.load(f) - self.vendor = module_parameters['vendor'] - self.listname = module_parameters['name'] - self.__init_logger(loglevel) - self.fetcher = True - if 'url' not in module_parameters: - self.logger.info('No URL to fetch, breaking.') - self.fetcher = False - return - self.url = module_parameters['url'] - self.logger.debug(f'Starting fetcher on {self.url}') - self.directory = storage_directory / self.vendor / self.listname - safe_create_dir(self.directory) - self.meta = self.directory / 'meta' - safe_create_dir(self.meta) - self.archive_dir = self.directory / 'archive' - safe_create_dir(self.archive_dir) - self.first_fetch = True - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - self.logger.setLevel(loglevel) - - async def __get_last_modified(self): - async with aiohttp.ClientSession() as session: - async with session.head(self.url) as r: - headers = r.headers - if 'Last-Modified' in headers: - return parser.parse(headers['Last-Modified']) - return None - - async def __newer(self): - '''Check if the file available for download is newed than the one - already downloaded by checking the `Last-Modified` header. - Note: return False if the file containing the last header content - is not existing, or the header doesn't have this key. - ''' - last_modified_path = self.meta / 'lastmodified' - if not last_modified_path.exists(): - # The file doesn't exists - if not self.first_fetch: - # The URL has no Last-Modified header, we cannot use it. - self.logger.debug('No Last-Modified header available') - return True - self.first_fetch = False - last_modified = await self.__get_last_modified() - if last_modified: - self.logger.debug('Last-Modified header available') - with last_modified_path.open('w') as f: - f.write(last_modified.isoformat()) - else: - self.logger.debug('No Last-Modified header available') - return True - with last_modified_path.open() as f: - file_content = f.read() - last_modified_file = parser.parse(file_content) - last_modified = await self.__get_last_modified() - if not last_modified: - # No more Last-Modified header Oo - self.logger.warning(f'{self.listname}: Last-Modified header was present, isn\'t anymore!') - last_modified_path.unlink() - return True - if last_modified > last_modified_file: - self.logger.info('Got a new file.') - with last_modified_path.open('w') as f: - f.write(last_modified.isoformat()) - return True - return False - - def __same_as_last(self, downloaded): - '''Figure out the last downloaded file, check if it is the same as the - newly downloaded one. Returns true if both files have been downloaded the - same day. 
- Note: we check the new and the archive directory because we may have backlog - and the newest file is always the first one we process - ''' - to_check = [] - to_check_new = sorted([f for f in self.directory.iterdir() if f.is_file()]) - if to_check_new: - # we have files waiting to be processed - self.logger.debug('{} file(s) are waiting to be processed'.format(len(to_check_new))) - to_check.append(to_check_new[-1]) - to_check_archive = sorted([f for f in self.archive_dir.iterdir() if f.is_file()]) - if to_check_archive: - # we have files already processed, in the archive - self.logger.debug('{} file(s) have been processed'.format(len(to_check_archive))) - to_check.append(to_check_archive[-1]) - if not to_check: - self.logger.debug('New list, no hisorical files') - # nothing has been downloaded ever, moving on - return False - dl_hash = sha512(downloaded) - for last_file in to_check: - with last_file.open('rb') as f: - last_hash = sha512(f.read()) - if (dl_hash.digest() == last_hash.digest() and - parser.parse(last_file.name.split('.')[0]).date() == date.today()): - self.logger.debug('Same file already downloaded today.') - return True - return False - - async def fetch_list(self): - '''Fetch & store the list''' - if not self.fetcher: - return - set_running(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - try: - with PidFile(f'{self.listname}.pid', piddir=self.meta): - if not await self.__newer(): - unset_running(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - return - async with aiohttp.ClientSession() as session: - async with session.get(self.url) as r: - content = await r.content.read() - if self.__same_as_last(content): - return - self.logger.info('Got a new file \o/') - with (self.directory / '{}.txt'.format(datetime.now().isoformat())).open('wb') as f: - f.write(content) - unset_running(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - except PidFileError: - self.logger.info('Fetcher already running') - finally: - unset_running(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') diff --git a/bgpranking/parser.py b/bgpranking/parser.py deleted file mode 100644 index 444a42b..0000000 --- a/bgpranking/parser.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from datetime import datetime -from pathlib import Path -import logging -try: - import simplejson as json -except ImportError: - import json -import re -from redis import StrictRedis -from uuid import uuid4 -from io import BytesIO -import importlib - -from typing import List -import types - -from .libs.helpers import safe_create_dir, set_running, unset_running, get_socket_path - - -class RawFilesParser(): - - def __init__(self, config_file: Path, storage_directory: Path, - loglevel: int=logging.DEBUG) -> None: - with open(config_file, 'r') as f: - module_parameters = json.load(f) - self.vendor = module_parameters['vendor'] - self.listname = module_parameters['name'] - if 'parser' in module_parameters: - self.parse_raw_file = types.MethodType(importlib.import_module(module_parameters['parser'], 'bgpranking').parse_raw_file, self) - self.source = f'{self.vendor}-{self.listname}' - self.directory = storage_directory / self.vendor / self.listname - safe_create_dir(self.directory) - self.unparsable_dir = self.directory / 'unparsable' - safe_create_dir(self.unparsable_dir) - self.__init_logger(loglevel) - self.redis_intake = StrictRedis(unix_socket_path=get_socket_path('intake'), db=0) - self.logger.debug(f'Starting intake on {self.source}') - - def 
__init_logger(self, loglevel) -> None: - self.logger = logging.getLogger(f'{self.__class__.__name__}-{self.vendor}-{self.listname}') - self.logger.setLevel(loglevel) - - @property - def files_to_parse(self) -> List[Path]: - return sorted([f for f in self.directory.iterdir() if f.is_file()], reverse=True) - - def extract_ipv4(self, bytestream: bytes) -> List[bytes]: - return re.findall(rb'[0-9]+(?:\.[0-9]+){3}', bytestream) - - def strip_leading_zeros(self, ips: List[bytes]) -> List[bytes]: - '''Helper to get rid of leading 0s in an IP list. - Only run it when needed, it is nasty and slow''' - return ['.'.join(str(int(part)) for part in ip.split(b'.')).encode() for ip in ips] - - def parse_raw_file(self, f: BytesIO) -> List[bytes]: - # If the list doesn't provide a time, fallback to current day, midnight - self.datetime = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - return self.extract_ipv4(f.getvalue()) - - def parse_raw_files(self) -> None: - set_running(f'{self.__class__.__name__}-{self.source}') - nb_unparsable_files = len([f for f in self.unparsable_dir.iterdir() if f.is_file()]) - if nb_unparsable_files: - self.logger.warning(f'Was unable to parse {nb_unparsable_files} files.') - try: - for filepath in self.files_to_parse: - self.logger.debug('Parsing {}, {} to go.'.format(filepath, len(self.files_to_parse) - 1)) - with open(filepath, 'rb') as f: - to_parse = BytesIO(f.read()) - p = self.redis_intake.pipeline() - for ip in self.parse_raw_file(to_parse): - if isinstance(ip, tuple): - ip, datetime = ip - else: - datetime = self.datetime - uuid = uuid4() - p.hmset(str(uuid), {'ip': ip, 'source': self.source, - 'datetime': datetime.isoformat()}) - p.sadd('intake', str(uuid)) - p.execute() - self._archive(filepath) - except Exception as e: - self.logger.exception("That didn't go well") - self._unparsable(filepath) - finally: - unset_running(f'{self.__class__.__name__}-{self.source}') - - def _archive(self, filepath: Path) -> None: - '''After processing, move file to the archive directory''' - filepath.rename(self.directory / 'archive' / filepath.name) - - def _unparsable(self, filepath: Path) -> None: - '''After processing, move file to the archive directory''' - filepath.rename(self.unparsable_dir / filepath.name) diff --git a/bgpranking/parsers/bambenekconsulting.py b/bgpranking/parsers/bambenekconsulting.py deleted file mode 100644 index 2227217..0000000 --- a/bgpranking/parsers/bambenekconsulting.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from dateutil.parser import parse -import re -from io import BytesIO - -from typing import List - - -def parse_raw_file(self, f: BytesIO) -> List[bytes]: - if re.findall(b'This feed is not generated for this family', f.getvalue()): - return [] - - self.datetime = parse(re.findall(b'## Feed generated at: (.*)\n', f.getvalue())[0]) - return self.extract_ipv4(f.getvalue()) diff --git a/bgpranking/ranking.py b/bgpranking/ranking.py deleted file mode 100644 index 9ebd450..0000000 --- a/bgpranking/ranking.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import logging -from redis import StrictRedis -from .libs.helpers import set_running, unset_running, get_socket_path, load_config_files, get_ipasn, sanity_check_ipasn -from datetime import datetime, date, timedelta -from ipaddress import ip_network -from pathlib import Path - - -class Ranking(): - - def __init__(self, config_dir: Path=None, loglevel: int=logging.DEBUG): - self.__init_logger(loglevel) - 
self.storage = StrictRedis(unix_socket_path=get_socket_path('storage'), decode_responses=True) - self.ranking = StrictRedis(unix_socket_path=get_socket_path('storage'), db=1, decode_responses=True) - self.ipasn = get_ipasn() - self.config_dir = config_dir - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - - def rank_a_day(self, day: str, config_files: dict): - asns_aggregation_key_v4 = f'{day}|asns|v4' - asns_aggregation_key_v6 = f'{day}|asns|v6' - to_delete = set([asns_aggregation_key_v4, asns_aggregation_key_v6]) - r_pipeline = self.ranking.pipeline() - cached_meta = {} - for source in self.storage.smembers(f'{day}|sources'): - self.logger.info(f'{day} - Ranking source: {source}') - source_aggregation_key_v4 = f'{day}|{source}|asns|v4' - source_aggregation_key_v6 = f'{day}|{source}|asns|v6' - to_delete.update([source_aggregation_key_v4, source_aggregation_key_v6]) - for asn in self.storage.smembers(f'{day}|{source}'): - prefixes_aggregation_key_v4 = f'{day}|{asn}|v4' - prefixes_aggregation_key_v6 = f'{day}|{asn}|v6' - to_delete.update([prefixes_aggregation_key_v4, prefixes_aggregation_key_v6]) - if asn == '0': - # Default ASN when no matches. Probably spoofed. - continue - self.logger.debug(f'{day} - Ranking source: {source} / ASN: {asn}') - asn_rank_v4 = 0.0 - asn_rank_v6 = 0.0 - for prefix in self.storage.smembers(f'{day}|{source}|{asn}'): - if prefix == 'None': - # This should not happen and requires a DB cleanup. - self.logger.critical(f'Fucked up prefix in "{day}|{source}|{asn}"') - continue - ips = set([ip_ts.split('|')[0] - for ip_ts in self.storage.smembers(f'{day}|{source}|{asn}|{prefix}')]) - py_prefix = ip_network(prefix) - prefix_rank = float(len(ips)) / py_prefix.num_addresses - r_pipeline.zadd(f'{day}|{source}|{asn}|v{py_prefix.version}|prefixes', {prefix: prefix_rank}) - if py_prefix.version == 4: - asn_rank_v4 += len(ips) * config_files[source]['impact'] - r_pipeline.zincrby(prefixes_aggregation_key_v4, prefix_rank * config_files[source]['impact'], prefix) - else: - asn_rank_v6 += len(ips) * config_files[source]['impact'] - r_pipeline.zincrby(prefixes_aggregation_key_v6, prefix_rank * config_files[source]['impact'], prefix) - if asn in cached_meta: - v4info = cached_meta[asn]['v4'] - v6info = cached_meta[asn]['v6'] - else: - v4info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v4', date=day) - v6info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v6', date=day) - cached_meta[asn] = {'v4': v4info, 'v6': v6info} - ipasnhistory_date_v4 = list(v4info['response'].keys())[0] - v4count = v4info['response'][ipasnhistory_date_v4][asn]['ipcount'] - ipasnhistory_date_v6 = list(v6info['response'].keys())[0] - v6count = v6info['response'][ipasnhistory_date_v6][asn]['ipcount'] - if v4count: - asn_rank_v4 /= float(v4count) - if asn_rank_v4: - r_pipeline.set(f'{day}|{source}|{asn}|v4', asn_rank_v4) - r_pipeline.zincrby(asns_aggregation_key_v4, asn_rank_v4, asn) - r_pipeline.zadd(source_aggregation_key_v4, {asn: asn_rank_v4}) - if v6count: - asn_rank_v6 /= float(v6count) - if asn_rank_v6: - r_pipeline.set(f'{day}|{source}|{asn}|v6', asn_rank_v6) - r_pipeline.zincrby(asns_aggregation_key_v6, asn_rank_v6, asn) - r_pipeline.zadd(source_aggregation_key_v6, {asn: asn_rank_v6}) - self.ranking.delete(*to_delete) - r_pipeline.execute() - - def compute(self): - config_files = load_config_files(self.config_dir) - ready, message = sanity_check_ipasn(self.ipasn) - if not ready: - # 
Try again later. - self.logger.warning(message) - return - self.logger.debug(message) - - self.logger.info('Start ranking') - set_running(self.__class__.__name__) - today = date.today() - now = datetime.now() - today12am = now.replace(hour=12, minute=0, second=0, microsecond=0) - if now < today12am: - # Compute yesterday and today's ranking (useful when we have lists generated only once a day) - self.rank_a_day((today - timedelta(days=1)).isoformat(), config_files) - self.rank_a_day(today.isoformat(), config_files) - unset_running(self.__class__.__name__) - self.logger.info('Ranking done.') diff --git a/bgpranking/sanitizer.py b/bgpranking/sanitizer.py deleted file mode 100644 index f4654f7..0000000 --- a/bgpranking/sanitizer.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from datetime import timezone -from dateutil import parser -import logging -from redis import StrictRedis -import ipaddress - -from .libs.helpers import shutdown_requested, set_running, unset_running, get_socket_path, get_ipasn, sanity_check_ipasn - - -class Sanitizer(): - - def __init__(self, loglevel: int=logging.DEBUG): - self.__init_logger(loglevel) - self.redis_intake = StrictRedis(unix_socket_path=get_socket_path('intake'), db=0, decode_responses=True) - self.redis_sanitized = StrictRedis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True) - self.ipasn = get_ipasn() - self.logger.debug('Starting import') - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - - def sanitize(self): - ready, message = sanity_check_ipasn(self.ipasn) - if not ready: - # Try again later. - self.logger.warning(message) - return - self.logger.debug(message) - - set_running(self.__class__.__name__) - while True: - if shutdown_requested() or not self.ipasn.is_up: - break - uuids = self.redis_intake.spop('intake', 100) - if not uuids: - break - for_cache = [] - pipeline = self.redis_sanitized.pipeline(transaction=False) - for uuid in uuids: - data = self.redis_intake.hgetall(uuid) - try: - ip = ipaddress.ip_address(data['ip']) - if isinstance(ip, ipaddress.IPv6Address): - address_family = 'v6' - else: - address_family = 'v4' - except ValueError: - self.logger.info(f"Invalid IP address: {data['ip']}") - continue - except KeyError: - self.logger.info(f"Invalid entry {data}") - continue - - if not ip.is_global: - self.logger.info(f"The IP address {data['ip']} is not global") - continue - - datetime = parser.parse(data['datetime']) - if datetime.tzinfo: - # Make sure the datetime isn't TZ aware, and UTC. - datetime = datetime.astimezone(timezone.utc).replace(tzinfo=None) - - for_cache.append({'ip': str(ip), 'address_family': address_family, 'source': 'caida', - 'date': datetime.isoformat(), 'precision_delta': {'days': 3}}) - - # Add to temporay DB for further processing - pipeline.hmset(uuid, {'ip': str(ip), 'source': data['source'], 'address_family': address_family, - 'date': datetime.date().isoformat(), 'datetime': datetime.isoformat()}) - pipeline.sadd('to_insert', uuid) - pipeline.execute() - self.redis_intake.delete(*uuids) - - try: - # Just cache everything so the lookup scripts can do their thing. 
- self.ipasn.mass_cache(for_cache) - except Exception: - self.logger.exception('Mass cache in IPASN History failed, trying again later.') - # Rollback the spop - self.redis_intake.sadd('intake', *uuids) - break - unset_running(self.__class__.__name__) diff --git a/bgpranking/shadowserverfetcher.py b/bgpranking/shadowserverfetcher.py deleted file mode 100644 index 0d79319..0000000 --- a/bgpranking/shadowserverfetcher.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import aiohttp -import logging -from bs4 import BeautifulSoup -from dateutil.parser import parse -from typing import Tuple -from datetime import datetime, date -from pathlib import Path -from .libs.helpers import safe_create_dir, set_running, unset_running -try: - import simplejson as json -except ImportError: - import json - - -class ShadowServerFetcher(): - - def __init__(self, user, password, config_path_modules: Path, storage_directory: Path, - loglevel: int=logging.DEBUG) -> None: - self.__init_logger(loglevel) - self.storage_directory = storage_directory - self.config_path_modules = config_path_modules - self.user = user - self.password = password - self.index_page = 'https://dl.shadowserver.org/reports/index.php' - self.vendor = 'shadowserver' - self.known_list_types = ('blacklist', 'botnet', 'cc', 'cisco', 'cwsandbox', 'drone', - 'microsoft', 'scan', 'sinkhole6', 'sinkhole', 'outdated', - 'compromised', 'hp', 'darknet', 'ddos') - self.first_available_day = None - self.last_available_day = None - self.available_entries = {} - - def __init_logger(self, loglevel): - self.logger = logging.getLogger(f'{self.__class__.__name__}') - self.logger.setLevel(loglevel) - - async def __get_index(self): - auth_details = {'user': self.user, 'password': self.password, 'login': 'Login'} - async with aiohttp.ClientSession() as s: - self.logger.debug('Fetching the index.') - async with s.post(self.index_page, data=auth_details) as r: - return await r.text() - - async def __build_daily_dict(self): - html_index = await self.__get_index() - soup = BeautifulSoup(html_index, 'html.parser') - treeview = soup.find(id='treemenu1') - for y in treeview.select(':scope > li'): - year = y.contents[0] - for m in y.contents[1].select(':scope > li'): - month = m.contents[0] - for d in m.contents[1].select(':scope > li'): - day = d.contents[0] - date = parse(f'{year} {month} {day}').date() - self.available_entries[date.isoformat()] = [] - for a in d.contents[1].find_all('a', href=True): - if not self.first_available_day: - self.first_available_day = date - self.last_available_day = date - self.available_entries[date.isoformat()].append((a['href'], a.string)) - self.logger.debug('Dictionary created.') - - def __normalize_day(self, day: Tuple[str, date, datetime]=None) -> str: - if not day: - if not self.last_available_day: - raise Exception('Unable to figure out the last available day. 
You need to run build_daily_dict first') - day = self.last_available_day - else: - if isinstance(day, str): - day = parse(day).date() - elif isinstance(day, datetime): - day = day.date() - return day.isoformat() - - def __split_name(self, name): - type_content, country, list_type = name.split('-') - if '_' in type_content: - type_content, details_type = type_content.split('_', maxsplit=1) - if '_' in details_type: - details_type, sub = details_type.split('_', maxsplit=1) - return list_type, country, (type_content, details_type, sub) - return list_type, country, (type_content, details_type) - return list_type, country, (type_content) - - def __check_config(self, filename: str) -> Path: - self.logger.debug(f'Working on config for {filename}.') - config = {'vendor': 'shadowserver', 'parser': '.parsers.shadowserver'} - type_content, _, type_details = self.__split_name(filename) - prefix = type_content.split('.')[0] - - if isinstance(type_details, str): - main_type = type_details - config['name'] = '{}-{}'.format(prefix, type_details) - else: - main_type = type_details[0] - config['name'] = '{}-{}'.format(prefix, '_'.join(type_details)) - - if main_type not in self.known_list_types: - self.logger.warning(f'Unknown type: {main_type}. Please update the config creator script.') - return None - - if main_type == 'blacklist': - config['impact'] = 5 - elif main_type == 'botnet': - config['impact'] = 2 - elif main_type == 'cc': - config['impact'] = 5 - elif main_type == 'cisco': - config['impact'] = 3 - elif main_type == 'cwsandbox': - config['impact'] = 5 - elif main_type == 'drone': - config['impact'] = 2 - elif main_type == 'microsoft': - config['impact'] = 3 - elif main_type == 'scan': - config['impact'] = 1 - elif main_type == 'sinkhole6': - config['impact'] = 2 - elif main_type == 'sinkhole': - config['impact'] = 2 - else: - config['impact'] = 1 - - if not (self.config_path_modules / f"{config['vendor']}_{config['name']}.json").exists(): - self.logger.debug(f'Creating config file for {filename}.') - with open(self.config_path_modules / f"{config['vendor']}_{config['name']}.json", 'w') as f: - json.dump(config, f, indent=2) - else: - with open(self.config_path_modules / f"{config['vendor']}_{config['name']}.json", 'r') as f: - # Validate new config file with old - config_current = json.load(f) - if config_current != config: - self.logger.warning('The config file created by this script is different from the one on disk: \n{}\n{}'.format(json.dumps(config), json.dumps(config_current))) - # Init list directory - directory = self.storage_directory / config['vendor'] / config['name'] - safe_create_dir(directory) - meta = directory / 'meta' - safe_create_dir(meta) - archive_dir = directory / 'archive' - safe_create_dir(archive_dir) - self.logger.debug(f'Done with config for {filename}.') - return directory - - async def download_daily_entries(self, day: Tuple[str, date, datetime]=None): - set_running(f'{self.__class__.__name__}') - await self.__build_daily_dict() - for url, filename in self.available_entries[self.__normalize_day(day)]: - storage_dir = self.__check_config(filename) - if not storage_dir: - continue - # Check if the file we're trying to download has already been downloaded. Skip if True. 
- uuid = url.split('/')[-1] - if (storage_dir / 'meta' / 'last_download').exists(): - with open(storage_dir / 'meta' / 'last_download') as f: - last_download_uuid = f.read() - if last_download_uuid == uuid: - self.logger.debug(f'Already downloaded: {url}.') - continue - async with aiohttp.ClientSession() as s: - async with s.get(url) as r: - self.logger.info(f'Downloading {url}.') - content = await r.content.read() - with (storage_dir / '{}.txt'.format(datetime.now().isoformat())).open('wb') as f: - f.write(content) - with open(storage_dir / 'meta' / 'last_download', 'w') as f: - f.write(uuid) - unset_running(f'{self.__class__.__name__}') diff --git a/bgpranking/libs/statsripe.py b/bgpranking/statsripe.py similarity index 88% rename from bgpranking/libs/statsripe.py rename to bgpranking/statsripe.py index 10f8766..fecd43e 100644 --- a/bgpranking/libs/statsripe.py +++ b/bgpranking/statsripe.py @@ -1,18 +1,19 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import requests -from enum import Enum -from datetime import datetime, timedelta -from ipaddress import IPv4Address, IPv6Address, IPv4Network, IPv6Network -from typing import TypeVar -from .helpers import get_homedir, safe_create_dir -try: - import simplejson as json -except ImportError: - import json -from dateutil.parser import parse import copy +import json + +from datetime import datetime, timedelta +from enum import Enum +from ipaddress import IPv4Address, IPv6Address, IPv4Network, IPv6Network +from typing import TypeVar, Optional, Dict, Any + +import requests + +from dateutil.parser import parse + +from .helpers import get_homedir, safe_create_dir IPTypes = TypeVar('IPTypes', IPv4Address, IPv6Address, 'str') PrefixTypes = TypeVar('PrefixTypes', IPv4Network, IPv6Network, 'str') @@ -84,7 +85,7 @@ class StatsRIPE(): with open(c_path, 'w') as f: json.dump(response, f, indent=2) - def _get(self, method: str, parameters: dict) -> dict: + def _get(self, method: str, parameters: Dict) -> Dict: parameters['sourceapp'] = self.sourceapp cached = self._get_cache(method, parameters) if cached: @@ -100,8 +101,8 @@ class StatsRIPE(): return self._get('network-info', parameters) def prefix_overview(self, prefix: PrefixTypes, min_peers_seeing: int= 0, - max_related: int=0, query_time: TimeTypes=None) -> dict: - parameters = {'resource': prefix} + max_related: int=0, query_time: Optional[TimeTypes]=None) -> dict: + parameters: Dict[str, Any] = {'resource': prefix} if min_peers_seeing: parameters['min_peers_seeing'] = min_peers_seeing if max_related: @@ -110,8 +111,8 @@ class StatsRIPE(): parameters['query_time'] = self.__time_to_text(query_time) return self._get('prefix-overview', parameters) - def ris_asns(self, query_time: TimeTypes=None, list_asns: bool=False, asn_types: ASNsTypes=ASNsTypes.undefined): - parameters = {} + def ris_asns(self, query_time: Optional[TimeTypes]=None, list_asns: bool=False, asn_types: ASNsTypes=ASNsTypes.undefined): + parameters: Dict[str, Any] = {} if list_asns: parameters['list_asns'] = list_asns if asn_types: @@ -120,10 +121,10 @@ class StatsRIPE(): parameters['query_time'] = self.__time_to_text(query_time) return self._get('ris-asns', parameters) - def ris_prefixes(self, asn: int, query_time: TimeTypes=None, + def ris_prefixes(self, asn: int, query_time: Optional[TimeTypes]=None, list_prefixes: bool=False, types: ASNsTypes=ASNsTypes.undefined, af: AddressFamilies=AddressFamilies.undefined, noise: Noise=Noise.keep): - parameters = {'resource': str(asn)} + parameters: Dict[str, Any] = {'resource': str(asn)} if 
query_time: parameters['query_time'] = self.__time_to_text(query_time) if list_prefixes: @@ -136,8 +137,8 @@ class StatsRIPE(): parameters['noise'] = noise.value return self._get('ris-prefixes', parameters) - def country_asns(self, country: str, details: int=0, query_time: TimeTypes=None): - parameters = {'resource': country} + def country_asns(self, country: str, details: int=0, query_time: Optional[TimeTypes]=None): + parameters: Dict[str, Any] = {'resource': country} if details: parameters['lod'] = details if query_time: diff --git a/bgpranking/libs/__init__.py b/bin/__init__.py similarity index 100% rename from bgpranking/libs/__init__.py rename to bin/__init__.py diff --git a/bin/archiver.py b/bin/archiver.py index 87ca19a..8b8584b 100755 --- a/bin/archiver.py +++ b/bin/archiver.py @@ -1,42 +1,79 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from bgpranking.archive import DeepArchive +import json import logging +import zipfile + +from collections import defaultdict +from datetime import date +from logging import Logger from pathlib import Path -from bgpranking.libs.helpers import get_config_path, get_homedir -from pid import PidFile, PidFileError + +from dateutil import parser +from dateutil.relativedelta import relativedelta + +from bgpranking.default import safe_create_dir, AbstractManager +from bgpranking.helpers import get_modules, get_data_dir logger = logging.getLogger('Archiver') logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') -# NOTE: -# * Supposed to run once every ~2 months +class DeepArchive(): -class ModulesArchiver(): - - def __init__(self, config_dir: Path=None, storage_directory: Path=None, loglevel: int=logging.INFO): - if not config_dir: - config_dir = get_config_path() - if not storage_directory: - self.storage_directory = get_homedir() / 'rawdata' - modules_config = config_dir / 'modules' - modules_paths = [modulepath for modulepath in modules_config.glob('*.json')] - self.modules = [DeepArchive(path, self.storage_directory, loglevel) for path in modules_paths] + def __init__(self, config_file: Path, logger: Logger): + '''Archive every file older than 2 months.''' + with config_file.open() as f: + module_parameters = json.load(f) + self.logger = logger + self.vendor = module_parameters['vendor'] + self.listname = module_parameters['name'] + self.directory = get_data_dir() / self.vendor / self.listname / 'archive' + safe_create_dir(self.directory) + self.deep_archive = self.directory / 'deep' + safe_create_dir(self.deep_archive) def archive(self): + to_archive = defaultdict(list) + today = date.today() + last_day_to_keep = date(today.year, today.month, 1) - relativedelta(months=2) + for p in self.directory.iterdir(): + if not p.is_file(): + continue + filedate = parser.parse(p.name.split('.')[0]).date() + if filedate >= last_day_to_keep: + continue + to_archive['{}.zip'.format(filedate.strftime('%Y%m'))].append(p) + if to_archive: + self.logger.info('Found old files.
Archiving: {}'.format(', '.join(to_archive.keys()))) + else: + self.logger.debug('No old files.') + for archivename, path_list in to_archive.items(): + with zipfile.ZipFile(self.deep_archive / archivename, 'x', zipfile.ZIP_DEFLATED) as z: + for f in path_list: + z.write(f, f.name) + # Delete all the files if the archiving worked out properly + [f.unlink() for f in path_list] + + +class ModulesArchiver(AbstractManager): + + def __init__(self, loglevel: int=logging.INFO): + super().__init__(loglevel) + self.script_name = 'archiver' + self.modules = [DeepArchive(path, self.logger) for path in get_modules()] + + def _to_run_forever(self): [module.archive() for module in self.modules] -if __name__ == '__main__': +def main(): archiver = ModulesArchiver() - try: - with PidFile(piddir=archiver.storage_directory): - logger.info('Archiving...') - archiver.archive() - logger.info('... done.') - except PidFileError: - logger.warning('Archiver already running, skip.') + archiver.run(sleep_in_sec=360000) + + +if __name__ == '__main__': + main() diff --git a/bin/asn_descriptions.py b/bin/asn_descriptions.py index feedecf..296e588 100755 --- a/bin/asn_descriptions.py +++ b/bin/asn_descriptions.py @@ -2,28 +2,80 @@ # -*- coding: utf-8 -*- import logging -from pathlib import Path +import re +import requests -from bgpranking.abstractmanager import AbstractManager -from bgpranking.asn_descriptions import ASNDescriptions -from bgpranking.libs.helpers import get_homedir +from dateutil.parser import parse +from redis import Redis + +from bgpranking.default import get_socket_path, safe_create_dir, AbstractManager, get_config +from bgpranking.helpers import get_data_dir logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') -class ASNDescriptionsManager(AbstractManager): +class ASNDescriptions(AbstractManager): - def __init__(self, storage_directory: Path=None, loglevel: int=logging.DEBUG): + def __init__(self, loglevel: int=logging.INFO): super().__init__(loglevel) - if not storage_directory: - storage_directory = get_homedir() / 'rawdata' - self.asn_descr = ASNDescriptions(storage_directory, loglevel) + self.script_name = 'asn_descr' + self.asn_meta = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port'), db=2, decode_responses=True) + self.logger.debug('Starting ASN History') + self.directory = get_data_dir() / 'asn_descriptions' + safe_create_dir(self.directory) + self.archives = self.directory / 'archive' + safe_create_dir(self.archives) + self.url = 'https://www.cidr-report.org/as2.0/autnums.html' + + def __update_available(self): + r = requests.head(self.url) + print(r.headers) + current_last_modified = parse(r.headers['Last-Modified']) + if not self.asn_meta.exists('ans_description_last_update'): + return True + last_update = parse(self.asn_meta.get('ans_description_last_update')) # type: ignore + if last_update < current_last_modified: + return True + return False + + def load_descriptions(self): + if not self.__update_available(): + self.logger.debug('No new file to import.') + return + self.logger.info('Importing new ASN descriptions.') + r = requests.get(self.url) + last_modified = parse(r.headers['Last-Modified']).isoformat() + p = self.asn_meta.pipeline() + new_asn = 0 + new_description = 0 + for asn, descr in re.findall('as=AS(.*)&.* (.*)\n', r.text): + existing_descriptions = self.asn_meta.hgetall(f'{asn}|descriptions') + if not existing_descriptions: + self.logger.debug(f'New ASN: {asn} - {descr}') 
+ p.hset(f'{asn}|descriptions', last_modified, descr) + new_asn += 1 + else: + last_descr = sorted(existing_descriptions.keys(), reverse=True)[0] + if descr != existing_descriptions[last_descr]: + self.logger.debug(f'New description for {asn}: {existing_descriptions[last_descr]} -> {descr}') + p.hset(f'{asn}|descriptions', last_modified, descr) + new_description += 1 + p.set('ans_description_last_update', last_modified) + p.execute() + self.logger.info(f'Done with import. New ASNs: {new_asn}, new descriptions: {new_description}') + if new_asn or new_description: + with open(self.archives / f'{last_modified}.html', 'w') as f: + f.write(r.text) def _to_run_forever(self): - self.asn_descr.load_descriptions() + self.load_descriptions() + + +def main(): + asnd_manager = ASNDescriptions() + asnd_manager.run(sleep_in_sec=3600) if __name__ == '__main__': - asnd_manager = ASNDescriptionsManager() - asnd_manager.run(sleep_in_sec=3600) + main() diff --git a/bin/dbinsert.py b/bin/dbinsert.py index fd101a5..a2759ef 100755 --- a/bin/dbinsert.py +++ b/bin/dbinsert.py @@ -2,8 +2,15 @@ # -*- coding: utf-8 -*- import logging -from bgpranking.abstractmanager import AbstractManager -from bgpranking.dbinsert import DatabaseInsert +import time + +from typing import List + +from redis import Redis + +from bgpranking.default import get_socket_path, AbstractManager, get_config +from bgpranking.helpers import get_ipasn, sanity_check_ipasn + logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') @@ -13,12 +20,107 @@ class DBInsertManager(AbstractManager): def __init__(self, loglevel: int=logging.INFO): super().__init__(loglevel) - self.dbinsert = DatabaseInsert(loglevel) + self.script_name = 'db_insert' + self.kvrocks_storage = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port'), decode_responses=True) + self.redis_sanitized = Redis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True) + self.ipasn = get_ipasn() + self.logger.debug('Starting import') def _to_run_forever(self): - self.dbinsert.insert() + ready, message = sanity_check_ipasn(self.ipasn) + if not ready: + # Try again later. + self.logger.warning(message) + return + self.logger.debug(message) + + while True: + if self.shutdown_requested(): + break + try: + if not self.ipasn.is_up: + break + except Exception: + self.logger.warning('Unable to query ipasnhistory') + time.sleep(10) + continue + uuids: List[str] = self.redis_sanitized.spop('to_insert', 100) # type: ignore + if not uuids: + break + p = self.redis_sanitized.pipeline(transaction=False) + [p.hgetall(uuid) for uuid in uuids] + sanitized_data = p.execute() + + for_query = [] + for i, uuid in enumerate(uuids): + data = sanitized_data[i] + if not data: + self.logger.warning(f'No data for UUID {uuid}. This should not happen, but lets move on.') + continue + for_query.append({'ip': data['ip'], 'address_family': data['address_family'], 'source': 'caida', + 'date': data['datetime'], 'precision_delta': {'days': 3}}) + try: + responses = self.ipasn.mass_query(for_query) + except Exception: + self.logger.exception('Mass query in IPASN History failed, trying again later.') + # Rollback the spop + self.redis_sanitized.sadd('to_insert', *uuids) + time.sleep(10) + continue + retry = [] + done = [] + ardb_pipeline = self.kvrocks_storage.pipeline(transaction=False) + for i, uuid in enumerate(uuids): + data = sanitized_data[i] + if not data: + self.logger.warning(f'No data for UUID {uuid}. 
This should not happen, but lets move on.') + continue + routing_info = responses['responses'][i]['response'] # our queries are on one single date, not a range + # Data gathered from IPASN History: + # * IP Block of the IP + # * AS number + if 'error' in routing_info: + self.logger.warning(f"Unable to find routing information for {data['ip']} - {data['datetime']}: {routing_info['error']}") + continue + # Single date query, getting from the object + datetime_routing = list(routing_info.keys())[0] + entry = routing_info[datetime_routing] + if not entry: + # routing info is missing, need to try again later. + retry.append(uuid) + continue + if 'asn' in entry and entry['asn'] is None: + self.logger.warning(f"Unable to find the AS number associated to {data['ip']} - {data['datetime']} (got None). This should not happen...") + continue + if 'prefix' in entry and entry['prefix'] is None: + self.logger.warning(f"Unable to find the prefix associated to {data['ip']} - {data['datetime']} (got None). This should not happen...") + continue + + # Format: <YYYY-MM-DD>|sources -> set([<source>, ...]) + ardb_pipeline.sadd(f"{data['date']}|sources", data['source']) + + # Format: <YYYY-MM-DD>|<source> -> set([<asn>, ...]) + ardb_pipeline.sadd(f"{data['date']}|{data['source']}", entry['asn']) + # Format: <YYYY-MM-DD>|<source>|<asn> -> set([<prefix>, ...]) + ardb_pipeline.sadd(f"{data['date']}|{data['source']}|{entry['asn']}", entry['prefix']) + + # Format: <YYYY-MM-DD>|<source>|<asn>|<prefix> -> set([<ip>|<datetime>, ...]) + ardb_pipeline.sadd(f"{data['date']}|{data['source']}|{entry['asn']}|{entry['prefix']}", + f"{data['ip']}|{data['datetime']}") + done.append(uuid) + ardb_pipeline.execute() + p = self.redis_sanitized.pipeline(transaction=False) + if done: + p.delete(*done) + if retry: + p.sadd('to_insert', *retry) + p.execute() + + +def main(): + dbinsert = DBInsertManager() + dbinsert.run(sleep_in_sec=120) if __name__ == '__main__': - dbinsert = DBInsertManager() - dbinsert.run(sleep_in_sec=120) + main() diff --git a/bin/fetcher.py b/bin/fetcher.py index d97e385..6816f88 100755 --- a/bin/fetcher.py +++ b/bin/fetcher.py @@ -1,50 +1,177 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import logging +import json import asyncio -from pathlib import Path -import aiohttp +import logging + +from datetime import datetime, date +from hashlib import sha512 # Faster than sha256 on 64b machines.
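For reference, the per-day key hierarchy built by the insert loop above can be walked back out of the storage database. A minimal sketch, assuming the storage kvrocks instance from config/generic.json.sample and a made-up day:

from redis import Redis

storage = Redis('127.0.0.1', 5188, decode_responses=True)
day = '2021-12-06'  # hypothetical day

for source in storage.smembers(f'{day}|sources'):
    for asn in storage.smembers(f'{day}|{source}'):
        for prefix in storage.smembers(f'{day}|{source}|{asn}'):
            # Members are '<ip>|<datetime>' strings, as written by the pipeline above.
            for ip_ts in storage.smembers(f'{day}|{source}|{asn}|{prefix}'):
                ip, seen_at = ip_ts.split('|', 1)
                print(source, asn, prefix, ip, seen_at)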
+from logging import Logger +from pathlib import Path + +import aiohttp +from dateutil import parser +from pid import PidFile, PidFileError # type: ignore + +from bgpranking.default import AbstractManager, safe_create_dir +from bgpranking.helpers import get_modules, get_data_dir, get_modules_dir -from bgpranking.abstractmanager import AbstractManager -from bgpranking.modulesfetcher import Fetcher -from bgpranking.libs.helpers import get_config_path, get_homedir logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') +class Fetcher(): + + def __init__(self, config_file: Path, logger: Logger): + '''Load `config_file`, and store the fetched data into the module's data directory. + Note: if the `config_file` does not provide a URL (the file is + gathered by some other means), the fetcher is automatically stopped.''' + with open(config_file, 'r') as f: + module_parameters = json.load(f) + self.vendor = module_parameters['vendor'] + self.listname = module_parameters['name'] + self.logger = logger + self.fetcher = True + if 'url' not in module_parameters: + self.logger.info(f'{self.vendor}-{self.listname}: No URL to fetch, breaking.') + self.fetcher = False + return + self.url = module_parameters['url'] + self.logger.debug(f'{self.vendor}-{self.listname}: Starting fetcher on {self.url}') + self.directory = get_data_dir() / self.vendor / self.listname + safe_create_dir(self.directory) + self.meta = self.directory / 'meta' + safe_create_dir(self.meta) + self.archive_dir = self.directory / 'archive' + safe_create_dir(self.archive_dir) + self.first_fetch = True + + async def __get_last_modified(self): + async with aiohttp.ClientSession() as session: + async with session.head(self.url) as r: + headers = r.headers + if 'Last-Modified' in headers: + return parser.parse(headers['Last-Modified']) + return None + + async def __newer(self): + '''Check if the file available for download is newer than the one + already downloaded by checking the `Last-Modified` header. + Note: returns True (and triggers a new download) if the file storing the + last header value does not exist yet, or if the header is missing. + ''' + last_modified_path = self.meta / 'lastmodified' + if not last_modified_path.exists(): + # The file doesn't exist + if not self.first_fetch: + # The URL has no Last-Modified header, we cannot use it.
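The two coroutines above implement a conditional fetch: the Last-Modified value of the previous download is cached on disk and a new download only happens when the server advertises a newer one. The same idea as a compact synchronous sketch (using requests instead of aiohttp; the cache file plays the role of meta/lastmodified):

from pathlib import Path

import requests
from dateutil import parser


def should_fetch(url: str, cache_file: Path) -> bool:
    r = requests.head(url)
    if 'Last-Modified' not in r.headers:
        return True  # nothing to compare against, fetch unconditionally
    remote = parser.parse(r.headers['Last-Modified'])
    if not cache_file.exists():
        cache_file.write_text(remote.isoformat())
        return True
    local = parser.parse(cache_file.read_text())
    if remote > local:
        cache_file.write_text(remote.isoformat())
        return True
    return False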
+ self.logger.debug(f'{self.vendor}-{self.listname}: No Last-Modified header available') + return True + self.first_fetch = False + last_modified = await self.__get_last_modified() + if last_modified: + self.logger.debug(f'{self.vendor}-{self.listname}: Last-Modified header available') + with last_modified_path.open('w') as f: + f.write(last_modified.isoformat()) + else: + self.logger.debug(f'{self.vendor}-{self.listname}: No Last-Modified header available') + return True + with last_modified_path.open() as f: + file_content = f.read() + last_modified_file = parser.parse(file_content) + last_modified = await self.__get_last_modified() + if not last_modified: + # No more Last-Modified header Oo + self.logger.warning(f'{self.vendor}-{self.listname}: Last-Modified header was present, isn\'t anymore!') + last_modified_path.unlink() + return True + if last_modified > last_modified_file: + self.logger.info(f'{self.vendor}-{self.listname}: Got a new file.') + with last_modified_path.open('w') as f: + f.write(last_modified.isoformat()) + return True + return False + + def __same_as_last(self, downloaded): + '''Figure out the last downloaded file, check if it is the same as the + newly downloaded one. Returns true if both files have been downloaded the + same day. + Note: we check the new and the archive directory because we may have backlog + and the newest file is always the first one we process + ''' + to_check = [] + to_check_new = sorted([f for f in self.directory.iterdir() if f.is_file()]) + if to_check_new: + # we have files waiting to be processed + self.logger.debug(f'{self.vendor}-{self.listname}: {len(to_check_new)} file(s) are waiting to be processed') + to_check.append(to_check_new[-1]) + to_check_archive = sorted([f for f in self.archive_dir.iterdir() if f.is_file()]) + if to_check_archive: + # we have files already processed, in the archive + self.logger.debug(f'{self.vendor}-{self.listname}: {len(to_check_archive)} file(s) have been processed') + to_check.append(to_check_archive[-1]) + if not to_check: + self.logger.debug(f'{self.vendor}-{self.listname}: New list, no hisorical files') + # nothing has been downloaded ever, moving on + return False + dl_hash = sha512(downloaded) + for last_file in to_check: + with last_file.open('rb') as f: + last_hash = sha512(f.read()) + if (dl_hash.digest() == last_hash.digest() + and parser.parse(last_file.name.split('.')[0]).date() == date.today()): + self.logger.debug(f'{self.vendor}-{self.listname}: Same file already downloaded today.') + return True + return False + + async def fetch_list(self): + '''Fetch & store the list''' + if not self.fetcher: + return + try: + with PidFile(f'{self.listname}.pid', piddir=self.meta): + if not await self.__newer(): + return + async with aiohttp.ClientSession() as session: + async with session.get(self.url) as r: + content = await r.content.read() + if self.__same_as_last(content): + return + self.logger.info(f'{self.vendor}-{self.listname}: Got a new file!') + with (self.directory / '{}.txt'.format(datetime.now().isoformat())).open('wb') as f: + f.write(content) + except PidFileError: + self.logger.info(f'{self.vendor}-{self.listname}: Fetcher already running') + + class ModulesManager(AbstractManager): - def __init__(self, config_dir: Path=None, storage_directory: Path=None, loglevel: int=logging.DEBUG): + def __init__(self, loglevel: int=logging.DEBUG): super().__init__(loglevel) - if not config_dir: - config_dir = get_config_path() - if not storage_directory: - self.storage_directory = get_homedir() / 
'rawdata' - self.modules_config = config_dir / 'modules' - self.modules_paths = [modulepath for modulepath in self.modules_config.glob('*.json')] - self.modules = [Fetcher(path, self.storage_directory, loglevel) for path in self.modules_paths] + self.script_name = 'modules_manager' + self.modules_paths = get_modules() + self.modules = [Fetcher(path, self.logger) for path in self.modules_paths] - def _to_run_forever(self): + async def _to_run_forever_async(self): # Check if there are new config files - new_modules_paths = [modulepath for modulepath in self.modules_config.glob('*.json') if modulepath not in self.modules_paths] - self.modules += [Fetcher(path, self.storage_directory, self.loglevel) for path in new_modules_paths] + new_modules_paths = [modulepath for modulepath in get_modules_dir().glob('*.json') if modulepath not in self.modules_paths] + self.modules += [Fetcher(path, self.logger) for path in new_modules_paths] self.modules_paths += new_modules_paths if self.modules: - loop = asyncio.get_event_loop() - try: - loop.run_until_complete(asyncio.gather( - *[module.fetch_list() for module in self.modules if module.fetcher], - return_exceptions=True) - ) - except aiohttp.client_exceptions.ClientConnectorError as e: - self.logger.critical(f'Exception while fetching lists: {e}') + for module in self.modules: + if module.fetcher: + await module.fetch_list() else: self.logger.info('No config files were found so there are no fetchers running yet. Will try again later.') +def main(): + m = ModulesManager() + asyncio.run(m.run_async(sleep_in_sec=3600)) + + if __name__ == '__main__': - modules_manager = ModulesManager() - modules_manager.run(sleep_in_sec=3600) + main() diff --git a/bin/manual_ranking.py b/bin/manual_ranking.py index 7a132ac..a7f82b7 100644 --- a/bin/manual_ranking.py +++ b/bin/manual_ranking.py @@ -6,8 +6,8 @@ import logging from dateutil.parser import parse from datetime import timedelta -from bgpranking.libs.helpers import load_config_files -from bgpranking.ranking import Ranking +from bgpranking.helpers import load_all_modules_configs +from .ranking import Ranking logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%I:%M:%S') @@ -21,13 +21,13 @@ if __name__ == '__main__': args = parser.parse_args() ranking = Ranking(loglevel=logging.DEBUG) - config_files = load_config_files() + config_files = load_all_modules_configs() if args.day: day = parse(args.day).date().isoformat() - ranking.rank_a_day(day, config_files) + ranking.rank_a_day(day) else: current = parse(args.interval[1]).date() stop_date = parse(args.interval[0]).date() while current >= stop_date: - ranking.rank_a_day(current.isoformat(), config_files) + ranking.rank_a_day(current.isoformat()) current -= timedelta(days=1) diff --git a/bin/monitor.py b/bin/monitor.py deleted file mode 100755 index 2c996d7..0000000 --- a/bin/monitor.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -from bgpranking.monitor import Monitor -import logging - -logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', - level=logging.INFO, datefmt='%I:%M:%S') - - -class MonitorManager(): - - def __init__(self, loglevel: int=logging.INFO): - self.monitor = Monitor() - - def get_values(self): - return self.monitor.get_values() - - -if __name__ == '__main__': - m = MonitorManager() - print(m.get_values()) diff --git a/bin/parser.py b/bin/parser.py index 981cacf..438229c 100755 --- a/bin/parser.py +++ b/bin/parser.py @@ -1,41 +1,124 @@ 
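Note that the refactored _to_run_forever_async above awaits each fetcher one after the other, whereas the removed code gathered them concurrently. If concurrent fetching were ever wanted again, a sketch along the lines of the old behaviour would be:

import asyncio

async def fetch_all(modules):
    # Run every fetcher concurrently; return_exceptions keeps one failing
    # list from aborting the others.
    return await asyncio.gather(
        *[module.fetch_list() for module in modules if module.fetcher],
        return_exceptions=True)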
#!/usr/bin/env python3 # -*- coding: utf-8 -*- +import importlib +import json import logging -from pathlib import Path +import re +import types + +from datetime import datetime +from io import BytesIO +from logging import Logger +from pathlib import Path +from typing import List, Union, Tuple +from uuid import uuid4 + +from redis import Redis + +from bgpranking.default import AbstractManager, safe_create_dir, get_socket_path +from bgpranking.helpers import get_modules, get_data_dir, get_modules_dir -from bgpranking.abstractmanager import AbstractManager -from bgpranking.parser import RawFilesParser -from bgpranking.libs.helpers import get_config_path, get_homedir logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') +class RawFilesParser(): + + def __init__(self, config_file: Path, logger: Logger) -> None: + self.logger = logger + with open(config_file, 'r') as f: + module_parameters = json.load(f) + self.vendor = module_parameters['vendor'] + self.listname = module_parameters['name'] + if 'parser' in module_parameters: + self.parse_raw_file = types.MethodType(importlib.import_module(module_parameters['parser'], 'bgpranking').parse_raw_file, self) # type: ignore + self.source = f'{self.vendor}-{self.listname}' + self.directory = get_data_dir() / self.vendor / self.listname + safe_create_dir(self.directory) + self.unparsable_dir = self.directory / 'unparsable' + safe_create_dir(self.unparsable_dir) + self.redis_intake = Redis(unix_socket_path=get_socket_path('intake'), db=0) + self.logger.debug(f'{self.source}: Starting intake.') + + @property + def files_to_parse(self) -> List[Path]: + return sorted([f for f in self.directory.iterdir() if f.is_file()], reverse=True) + + def extract_ipv4(self, bytestream: bytes) -> List[Union[bytes, Tuple[bytes, datetime]]]: + return re.findall(rb'[0-9]+(?:\.[0-9]+){3}', bytestream) + + def strip_leading_zeros(self, ips: List[bytes]) -> List[bytes]: + '''Helper to get rid of leading 0s in an IP list. 
+ Only run it when needed, it is nasty and slow''' + return ['.'.join(str(int(part)) for part in ip.split(b'.')).encode() for ip in ips] + + def parse_raw_file(self, f: BytesIO) -> List[Union[bytes, Tuple[bytes, datetime]]]: + # If the list doesn't provide a time, fallback to current day, midnight + self.datetime = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) + return self.extract_ipv4(f.getvalue()) + + def parse_raw_files(self) -> None: + nb_unparsable_files = len([f for f in self.unparsable_dir.iterdir() if f.is_file()]) + if nb_unparsable_files: + self.logger.warning(f'{self.source}: Was unable to parse {nb_unparsable_files} files.') + try: + for filepath in self.files_to_parse: + self.logger.debug(f'{self.source}: Parsing {filepath}, {len(self.files_to_parse) - 1} to go.') + with open(filepath, 'rb') as f: + to_parse = BytesIO(f.read()) + p = self.redis_intake.pipeline() + for line in self.parse_raw_file(to_parse): + if isinstance(line, tuple): + ip, datetime = line + else: + ip = line + datetime = self.datetime + uuid = uuid4() + p.hmset(str(uuid), {'ip': ip, 'source': self.source, + 'datetime': datetime.isoformat()}) + p.sadd('intake', str(uuid)) + p.execute() + self._archive(filepath) + except Exception as e: + self.logger.warning(f"{self.source}: That didn't go well: {e}") + self._unparsable(filepath) + + def _archive(self, filepath: Path) -> None: + '''After processing, move file to the archive directory''' + filepath.rename(self.directory / 'archive' / filepath.name) + + def _unparsable(self, filepath: Path) -> None: + '''After processing, move file to the archive directory''' + filepath.rename(self.unparsable_dir / filepath.name) + + class ParserManager(AbstractManager): - def __init__(self, config_dir: Path=None, storage_directory: Path=None, loglevel: int=logging.DEBUG): + def __init__(self, loglevel: int=logging.DEBUG): super().__init__(loglevel) - if not config_dir: - config_dir = get_config_path() - if not storage_directory: - self.storage_directory = get_homedir() / 'rawdata' - self.modules_config = config_dir / 'modules' - self.modules_paths = [modulepath for modulepath in self.modules_config.glob('*.json')] - self.modules = [RawFilesParser(path, self.storage_directory, loglevel) for path in self.modules_paths] + self.script_name = 'parser' + self.modules_paths = get_modules() + self.modules = [RawFilesParser(path, self.logger) for path in self.modules_paths] def _to_run_forever(self): # Check if there are new config files - new_modules_paths = [modulepath for modulepath in self.modules_config.glob('*.json') if modulepath not in self.modules_paths] - self.modules += [RawFilesParser(path, self.storage_directory, self.loglevel) for path in new_modules_paths] + new_modules_paths = [modulepath for modulepath in get_modules_dir().glob('*.json') if modulepath not in self.modules_paths] + self.modules += [RawFilesParser(path, self.logger) for path in new_modules_paths] self.modules_paths += new_modules_paths if self.modules: - [module.parse_raw_files() for module in self.modules] + for module in self.modules: + module.parse_raw_files() else: self.logger.warning('No config files were found so there are no parsers running yet. 
Will try again later.') -if __name__ == '__main__': +def main(): parser_manager = ParserManager() parser_manager.run(sleep_in_sec=120) + + +if __name__ == '__main__': + main() diff --git a/bin/ranking.py b/bin/ranking.py index e783232..ccf2dbd 100755 --- a/bin/ranking.py +++ b/bin/ranking.py @@ -2,24 +2,131 @@ # -*- coding: utf-8 -*- import logging -from bgpranking.abstractmanager import AbstractManager -from bgpranking.ranking import Ranking -from pathlib import Path + +from datetime import datetime, date, timedelta +from ipaddress import ip_network +from typing import Dict, Any + +from redis import Redis +import requests + +from bgpranking.default import AbstractManager, get_config +from bgpranking.helpers import get_ipasn, sanity_check_ipasn, load_all_modules_configs logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') -class RankingManager(AbstractManager): +class Ranking(AbstractManager): - def __init__(self, config_dir: Path=None, loglevel: int=logging.INFO): + def __init__(self, loglevel: int=logging.INFO): super().__init__(loglevel) - self.ranking = Ranking(config_dir, loglevel) + self.script_name = 'ranking' + self.storage = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port'), decode_responses=True) + self.ranking = Redis(get_config('generic', 'ranking_db_hostname'), get_config('generic', 'ranking_db_port'), decode_responses=True) + self.ipasn = get_ipasn() + + def rank_a_day(self, day: str): + asns_aggregation_key_v4 = f'{day}|asns|v4' + asns_aggregation_key_v6 = f'{day}|asns|v6' + to_delete = set([asns_aggregation_key_v4, asns_aggregation_key_v6]) + r_pipeline = self.ranking.pipeline() + cached_meta: Dict[str, Dict[str, Any]] = {} + config_files = load_all_modules_configs() + for source in self.storage.smembers(f'{day}|sources'): + self.logger.info(f'{day} - Ranking source: {source}') + source_aggregation_key_v4 = f'{day}|{source}|asns|v4' + source_aggregation_key_v6 = f'{day}|{source}|asns|v6' + to_delete.update([source_aggregation_key_v4, source_aggregation_key_v6]) + for asn in self.storage.smembers(f'{day}|{source}'): + prefixes_aggregation_key_v4 = f'{day}|{asn}|v4' + prefixes_aggregation_key_v6 = f'{day}|{asn}|v6' + to_delete.update([prefixes_aggregation_key_v4, prefixes_aggregation_key_v6]) + if asn == '0': + # Default ASN when no matches. Probably spoofed. + continue + self.logger.debug(f'{day} - Ranking source: {source} / ASN: {asn}') + asn_rank_v4 = 0.0 + asn_rank_v6 = 0.0 + for prefix in self.storage.smembers(f'{day}|{source}|{asn}'): + if prefix == 'None': + # This should not happen and requires a DB cleanup. 
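Once a day has been ranked, the aggregated scores live in sorted sets keyed as above (f'{day}|asns|v4', f'{day}|{source}|asns|v4', ...). A minimal consumer sketch for the top ASNs of one day, assuming the ranking kvrocks instance from config/generic.json.sample and a made-up day:

from redis import Redis

ranking = Redis('127.0.0.1', 5189, decode_responses=True)
day = '2021-12-06'  # hypothetical day

# Member = ASN, score = aggregated rank, highest (worst) first.
for asn, score in ranking.zrevrange(f'{day}|asns|v4', 0, 9, withscores=True):
    print(f'AS{asn}: {score:.6f}')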
+ self.logger.critical(f'Fucked up prefix in "{day}|{source}|{asn}"') + continue + ips = set([ip_ts.split('|')[0] + for ip_ts in self.storage.smembers(f'{day}|{source}|{asn}|{prefix}')]) + py_prefix = ip_network(prefix) + prefix_rank = float(len(ips)) / py_prefix.num_addresses + r_pipeline.zadd(f'{day}|{source}|{asn}|v{py_prefix.version}|prefixes', {prefix: prefix_rank}) + if py_prefix.version == 4: + asn_rank_v4 += len(ips) * config_files[source]['impact'] + r_pipeline.zincrby(prefixes_aggregation_key_v4, prefix_rank * config_files[source]['impact'], prefix) + else: + asn_rank_v6 += len(ips) * config_files[source]['impact'] + r_pipeline.zincrby(prefixes_aggregation_key_v6, prefix_rank * config_files[source]['impact'], prefix) + if asn in cached_meta: + v4info = cached_meta[asn]['v4'] + v6info = cached_meta[asn]['v6'] + else: + retry = 3 + while retry: + try: + v4info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v4', date=day) + v6info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v6', date=day) + break + except requests.exceptions.ConnectionError: + # Sometimes, ipasnhistory is unreachable try again a few times + retry -= 1 + else: + # if it keeps failing, the ASN will be ranked on next run. + continue + + cached_meta[asn] = {'v4': v4info, 'v6': v6info} + ipasnhistory_date_v4 = list(v4info['response'].keys())[0] + v4count = v4info['response'][ipasnhistory_date_v4][asn]['ipcount'] + ipasnhistory_date_v6 = list(v6info['response'].keys())[0] + v6count = v6info['response'][ipasnhistory_date_v6][asn]['ipcount'] + if v4count: + asn_rank_v4 /= float(v4count) + if asn_rank_v4: + r_pipeline.set(f'{day}|{source}|{asn}|v4', asn_rank_v4) + r_pipeline.zincrby(asns_aggregation_key_v4, asn_rank_v4, asn) + r_pipeline.zadd(source_aggregation_key_v4, {asn: asn_rank_v4}) + if v6count: + asn_rank_v6 /= float(v6count) + if asn_rank_v6: + r_pipeline.set(f'{day}|{source}|{asn}|v6', asn_rank_v6) + r_pipeline.zincrby(asns_aggregation_key_v6, asn_rank_v6, asn) + r_pipeline.zadd(source_aggregation_key_v6, {asn: asn_rank_v6}) + self.ranking.delete(*to_delete) + r_pipeline.execute() + + def compute(self): + ready, message = sanity_check_ipasn(self.ipasn) + if not ready: + # Try again later. 
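As a worked example of the weighting above (all numbers are made up): a source with impact 5 reporting 10 distinct IPs inside a /24 gives that prefix a rank of 10/256, the ASN accumulates 10 * 5 = 50, and the final per-source ASN rank is that sum divided by the number of IPv4 addresses IPASN History reports for the ASN on that day:

from ipaddress import ip_network

impact = 5                               # hypothetical 'impact' value from a module config
ips = 10                                 # distinct IPs reported in the prefix that day
prefix = ip_network('203.0.113.0/24')    # documentation prefix, made up
v4count = 65536                          # announced IPv4 address count from IPASN History

prefix_rank = ips / prefix.num_addresses      # 10 / 256 ~= 0.039
asn_rank_v4 = (ips * impact) / v4count        # 50 / 65536 ~= 0.00076
print(prefix_rank, asn_rank_v4)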
+ self.logger.warning(message) + return + self.logger.debug(message) + + self.logger.info('Start ranking') + today = date.today() + now = datetime.now() + today12am = now.replace(hour=12, minute=0, second=0, microsecond=0) + if now < today12am: + # Compute yesterday and today's ranking (useful when we have lists generated only once a day) + self.rank_a_day((today - timedelta(days=1)).isoformat()) + self.rank_a_day(today.isoformat()) + self.logger.info('Ranking done.') def _to_run_forever(self): - self.ranking.compute() + self.compute() + + +def main(): + ranking = Ranking() + ranking.run(sleep_in_sec=3600) if __name__ == '__main__': - ranking = RankingManager() - ranking.run(sleep_in_sec=3600) + main() diff --git a/bin/run_backend.py b/bin/run_backend.py index 5e3169c..e4f46b1 100755 --- a/bin/run_backend.py +++ b/bin/run_backend.py @@ -1,79 +1,120 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from bgpranking.libs.helpers import get_homedir, check_running -from subprocess import Popen +import argparse +import os import time from pathlib import Path +from subprocess import Popen +from typing import Optional, Dict -import argparse +from redis import Redis +from redis.exceptions import ConnectionError + +from bgpranking.default import get_homedir, get_socket_path, get_config -def launch_cache(storage_directory: Path=None): +def check_running(name: str) -> bool: + if name == "storage": + r = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port')) + elif name == "ranking": + r = Redis(get_config('generic', 'ranking_db_hostname'), get_config('generic', 'ranking_db_port')) + else: + socket_path = get_socket_path(name) + if not os.path.exists(socket_path): + return False + r = Redis(unix_socket_path=socket_path) + try: + return True if r.ping() else False + except ConnectionError: + return False + + +def launch_cache(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() if not check_running('cache'): Popen(["./run_redis.sh"], cwd=(storage_directory / 'cache')) -def shutdown_cache(storage_directory: Path=None): +def shutdown_cache(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() - Popen(["./shutdown_redis.sh"], cwd=(storage_directory / 'cache')) + r = Redis(unix_socket_path=get_socket_path('cache')) + r.shutdown(save=True) + print('Redis cache database shutdown.') -def launch_temp(storage_directory: Path=None): +def launch_temp(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() if not check_running('intake') and not check_running('prepare'): Popen(["./run_redis.sh"], cwd=(storage_directory / 'temp')) -def shutdown_temp(storage_directory: Path=None): +def shutdown_temp(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() - Popen(["./shutdown_redis.sh"], cwd=(storage_directory / 'temp')) + r = Redis(unix_socket_path=get_socket_path('intake')) + r.shutdown(save=True) + print('Redis intake database shutdown.') + r = Redis(unix_socket_path=get_socket_path('prepare')) + r.shutdown(save=True) + print('Redis prepare database shutdown.') -def launch_storage(storage_directory: Path=None): +def launch_storage(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() if not check_running('storage'): - Popen(["./run_ardb.sh"], cwd=(storage_directory / 'storage')) + Popen(["./run_kvrocks.sh"], cwd=(storage_directory / 'storage')) -def 
shutdown_storage(storage_directory: Path=None): +def shutdown_storage(storage_directory: Optional[Path]=None): + redis = Redis(get_config('generic', 'storage_db_hostname'), get_config('generic', 'storage_db_port')) + redis.shutdown() + + +def launch_ranking(storage_directory: Optional[Path]=None): if not storage_directory: storage_directory = get_homedir() - Popen(["./shutdown_ardb.sh"], cwd=(storage_directory / 'storage')) + if not check_running('ranking'): + Popen(["./run_kvrocks.sh"], cwd=(storage_directory / 'ranking')) + + +def shutdown_ranking(storage_directory: Optional[Path]=None): + redis = Redis(get_config('generic', 'ranking_db_hostname'), get_config('generic', 'ranking_db_port')) + redis.shutdown() def launch_all(): launch_cache() launch_temp() launch_storage() + launch_ranking() -def check_all(stop=False): - backends = [['cache', False], ['storage', False], - ['intake', False], ['prepare', False]] +def check_all(stop: bool=False): + backends: Dict[str, bool] = {'cache': False, 'storage': False, 'ranking': False, + 'intake': False, 'prepare': False} while True: - for b in backends: + for db_name in backends.keys(): + print(backends[db_name]) try: - b[1] = check_running(b[0]) + backends[db_name] = check_running(db_name) except Exception: - b[1] = False + backends[db_name] = False if stop: - if not any(b[1] for b in backends): + if not any(running for running in backends.values()): break else: - if all(b[1] for b in backends): + if all(running for running in backends.values()): break - for b in backends: - if not stop and not b[1]: - print(f"Waiting on {b[0]}") - if stop and b[1]: - print(f"Waiting on {b[0]}") + for db_name, running in backends.items(): + if not stop and not running: + print(f"Waiting on {db_name} to start") + if stop and running: + print(f"Waiting on {db_name} to stop") time.sleep(1) @@ -81,9 +122,10 @@ def stop_all(): shutdown_cache() shutdown_temp() shutdown_storage() + shutdown_ranking() -if __name__ == '__main__': +def main(): parser = argparse.ArgumentParser(description='Manage backend DBs.') parser.add_argument("--start", action='store_true', default=False, help="Start all") parser.add_argument("--stop", action='store_true', default=False, help="Stop all") @@ -96,3 +138,7 @@ if __name__ == '__main__': stop_all() if not args.stop and args.status: check_all() + + +if __name__ == '__main__': + main() diff --git a/bin/sanitizer.py b/bin/sanitizer.py index ba0e46e..debce50 100755 --- a/bin/sanitizer.py +++ b/bin/sanitizer.py @@ -1,25 +1,109 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +import ipaddress import logging +import time -from bgpranking.abstractmanager import AbstractManager -from bgpranking.sanitizer import Sanitizer +from datetime import timezone +from typing import Optional, List + +from dateutil import parser +from redis import Redis +import requests + +from bgpranking.default import AbstractManager, get_socket_path +from bgpranking.helpers import get_ipasn, sanity_check_ipasn logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.WARNING, datefmt='%I:%M:%S') -class SanitizerManager(AbstractManager): +class Sanitizer(AbstractManager): - def __init__(self, loglevel: int=logging.WARNING): + def __init__(self, loglevel: int=logging.INFO): super().__init__(loglevel) - self.sanitizer = Sanitizer(loglevel) + self.script_name = 'sanitizer' + self.redis_intake = Redis(unix_socket_path=get_socket_path('intake'), db=0, decode_responses=True) + self.redis_sanitized = Redis(unix_socket_path=get_socket_path('prepare'), 
db=0, decode_responses=True) + self.ipasn = get_ipasn() + self.logger.debug('Starting import') + + def sanitize(self): + ready, message = sanity_check_ipasn(self.ipasn) + if not ready: + # Try again later. + self.logger.warning(message) + return + self.logger.debug(message) + + while True: + try: + if self.shutdown_requested() or not self.ipasn.is_up: + break + except requests.exceptions.ConnectionError: + # Temporary issue with ipasnhistory + self.logger.info('Temporary issue with ipasnhistory, trying again later.') + time.sleep(10) + continue + uuids: Optional[List[str]] = self.redis_intake.spop('intake', 100) # type: ignore + if not uuids: + break + for_cache = [] + pipeline = self.redis_sanitized.pipeline(transaction=False) + for uuid in uuids: + data = self.redis_intake.hgetall(uuid) + if not data: + continue + try: + ip = ipaddress.ip_address(data['ip']) + if isinstance(ip, ipaddress.IPv6Address): + address_family = 'v6' + else: + address_family = 'v4' + except ValueError: + self.logger.info(f"Invalid IP address: {data['ip']}") + continue + except KeyError: + self.logger.info(f"Invalid entry {data}") + continue + + if not ip.is_global: + self.logger.info(f"The IP address {data['ip']} is not global") + continue + + datetime = parser.parse(data['datetime']) + if datetime.tzinfo: + # Normalize to UTC, then drop the timezone info so the stored datetime is naive. + datetime = datetime.astimezone(timezone.utc).replace(tzinfo=None) + + for_cache.append({'ip': str(ip), 'address_family': address_family, 'source': 'caida', + 'date': datetime.isoformat(), 'precision_delta': {'days': 3}}) + + # Add to temporary DB for further processing + pipeline.hmset(uuid, {'ip': str(ip), 'source': data['source'], 'address_family': address_family, + 'date': datetime.date().isoformat(), 'datetime': datetime.isoformat()}) + pipeline.sadd('to_insert', uuid) + pipeline.execute() + self.redis_intake.delete(*uuids) + + try: + # Just cache everything so the lookup scripts can do their thing.
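To illustrate the datetime normalisation above: a timezone-aware timestamp coming from a source list is converted to UTC and stripped of its tzinfo, so everything stored in the prepare database is naive UTC. A small self-contained example (the input value is made up):

from datetime import timezone
from dateutil import parser

dt = parser.parse('2021-12-06T14:30:08+01:00')  # hypothetical timestamp from a list
if dt.tzinfo:
    dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
print(dt.isoformat())  # 2021-12-06T13:30:08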
+ self.ipasn.mass_cache(for_cache) + except Exception: + self.logger.info('Mass cache in IPASN History failed, trying again later.') + # Rollback the spop + self.redis_intake.sadd('intake', *uuids) + break def _to_run_forever(self): - self.sanitizer.sanitize() + self.sanitize() + + +def main(): + sanitizer = Sanitizer() + sanitizer.run(sleep_in_sec=120) if __name__ == '__main__': - sanitizer = SanitizerManager() - sanitizer.run(sleep_in_sec=120) + main() diff --git a/bin/shutdown.py b/bin/shutdown.py index d596135..93bbc22 100755 --- a/bin/shutdown.py +++ b/bin/shutdown.py @@ -1,16 +1,25 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from bgpranking.libs.helpers import is_running, get_socket_path import time -from redis import StrictRedis -if __name__ == '__main__': - r = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) - r.set('shutdown', 1) +from bgpranking.default import AbstractManager + + +def main(): + AbstractManager.force_shutdown() + time.sleep(5) while True: - running = is_running() - print(running) + try: + running = AbstractManager.is_running() + except FileNotFoundError: + print('Redis is already down.') + break if not running: break - time.sleep(10) + print(running) + time.sleep(5) + + +if __name__ == '__main__': + main() diff --git a/bin/ssfetcher.py b/bin/ssfetcher.py index c075d52..b260bed 100755 --- a/bin/ssfetcher.py +++ b/bin/ssfetcher.py @@ -2,49 +2,202 @@ # -*- coding: utf-8 -*- import logging -try: - import simplejson as json -except ImportError: - import json +from logging import Logger +import json import asyncio -from pathlib import Path -import aiohttp -from bgpranking.abstractmanager import AbstractManager -from bgpranking.shadowserverfetcher import ShadowServerFetcher -from bgpranking.libs.helpers import get_config_path, get_homedir +from typing import Tuple, Dict, List, Optional, TypeVar, Any +from datetime import datetime, date +from pathlib import Path + +import aiohttp +from bs4 import BeautifulSoup # type: ignore +from dateutil.parser import parse + +from bgpranking.default import AbstractManager, get_homedir, safe_create_dir +from bgpranking.helpers import get_data_dir, get_modules_dir + logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', level=logging.INFO, datefmt='%I:%M:%S') +Dates = TypeVar('Dates', datetime, date, str) + + +class ShadowServerFetcher(): + + def __init__(self, user, password, logger: Logger) -> None: + self.logger = logger + self.storage_directory = get_data_dir() + self.config_path_modules = get_modules_dir() + self.user = user + self.password = password + self.index_page = 'https://dl.shadowserver.org/reports/index.php' + self.vendor = 'shadowserver' + self.known_list_types = ('blacklist', 'botnet', 'cc', 'cisco', 'cwsandbox', 'drone', + 'microsoft', 'scan', 'sinkhole6', 'sinkhole', 'outdated', + 'compromised', 'hp', 'darknet', 'ddos') + self.first_available_day: date + self.last_available_day: date + self.available_entries: Dict[str, List[Tuple[str, str]]] = {} + + async def __get_index(self): + auth_details = {'user': self.user, 'password': self.password, 'login': 'Login'} + async with aiohttp.ClientSession() as s: + self.logger.debug('Fetching the index.') + async with s.post(self.index_page, data=auth_details) as r: + return await r.text() + + async def __build_daily_dict(self): + html_index = await self.__get_index() + soup = BeautifulSoup(html_index, 'html.parser') + treeview = soup.find(id='treemenu1') + for y in treeview.select(':scope > li'): + year = 
y.contents[0] + for m in y.contents[1].select(':scope > li'): + month = m.contents[0] + for d in m.contents[1].select(':scope > li'): + day = d.contents[0] + date = parse(f'{year} {month} {day}').date() + self.available_entries[date.isoformat()] = [] + for a in d.contents[1].find_all('a', href=True): + if not self.first_available_day: + self.first_available_day = date + self.last_available_day = date + self.available_entries[date.isoformat()].append((a['href'], a.string)) + self.logger.debug('Dictionary created.') + + def __normalize_day(self, day: Optional[Dates]=None) -> str: + if not day: + if not self.last_available_day: + raise Exception('Unable to figure out the last available day. You need to run build_daily_dict first') + to_return = self.last_available_day + else: + if isinstance(day, str): + to_return = parse(day).date() + elif isinstance(day, datetime): + to_return = day.date() + return to_return.isoformat() + + def __split_name(self, name): + type_content, country, list_type = name.split('-') + if '_' in type_content: + type_content, details_type = type_content.split('_', maxsplit=1) + if '_' in details_type: + details_type, sub = details_type.split('_', maxsplit=1) + return list_type, country, (type_content, details_type, sub) + return list_type, country, (type_content, details_type) + return list_type, country, (type_content) + + def __check_config(self, filename: str) -> Optional[Path]: + self.logger.debug(f'Working on config for {filename}.') + config: Dict[str, Any] = {'vendor': 'shadowserver', 'parser': '.parsers.shadowserver'} + type_content, _, type_details = self.__split_name(filename) + prefix = type_content.split('.')[0] + + if isinstance(type_details, str): + main_type = type_details + config['name'] = '{}-{}'.format(prefix, type_details) + else: + main_type = type_details[0] + config['name'] = '{}-{}'.format(prefix, '_'.join(type_details)) + + if main_type not in self.known_list_types: + self.logger.warning(f'Unknown type: {main_type}. 
Please update the config creator script.') + return None + + if main_type == 'blacklist': + config['impact'] = 5 + elif main_type == 'botnet': + config['impact'] = 2 + elif main_type == 'cc': + config['impact'] = 5 + elif main_type == 'cisco': + config['impact'] = 3 + elif main_type == 'cwsandbox': + config['impact'] = 5 + elif main_type == 'drone': + config['impact'] = 2 + elif main_type == 'microsoft': + config['impact'] = 3 + elif main_type == 'scan': + config['impact'] = 1 + elif main_type == 'sinkhole6': + config['impact'] = 2 + elif main_type == 'sinkhole': + config['impact'] = 2 + else: + config['impact'] = 1 + + if not (self.config_path_modules / f"{config['vendor']}_{config['name']}.json").exists(): + self.logger.debug(f'Creating config file for {filename}.') + with open(self.config_path_modules / f"{config['vendor']}_{config['name']}.json", 'w') as f: + json.dump(config, f, indent=2) + else: + with open(self.config_path_modules / f"{config['vendor']}_{config['name']}.json", 'r') as f: + # Validate new config file with old + config_current = json.load(f) + if config_current != config: + self.logger.warning('The config file created by this script is different from the one on disk: \n{}\n{}'.format(json.dumps(config), json.dumps(config_current))) + # Init list directory + directory = self.storage_directory / config['vendor'] / config['name'] + safe_create_dir(directory) + meta = directory / 'meta' + safe_create_dir(meta) + archive_dir = directory / 'archive' + safe_create_dir(archive_dir) + self.logger.debug(f'Done with config for {filename}.') + return directory + + async def download_daily_entries(self, day: Optional[Dates]=None): + await self.__build_daily_dict() + for url, filename in self.available_entries[self.__normalize_day(day)]: + storage_dir = self.__check_config(filename) + if not storage_dir: + continue + # Check if the file we're trying to download has already been downloaded. Skip if True. 
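The impact values assigned in the chain above map a ShadowServer report type to the weight later used by the ranking. A behaviour-equivalent sketch of the same mapping as a lookup table (not part of the patch; the helper name is made up):

def impact_for(main_type: str) -> int:
    impact_by_type = {
        'blacklist': 5, 'cc': 5, 'cwsandbox': 5,
        'cisco': 3, 'microsoft': 3,
        'botnet': 2, 'drone': 2, 'sinkhole': 2, 'sinkhole6': 2,
        'scan': 1,
    }
    return impact_by_type.get(main_type, 1)  # everything else defaults to 1

print(impact_for('blacklist'), impact_for('darknet'))  # 5 1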
+ uuid = url.split('/')[-1] + if (storage_dir / 'meta' / 'last_download').exists(): + with open(storage_dir / 'meta' / 'last_download') as _fr: + last_download_uuid = _fr.read() + if last_download_uuid == uuid: + self.logger.debug(f'Already downloaded: {url}.') + continue + async with aiohttp.ClientSession() as s: + async with s.get(url) as r: + self.logger.info(f'Downloading {url}.') + content = await r.content.read() + with (storage_dir / f'{datetime.now().isoformat()}.txt').open('wb') as _fw: + _fw.write(content) + with (storage_dir / 'meta' / 'last_download').open('w') as _fwt: + _fwt.write(uuid) + + class ShadowServerManager(AbstractManager): - def __init__(self, config_dir: Path=None, storage_directory: Path=None, loglevel: int=logging.INFO): + def __init__(self, loglevel: int=logging.INFO): super().__init__(loglevel) + self.script_name = 'shadowserver_fetcher' + shadow_server_config_file = get_homedir() / 'config' / 'shadowserver.json' self.config = True - if not config_dir: - config_dir = get_config_path() - if not (config_dir / 'shadowserver.json').exists(): + if not shadow_server_config_file.exists(): self.config = False - self.logger.warning(f'No config file available, the shadow server module will not be launched.') + self.logger.warning(f'No config file available {shadow_server_config_file}, the shadow server module will not be launched.') return - with open(config_dir / 'shadowserver.json') as f: + with shadow_server_config_file.open() as f: ss_config = json.load(f) - if not storage_directory: - storage_directory = get_homedir() / 'rawdata' - modules_config = config_dir / 'modules' - self.fetcher = ShadowServerFetcher(ss_config['user'], ss_config['password'], modules_config, storage_directory, loglevel) + self.fetcher = ShadowServerFetcher(ss_config['user'], ss_config['password'], self.logger) - def _to_run_forever(self): - loop = asyncio.get_event_loop() - try: - loop.run_until_complete(self.fetcher.download_daily_entries()) - except aiohttp.client_exceptions.ClientConnectorError as e: - self.logger.critical(f'Exception while fetching Shadow Server lists: {e}') + async def _to_run_forever_async(self): + await self.fetcher.download_daily_entries() + + +def main(): + modules_manager = ShadowServerManager() + if modules_manager.config: + asyncio.run(modules_manager.run_async(sleep_in_sec=3600)) if __name__ == '__main__': - modules_manager = ShadowServerManager() - if modules_manager.config: - modules_manager.run(sleep_in_sec=3600) + main() diff --git a/bin/start.py b/bin/start.py index 0cf1f49..f8c7b2e 100755 --- a/bin/start.py +++ b/bin/start.py @@ -1,25 +1,29 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from subprocess import Popen -from bgpranking.libs.helpers import get_homedir +from subprocess import Popen, run -import redis -import sys +from bgpranking.default import get_homedir -if redis.VERSION < (3, ): - print('redis-py >= 3 is required.') - sys.exit() -if __name__ == '__main__': +def main(): # Just fail if the env isn't set. 
get_homedir() - p = Popen(['run_backend.py', '--start']) - p.wait() - Popen(['fetcher.py']) - Popen(['ssfetcher.py']) - Popen(['parser.py']) - Popen(['sanitizer.py']) - Popen(['dbinsert.py']) - Popen(['ranking.py']) - Popen(['asn_descriptions.py']) + print('Start backend (redis)...') + p = run(['run_backend', '--start']) + p.check_returncode() + print('done.') + Popen(['fetcher']) + # Popen(['ssfetcher']) + Popen(['parser']) + Popen(['sanitizer']) + Popen(['dbinsert']) + Popen(['ranking']) + Popen(['asn_descriptions']) + print('Start website...') + # Popen(['start_website']) + print('done.') + + +if __name__ == '__main__': + main() diff --git a/bin/start_website.py b/bin/start_website.py index 13bb4db..a5cdbfa 100755 --- a/bin/start_website.py +++ b/bin/start_website.py @@ -1,14 +1,40 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +import logging from subprocess import Popen -from bgpranking.libs.helpers import get_homedir + +from bgpranking.default import AbstractManager +from bgpranking.default import get_config, get_homedir + +logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', + level=logging.INFO) + + +class Website(AbstractManager): + + def __init__(self, loglevel: int=logging.INFO): + super().__init__(loglevel) + self.script_name = 'website' + self.process = self._launch_website() + self.set_running() + + def _launch_website(self): + website_dir = get_homedir() / 'website' + ip = get_config('generic', 'website_listen_ip') + port = get_config('generic', 'website_listen_port') + return Popen(['gunicorn', '-w', '10', + '--graceful-timeout', '2', '--timeout', '300', + '-b', f'{ip}:{port}', + '--log-level', 'info', + 'web:app'], + cwd=website_dir) + + +def main(): + w = Website() + w.run(sleep_in_sec=10) + if __name__ == '__main__': - website_dir = get_homedir() / 'website' - Popen([f'{website_dir}/3drparty.sh'], cwd=website_dir) - try: - Popen(['gunicorn', '--worker-class', 'gevent', '-w', '10', '-b', '0.0.0.0:5005', 'web:app'], - cwd=website_dir).communicate() - except KeyboardInterrupt: - print('Stopping gunicorn.') + main() diff --git a/bin/stop.py b/bin/stop.py index 58d01e9..12c9032 100755 --- a/bin/stop.py +++ b/bin/stop.py @@ -1,11 +1,29 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from subprocess import Popen -from bgpranking.libs.helpers import get_homedir +from subprocess import Popen, run + +from redis import Redis +from redis.exceptions import ConnectionError + +from bgpranking.default import get_homedir, get_socket_path + + +def main(): + get_homedir() + p = Popen(['shutdown']) + p.wait() + try: + r = Redis(unix_socket_path=get_socket_path('cache'), db=1) + r.delete('shutdown') + print('Shutting down databases...') + p_backend = run(['run_backend', '--stop']) + p_backend.check_returncode() + print('done.') + except ConnectionError: + # Already down, skip the stacktrace + pass + if __name__ == '__main__': - get_homedir() - p = Popen(['shutdown.py']) - p.wait() - Popen(['run_backend.py', '--stop']) + main() diff --git a/bin/update.py b/bin/update.py new file mode 100755 index 0000000..e74f1cf --- /dev/null +++ b/bin/update.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import argparse +import hashlib +import logging +import platform +import shlex +import subprocess +import sys +from pathlib import Path + +from bgpranking.default import get_homedir, get_config + +logging.basicConfig(format='%(asctime)s %(name)s %(levelname)s:%(message)s', + level=logging.INFO) + + +def compute_hash_self(): + m = hashlib.sha256() + 
with (get_homedir() / 'bin' / 'update.py').open('rb') as f: + m.update(f.read()) + return m.digest() + + +def keep_going(ignore=False): + if ignore: + return + keep_going = input('Continue? (y/N) ') + if keep_going.lower() != 'y': + print('Okay, quitting.') + sys.exit() + + +def run_command(command, expect_fail: bool=False, capture_output: bool=True): + args = shlex.split(command) + homedir = get_homedir() + process = subprocess.run(args, cwd=homedir, capture_output=capture_output) + if capture_output: + print(process.stdout.decode()) + if process.returncode and not expect_fail: + print(process.stderr.decode()) + sys.exit() + + +def check_poetry_version(): + args = shlex.split("poetry self -V") + homedir = get_homedir() + process = subprocess.run(args, cwd=homedir, capture_output=True) + poetry_version_str = process.stdout.decode() + version = poetry_version_str.split()[2] + version_details = tuple(int(i) for i in version.split('.')) + if version_details < (1, 1, 0): + print('The project requires poetry >= 1.1.0, please update.') + print('If you installed with "pip install --user poetry", run "pip install --user -U poetry"') + print('If you installed via the recommended method, use "poetry self update"') + print('More details: https://github.com/python-poetry/poetry#updating-poetry') + sys.exit() + + +def main(): + parser = argparse.ArgumentParser(description='Pull latest release, update dependencies, update and validate the config files, update 3rd deps for the website.') + parser.add_argument('--yes', default=False, action='store_true', help='Run all commands without asking.') + args = parser.parse_args() + + old_hash = compute_hash_self() + + print('* Update repository.') + keep_going(args.yes) + run_command('git pull') + new_hash = compute_hash_self() + if old_hash != new_hash: + print('Update script changed, please do "poetry run update"') + sys.exit() + + check_poetry_version() + + print('* Install/update dependencies.') + keep_going(args.yes) + run_command('poetry install') + + print('* Validate configuration files.') + keep_going(args.yes) + run_command(f'poetry run {(Path("tools") / "validate_config_files.py").as_posix()} --check') + + print('* Update configuration files.') + keep_going(args.yes) + run_command(f'poetry run {(Path("tools") / "validate_config_files.py").as_posix()} --update') + + print('* Restarting') + keep_going(args.yes) + if platform.system() == 'Windows': + print('Restarting with poetry...') + run_command('poetry run stop', expect_fail=True) + run_command('poetry run start', capture_output=False) + print('Started.') + else: + service = get_config('generic', 'systemd_service_name') + p = subprocess.run(["systemctl", "is-active", "--quiet", service]) + try: + p.check_returncode() + print('Restarting with systemd...') + run_command(f'sudo service {service} restart') + print('done.') + except subprocess.CalledProcessError: + print('Restarting with poetry...') + run_command('poetry run stop', expect_fail=True) + run_command('poetry run start', capture_output=False) + print('Started.') + + +if __name__ == '__main__': + main() diff --git a/client/bin/bgpranking b/client/bin/bgpranking deleted file mode 100755 index 811d2fe..0000000 --- a/client/bin/bgpranking +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import argparse -try: - import simplejson as json -except ImportError: - import json - -from urllib.parse import urljoin -from pybgpranking import BGPRanking -from pyipasnhistory import IPASNHistory -from datetime import date, 
timedelta - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Run a query against BGP Ranking') - parser.add_argument('--url', type=str, help='URL of the instance.') - parser.add_argument('--date', default=date.today().isoformat(), help='Date of the dataset required') - - sub_parsers = parser.add_subparsers(title='Available commands') - - index_query = sub_parsers.add_parser('index') - index_query.add_argument('--limit', default=100, help='Max number of ASN to get') - index_query.add_argument('--family', default='v4', help='v4 or v6') - index_query.set_defaults(which='index') - - simple_query = sub_parsers.add_parser('simple') - group = simple_query.add_mutually_exclusive_group(required=True) - group.add_argument('--asn', help='ASN to lookup') - group.add_argument('--ip', help='IP to lookup') - simple_query.set_defaults(which='simple') - - args = parser.parse_args() - - if args.url: - bgpranking = BGPRanking(args.url) - ipasn = IPASNHistory(urljoin(args.url, 'ipasn_history')) - else: - bgpranking = BGPRanking() - ipasn = IPASNHistory() - - if args.which == 'simple': - if args.ip: - response = ipasn.query(args.ip) - print(json.dumps(response, indent=2)) - if 'response' in response and response['response']: - asn = response['response'][list(response['response'].keys())[0]]['asn'] - else: - asn = args.asn - - response = bgpranking.query(asn, date=(date.today() - timedelta(1)).isoformat()) - elif args.which == 'index': - response = bgpranking.asns_global_ranking(address_family=args.family, limit=args.limit, date=args.date) - print(json.dumps(response, indent=2)) diff --git a/client/pybgpranking/__init__.py b/client/pybgpranking/__init__.py deleted file mode 100644 index 2ad6888..0000000 --- a/client/pybgpranking/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .api import BGPRanking # noqa diff --git a/client/pybgpranking/api.py b/client/pybgpranking/api.py deleted file mode 100644 index 9fc8d27..0000000 --- a/client/pybgpranking/api.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -try: - import simplejson as json -except ImportError: - import json - -from typing import Union - -import requests -from urllib.parse import urljoin -from datetime import date - - -class BGPRanking(): - - def __init__(self, root_url: str='https://bgpranking-ng.circl.lu/'): - self.root_url = root_url - if not self.root_url.endswith('/'): - self.root_url += '/' - self.session = requests.session() - - @property - def is_up(self): - r = self.session.head(self.root_url) - return r.status_code == 200 - - def query(self, asn: str, address_family: str='v4', date: str=None, - source: Union[list, str]=''): - '''Launch a query. - :param asn: ASN to lookup - :param address_family: v4 or v6 - :param date: Exact date to lookup. Fallback to most recent available. - :param source: Source to query. Can be a list of sources. 
- ''' - to_query = {'asn': asn, 'address_family': address_family} - if date: - to_query['date'] = date - if source: - to_query['source'] = source - r = self.session.post(urljoin(self.root_url, '/json/asn'), data=json.dumps(to_query)) - return r.json() - - def asns_global_ranking(self, date: str=date.today().isoformat(), address_family: str='v4', limit: int=100): - '''Get the top `limit` ASNs, from worse to best''' - to_query = {'date': date, 'ipversion': address_family, 'limit': limit} - r = self.session.post(urljoin(self.root_url, '/json/asns_global_ranking'), data=json.dumps(to_query)) - return r.json() diff --git a/client/setup.py b/client/setup.py deleted file mode 100644 index fbadbef..0000000 --- a/client/setup.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -from setuptools import setup - - -setup( - name='pybgpranking', - version='0.1', - author='Raphaël Vinot', - author_email='raphael.vinot@circl.lu', - maintainer='Raphaël Vinot', - url='https://github.com/D4-project/BGP-Ranking/client', - description='Python client for BGP Ranking', - packages=['pybgpranking'], - scripts=['bin/bgpranking'], - install_requires=['requests'], - classifiers=[ - 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', - 'Development Status :: 3 - Alpha', - 'Environment :: Console', - 'Operating System :: POSIX :: Linux', - 'Intended Audience :: Science/Research', - 'Intended Audience :: Telecommunications Industry', - 'Intended Audience :: Information Technology', - 'Programming Language :: Python :: 3', - 'Topic :: Security', - 'Topic :: Internet', - ] -) diff --git a/config/generic.json.sample b/config/generic.json.sample new file mode 100644 index 0000000..57296bb --- /dev/null +++ b/config/generic.json.sample @@ -0,0 +1,22 @@ +{ + "loglevel": "INFO", + "website_listen_ip": "0.0.0.0", + "website_listen_port": 5005, + "systemd_service_name": "bgpranking", + "storage_db_hostname": "127.0.0.1", + "storage_db_port": 5188, + "ranking_db_hostname": "127.0.0.1", + "ranking_db_port": 5189, + "ipasnhistory_url": "https://ipasnhistory.circl.lu/", + "_notes": { + "loglevel": "(lookyloo) Can be one of the value listed here: https://docs.python.org/3/library/logging.html#levels", + "website_listen_ip": "IP Flask will listen on. Defaults to 0.0.0.0, meaning all interfaces.", + "website_listen_port": "Port Flask will listen on.", + "systemd_service_name": "(Optional) Name of the systemd service if your project has one.", + "storage_db_hostname": "Hostname of the storage database (kvrocks)", + "storage_db_port": "Port of the storage database (kvrocks)", + "ranking_db_hostname": "Hostname of the ranking database (kvrocks)", + "ranking_db_port": "Port of the ranking database (kvrocks)", + "ipasnhistory_url": "URL of the IP ASN History service, defaults to the public one." 
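These keys are the ones the daemons read through get_config('generic', ...); with the defaults above, the storage and ranking connections built in dbinsert.py and ranking.py resolve to 127.0.0.1:5188 and 127.0.0.1:5189. A minimal sketch, assuming the sample has been copied to config/generic.json and both kvrocks instances are running:

from redis import Redis

from bgpranking.default import get_config

storage = Redis(get_config('generic', 'storage_db_hostname'),
                get_config('generic', 'storage_db_port'), decode_responses=True)
ranking = Redis(get_config('generic', 'ranking_db_hostname'),
                get_config('generic', 'ranking_db_port'), decode_responses=True)
print(storage.ping(), ranking.ping())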
+ } +} diff --git a/bgpranking/config/modules/Alienvault.json b/config/modules/Alienvault.json similarity index 100% rename from bgpranking/config/modules/Alienvault.json rename to config/modules/Alienvault.json diff --git a/bgpranking/config/modules/BlocklistDeApache.json b/config/modules/BlocklistDeApache.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeApache.json rename to config/modules/BlocklistDeApache.json diff --git a/bgpranking/config/modules/BlocklistDeBots.json b/config/modules/BlocklistDeBots.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeBots.json rename to config/modules/BlocklistDeBots.json diff --git a/bgpranking/config/modules/BlocklistDeFTP.json b/config/modules/BlocklistDeFTP.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeFTP.json rename to config/modules/BlocklistDeFTP.json diff --git a/bgpranking/config/modules/BlocklistDeIMAP.json b/config/modules/BlocklistDeIMAP.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeIMAP.json rename to config/modules/BlocklistDeIMAP.json diff --git a/bgpranking/config/modules/BlocklistDeMail.json b/config/modules/BlocklistDeMail.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeMail.json rename to config/modules/BlocklistDeMail.json diff --git a/bgpranking/config/modules/BlocklistDeSIP.json b/config/modules/BlocklistDeSIP.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeSIP.json rename to config/modules/BlocklistDeSIP.json diff --git a/bgpranking/config/modules/BlocklistDeSSH.json b/config/modules/BlocklistDeSSH.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeSSH.json rename to config/modules/BlocklistDeSSH.json diff --git a/bgpranking/config/modules/BlocklistDeStrong.json b/config/modules/BlocklistDeStrong.json similarity index 100% rename from bgpranking/config/modules/BlocklistDeStrong.json rename to config/modules/BlocklistDeStrong.json diff --git a/bgpranking/config/modules/CIArmy.json b/config/modules/CIArmy.json similarity index 100% rename from bgpranking/config/modules/CIArmy.json rename to config/modules/CIArmy.json diff --git a/bgpranking/config/modules/CleanMXMalwares.json b/config/modules/CleanMXMalwares.json similarity index 100% rename from bgpranking/config/modules/CleanMXMalwares.json rename to config/modules/CleanMXMalwares.json diff --git a/bgpranking/config/modules/CleanMXPhishing.json b/config/modules/CleanMXPhishing.json similarity index 100% rename from bgpranking/config/modules/CleanMXPhishing.json rename to config/modules/CleanMXPhishing.json diff --git a/bgpranking/config/modules/CleanMXPortals.json b/config/modules/CleanMXPortals.json similarity index 100% rename from bgpranking/config/modules/CleanMXPortals.json rename to config/modules/CleanMXPortals.json diff --git a/bgpranking/config/modules/CoinBlockerLists.json b/config/modules/CoinBlockerLists.json similarity index 100% rename from bgpranking/config/modules/CoinBlockerLists.json rename to config/modules/CoinBlockerLists.json diff --git a/bgpranking/config/modules/DshieldDaily.json b/config/modules/DshieldDaily.json similarity index 100% rename from bgpranking/config/modules/DshieldDaily.json rename to config/modules/DshieldDaily.json diff --git a/bgpranking/config/modules/DshieldTopIPs.json b/config/modules/DshieldTopIPs.json similarity index 100% rename from bgpranking/config/modules/DshieldTopIPs.json rename to config/modules/DshieldTopIPs.json diff --git 
a/bgpranking/config/modules/EmergingThreatsCompromized.json b/config/modules/EmergingThreatsCompromized.json similarity index 100% rename from bgpranking/config/modules/EmergingThreatsCompromized.json rename to config/modules/EmergingThreatsCompromized.json diff --git a/bgpranking/config/modules/FeodotrackerIPBlockList.json b/config/modules/FeodotrackerIPBlockList.json similarity index 100% rename from bgpranking/config/modules/FeodotrackerIPBlockList.json rename to config/modules/FeodotrackerIPBlockList.json diff --git a/bgpranking/config/modules/Malc0de.json b/config/modules/Malc0de.json similarity index 100% rename from bgpranking/config/modules/Malc0de.json rename to config/modules/Malc0de.json diff --git a/bgpranking/config/modules/MalwareDomainListIP.json b/config/modules/MalwareDomainListIP.json similarity index 100% rename from bgpranking/config/modules/MalwareDomainListIP.json rename to config/modules/MalwareDomainListIP.json diff --git a/bgpranking/config/modules/RansomwareIPBlockList.json b/config/modules/RansomwareIPBlockList.json similarity index 100% rename from bgpranking/config/modules/RansomwareIPBlockList.json rename to config/modules/RansomwareIPBlockList.json diff --git a/bgpranking/config/modules/greensnow.json b/config/modules/greensnow.json similarity index 100% rename from bgpranking/config/modules/greensnow.json rename to config/modules/greensnow.json diff --git a/bgpranking/config/modules/jq_all_the_things.sh b/config/modules/jq_all_the_things.sh similarity index 100% rename from bgpranking/config/modules/jq_all_the_things.sh rename to config/modules/jq_all_the_things.sh diff --git a/bgpranking/config/modules/module.schema b/config/modules/module.schema similarity index 100% rename from bgpranking/config/modules/module.schema rename to config/modules/module.schema diff --git a/bgpranking/config/modules/pop3gropers.json b/config/modules/pop3gropers.json similarity index 100% rename from bgpranking/config/modules/pop3gropers.json rename to config/modules/pop3gropers.json diff --git a/bgpranking/config/modules/shadowserver_only.sh b/config/modules/shadowserver_only.sh similarity index 100% rename from bgpranking/config/modules/shadowserver_only.sh rename to config/modules/shadowserver_only.sh diff --git a/bgpranking/config/modules/validate_all.sh b/config/modules/validate_all.sh similarity index 100% rename from bgpranking/config/modules/validate_all.sh rename to config/modules/validate_all.sh diff --git a/bgpranking/config/shadowserver.json.example b/config/shadowserver.json.sample similarity index 100% rename from bgpranking/config/shadowserver.json.example rename to config/shadowserver.json.sample diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..231a2b9 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1418 @@ +[[package]] +name = "aiohttp" +version = "3.8.1" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotli", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.2.0" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "aniso8601" +version = "9.0.1" 
+description = "A library for parsing ISO 8601 strings." +category = "main" +optional = false +python-versions = "*" + +[package.extras] +dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] + +[[package]] +name = "appnope" +version = "0.1.2" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "async-timeout" +version = "4.0.1" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=3.6.5" + +[[package]] +name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "beautifulsoup4" +version = "4.10.0" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = ">3.0.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bootstrap-flask" +version = "1.8.0" +description = "Bootstrap helper for Flask/Jinja2." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = "*" + +[package.extras] +dev = ["coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] +docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "charset-normalizer" +version = "2.0.9" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "decorator" +version = "5.1.0" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "deprecated" +version = "1.2.13" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] + +[[package]] +name = "flask" +version = "2.0.2" +description = "A simple framework for building complex web applications." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.2" +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-restx" +version = "0.5.1" +description = "Fully featured framework for fast, easy and documented API development with Flask" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aniso8601 = {version = ">=0.82", markers = "python_version >= \"3.5\""} +Flask = ">=0.8,<2.0.0 || >2.0.0" +jsonschema = "*" +pytz = "*" +six = ">=1.3.0" +werkzeug = "!=2.0.0" + +[package.extras] +dev = ["blinker", "Faker (==2.0.0)", "mock (==3.0.5)", "pytest-benchmark (==3.2.2)", "pytest-cov (==2.7.1)", "pytest-flask (==0.15.1)", "pytest-mock (==1.10.4)", "pytest-profiling (==1.7.0)", "tzlocal", "invoke (==1.3.0)", "readme-renderer (==24.0)", "twine (==1.15.0)", "tox", "pytest (==4.6.5)", "pytest (==5.4.1)", "ossaudit", "black"] +doc = ["alabaster (==0.7.12)", "Sphinx (==2.1.2)", "sphinx-issues (==1.2.0)"] +test = ["blinker", "Faker (==2.0.0)", "mock (==3.0.5)", "pytest-benchmark (==3.2.2)", "pytest-cov (==2.7.1)", "pytest-flask (==0.15.1)", "pytest-mock (==1.10.4)", "pytest-profiling (==1.7.0)", "tzlocal", "invoke (==1.3.0)", "readme-renderer (==24.0)", "twine (==1.15.0)", "pytest (==4.6.5)", "pytest (==5.4.1)", "ossaudit"] + +[[package]] +name = "frozenlist" +version = "1.2.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "hiredis" +version = "2.0.0" +description = "Python wrapper for hiredis" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[[package]] +name = "ipython" +version = "7.30.1" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +appnope = 
{version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["notebook", "ipywidgets"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] + +[[package]] +name = "itsdangerous" +version = "2.0.1" +description = "Safely pass data to untrusted environments and back." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "jedi" +version = "0.18.1" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.3.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "matplotlib-inline" +version = "0.1.3" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "multidict" +version = "5.2.0" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mypy" +version = "0.920" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3,<0.5.0" +tomli = ">=1.1.0,<3.0.0" +typing-extensions = ">=3.7.4" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pid" +version = "3.0.4" +description = "Pidfile featuring stale detection and file-locking, can also be used as context-manager or decorator" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +psutil = {version = ">=5.4.8", markers = "sys_platform == \"win32\""} + +[[package]] +name = "prompt-toolkit" +version = "3.0.24" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.8.0" +description = "Cross-platform lib for process and system monitoring in Python." +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pycountry" +version = "20.7.3" +description = "ISO country, subdivision, language, currency and script definitions and their translations" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pygments" +version = "2.10.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pyipasnhistory" +version = "2.1" +description = "Python client for IP ASN History" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" + +[package.dependencies] +requests = ">=2.26.0,<3.0.0" + +[package.extras] +docs = ["Sphinx (>=4.2.0,<5.0.0)"] + +[[package]] +name = "pyrsistent" +version = "0.18.0" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2021.3" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "redis" +version = "4.0.2" +description = "Python client for Redis database and key-value store" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +deprecated = "*" +hiredis = {version = ">=1.0.0", optional = true, markers = "extra == \"hiredis\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] + +[[package]] +name = "requests" +version = "2.26.0" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "soupsieve" +version = "2.3.1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "tomli" +version = "2.0.0" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "traitlets" +version = "5.1.1" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "types-python-dateutil" +version = "2.8.3" +description = "Typing stubs for python-dateutil" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-redis" +version = "4.0.3" +description = "Typing stubs for redis" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.26.1" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-setuptools" +version = "57.4.4" +description = "Typing stubs for setuptools" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "urllib3" +version = "1.26.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "werkzeug" +version = "2.0.2" +description = "The comprehensive WSGI web application library." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "wrapt" +version = "1.13.3" +description = "Module for decorators, wrappers and monkey patching." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "yarl" +version = "1.7.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.6.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.8" +content-hash = "79abc680b892c92c5c5173823e804562cb596fbef7d2fb9e54fb31ae262e2cdd" + +[metadata.files] +aiohttp = [ + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, + {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, + {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, + {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, + {file = 
"aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, + {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, + {file = 
"aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, + {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, + {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, + {file = 
"aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, + {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, + {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, + {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, +] +aiosignal = [ + {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, + {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, +] +aniso8601 = [ + {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, + {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, +] +appnope = [ + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, +] +async-timeout = [ + {file = "async-timeout-4.0.1.tar.gz", hash = "sha256:b930cb161a39042f9222f6efb7301399c87eeab394727ec5437924a36d6eef51"}, + {file = "async_timeout-4.0.1-py3-none-any.whl", hash = "sha256:a22c0b311af23337eb05fcf05a8b51c3ea53729d46fb5460af62bee033cec690"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, + {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, +] 
+bootstrap-flask = [ + {file = "Bootstrap-Flask-1.8.0.tar.gz", hash = "sha256:1c47a35b26b2ea2eaa0326b416c557aee77c7802f188055a8cfc4520a5cfdba2"}, + {file = "Bootstrap_Flask-1.8.0-py2.py3-none-any.whl", hash = "sha256:74a333756e997f2bb7d2d39a080eb2d1b4fd9da225f6cd5b56f0488ae5fe48ed"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.9.tar.gz", hash = "sha256:b0b883e8e874edfdece9c28f314e3dd5badf067342e42fb162203335ae61aa2c"}, + {file = "charset_normalizer-2.0.9-py3-none-any.whl", hash = "sha256:1eecaa09422db5be9e29d7fc65664e6c33bd06f9ced7838578ba40d58bdf3721"}, +] +click = [ + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, +] +decorator = [ + {file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"}, + {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"}, +] +deprecated = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] +flask = [ + {file = "Flask-2.0.2-py3-none-any.whl", hash = "sha256:cb90f62f1d8e4dc4621f52106613488b5ba826b2e1e10a33eac92f723093ab6a"}, + {file = "Flask-2.0.2.tar.gz", hash = "sha256:7b2fb8e934ddd50731893bdcdb00fc8c0315916f9fcd50d22c7cc1a95ab634e2"}, +] +flask-restx = [ + {file = "flask-restx-0.5.1.tar.gz", hash = "sha256:63c69a61999a34f1774eaccc6fc8c7f504b1aad7d56a8ec672264e52d9ac05f4"}, + {file = "flask_restx-0.5.1-py2.py3-none-any.whl", hash = "sha256:96157547acaa8892adcefd8c60abf9040212ac2a8634937a82946e07b46147fd"}, +] +frozenlist = [ + {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:977a1438d0e0d96573fd679d291a1542097ea9f4918a8b6494b06610dfeefbf9"}, + {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8d86547a5e98d9edd47c432f7a14b0c5592624b496ae9880fb6332f34af1edc"}, + {file = "frozenlist-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:181754275d5d32487431a0a29add4f897968b7157204bc1eaaf0a0ce80c5ba7d"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5df31bb2b974f379d230a25943d9bf0d3bc666b4b0807394b131a28fca2b0e5f"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4766632cd8a68e4f10f156a12c9acd7b1609941525569dd3636d859d79279ed3"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16eef427c51cb1203a7c0ab59d1b8abccaba9a4f58c4bfca6ed278fc896dc193"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:01d79515ed5aa3d699b05f6bdcf1fe9087d61d6b53882aa599a10853f0479c6c"}, + {file = 
"frozenlist-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28e164722ea0df0cf6d48c4d5bdf3d19e87aaa6dfb39b0ba91153f224b912020"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e63ad0beef6ece06475d29f47d1f2f29727805376e09850ebf64f90777962792"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41de4db9b9501679cf7cddc16d07ac0f10ef7eb58c525a1c8cbff43022bddca4"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a9d84ee6427b65a81fc24e6ef589cb794009f5ca4150151251c062773e7ed2"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:f5f3b2942c3b8b9bfe76b408bbaba3d3bb305ee3693e8b1d631fe0a0d4f93673"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c98d3c04701773ad60d9545cd96df94d955329efc7743fdb96422c4b669c633b"}, + {file = "frozenlist-1.2.0-cp310-cp310-win32.whl", hash = "sha256:72cfbeab7a920ea9e74b19aa0afe3b4ad9c89471e3badc985d08756efa9b813b"}, + {file = "frozenlist-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:11ff401951b5ac8c0701a804f503d72c048173208490c54ebb8d7bb7c07a6d00"}, + {file = "frozenlist-1.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b46f997d5ed6d222a863b02cdc9c299101ee27974d9bbb2fd1b3c8441311c408"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351686ca020d1bcd238596b1fa5c8efcbc21bffda9d0efe237aaa60348421e2a"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfbaa08cf1452acad9cb1c1d7b89394a41e712f88df522cea1a0f296b57782a0"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ae2f5e9fa10805fb1c9adbfefaaecedd9e31849434be462c3960a0139ed729"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6790b8d96bbb74b7a6f4594b6f131bd23056c25f2aa5d816bd177d95245a30e3"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:41f62468af1bd4e4b42b5508a3fe8cc46a693f0cdd0ca2f443f51f207893d837"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:ec6cf345771cdb00791d271af9a0a6fbfc2b6dd44cb753f1eeaa256e21622adb"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:14a5cef795ae3e28fb504b73e797c1800e9249f950e1c964bb6bdc8d77871161"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8b54cdd2fda15467b9b0bfa78cee2ddf6dbb4585ef23a16e14926f4b076dfae4"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f025f1d6825725b09c0038775acab9ae94264453a696cc797ce20c0769a7b367"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:84e97f59211b5b9083a2e7a45abf91cfb441369e8bb6d1f5287382c1c526def3"}, + {file = "frozenlist-1.2.0-cp36-cp36m-win32.whl", hash = "sha256:c5328ed53fdb0a73c8a50105306a3bc013e5ca36cca714ec4f7bd31d38d8a97f"}, + {file = "frozenlist-1.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:9ade70aea559ca98f4b1b1e5650c45678052e76a8ab2f76d90f2ac64180215a2"}, + {file = "frozenlist-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0d3ffa8772464441b52489b985d46001e2853a3b082c655ec5fad9fb6a3d618"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:3457f8cf86deb6ce1ba67e120f1b0128fcba1332a180722756597253c465fc1d"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a72eecf37eface331636951249d878750db84034927c997d47f7f78a573b72b"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:acc4614e8d1feb9f46dd829a8e771b8f5c4b1051365d02efb27a3229048ade8a"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:87521e32e18a2223311afc2492ef2d99946337da0779ddcda77b82ee7319df59"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b4c7665a17c3a5430edb663e4ad4e1ad457614d1b2f2b7f87052e2ef4fa45ca"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed58803563a8c87cf4c0771366cf0ad1aa265b6b0ae54cbbb53013480c7ad74d"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa44c4740b4e23fcfa259e9dd52315d2b1770064cde9507457e4c4a65a04c397"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2de5b931701257d50771a032bba4e448ff958076380b049fd36ed8738fdb375b"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6e105013fa84623c057a4381dc8ea0361f4d682c11f3816cc80f49a1f3bc17c6"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:705c184b77565955a99dc360f359e8249580c6b7eaa4dc0227caa861ef46b27a"}, + {file = "frozenlist-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:a37594ad6356e50073fe4f60aa4187b97d15329f2138124d252a5a19c8553ea4"}, + {file = "frozenlist-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:25b358aaa7dba5891b05968dd539f5856d69f522b6de0bf34e61f133e077c1a4"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af2a51c8a381d76eabb76f228f565ed4c3701441ecec101dd18be70ebd483cfd"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:82d22f6e6f2916e837c91c860140ef9947e31194c82aaeda843d6551cec92f19"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cfe6fef507f8bac40f009c85c7eddfed88c1c0d38c75e72fe10476cef94e10f"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f602e380a5132880fa245c92030abb0fc6ff34e0c5500600366cedc6adb06a"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ad065b2ebd09f32511ff2be35c5dfafee6192978b5a1e9d279a5c6e121e3b03"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc93f5f62df3bdc1f677066327fc81f92b83644852a31c6aa9b32c2dde86ea7d"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:89fdfc84c6bf0bff2ff3170bb34ecba8a6911b260d318d377171429c4be18c73"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:47b2848e464883d0bbdcd9493c67443e5e695a84694efff0476f9059b4cb6257"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f52d0732e56906f8ddea4bd856192984650282424049c956857fed43697ea43"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:16ef7dd5b7d17495404a2e7a49bac1bc13d6d20c16d11f4133c757dd94c4144c"}, + {file = 
"frozenlist-1.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1cf63243bc5f5c19762943b0aa9e0d3fb3723d0c514d820a18a9b9a5ef864315"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:54a1e09ab7a69f843cd28fefd2bcaf23edb9e3a8d7680032c8968b8ac934587d"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:954b154a4533ef28bd3e83ffdf4eadf39deeda9e38fb8feaf066d6069885e034"}, + {file = "frozenlist-1.2.0-cp38-cp38-win32.whl", hash = "sha256:cb3957c39668d10e2b486acc85f94153520a23263b6401e8f59422ef65b9520d"}, + {file = "frozenlist-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0a7c7cce70e41bc13d7d50f0e5dd175f14a4f1837a8549b0936ed0cbe6170bf9"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4c457220468d734e3077580a3642b7f682f5fd9507f17ddf1029452450912cdc"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e74f8b4d8677ebb4015ac01fcaf05f34e8a1f22775db1f304f497f2f88fdc697"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fbd4844ff111449f3bbe20ba24fbb906b5b1c2384d0f3287c9f7da2354ce6d23"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0081a623c886197ff8de9e635528fd7e6a387dccef432149e25c13946cb0cd0"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b6e21e5770df2dea06cb7b6323fbc008b13c4a4e3b52cb54685276479ee7676"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:406aeb340613b4b559db78d86864485f68919b7141dec82aba24d1477fd2976f"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:878ebe074839d649a1cdb03a61077d05760624f36d196884a5cafb12290e187b"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1fef737fd1388f9b93bba8808c5f63058113c10f4e3c0763ced68431773f72f9"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a495c3d513573b0b3f935bfa887a85d9ae09f0627cf47cad17d0cc9b9ba5c38"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7d0dd3e727c70c2680f5f09a0775525229809f1a35d8552b92ff10b2b14f2c2"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:66a518731a21a55b7d3e087b430f1956a36793acc15912e2878431c7aec54210"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:94728f97ddf603d23c8c3dd5cae2644fa12d33116e69f49b1644a71bb77b89ae"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c1e8e9033d34c2c9e186e58279879d78c94dd365068a3607af33f2bc99357a53"}, + {file = "frozenlist-1.2.0-cp39-cp39-win32.whl", hash = "sha256:83334e84a290a158c0c4cc4d22e8c7cfe0bba5b76d37f1c2509dabd22acafe15"}, + {file = "frozenlist-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:735f386ec522e384f511614c01d2ef9cf799f051353876b4c6fb93ef67a6d1ee"}, + {file = "frozenlist-1.2.0.tar.gz", hash = "sha256:68201be60ac56aff972dc18085800b6ee07973c49103a8aba669dee3d71079de"}, +] +gunicorn = [ + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] +hiredis = [ + {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash 
= "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, + {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, + {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, + {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, + {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, + {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, + {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, + {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, + {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, + {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, + {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, + {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, + {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, + {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, + {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, + {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, + {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, + {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, + {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] +ipython = [ + {file = "ipython-7.30.1-py3-none-any.whl", hash = "sha256:fc60ef843e0863dd4e24ab2bb5698f071031332801ecf8d1aeb4fb622056545c"}, + {file = "ipython-7.30.1.tar.gz", hash = "sha256:cb6aef731bf708a7727ab6cde8df87f0281b1427d41e65d62d4b68934fa54e97"}, +] +itsdangerous = [ + {file = "itsdangerous-2.0.1-py3-none-any.whl", hash = "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c"}, + {file = "itsdangerous-2.0.1.tar.gz", hash = "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"}, +] +jedi = [ + {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, + {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, +] +jinja2 = [ + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = 
"Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +jsonschema = [ + {file = "jsonschema-4.3.0-py3-none-any.whl", hash = "sha256:ab7069946a3ad2037e79a5cdc8d0e9a74cd00721d426d75c5d69a6707c778218"}, + {file = "jsonschema-4.3.0.tar.gz", hash = "sha256:cb7f57b40f870409d7571844d0623f66d8078c90a9c255d9a4d4314b5ec3fc7c"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, + {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, +] +multidict = [ + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:28e6d883acd8674887d7edc896b91751dc2d8e87fbdca8359591a13872799e4e"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61f85101ef08cbbc37846ac0e43f027f7844f3fade9b7f6dd087178caedeee7"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9b668c065968c5979fe6b6fa6760bb6ab9aeb94b75b73c0a9c1acf6393ac3bf"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517d75522b7b18a3385726b54a081afd425d4f41144a5399e5abd97ccafdf36b"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b4ac3ba7a97b35a5ccf34f41b5a8642a01d1e55454b699e5e8e7a99b5a3acf5"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:df23c83398715b26ab09574217ca21e14694917a0c857e356fd39e1c64f8283f"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e58a9b5cc96e014ddf93c2227cbdeca94b56a7eb77300205d6e4001805391747"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:f76440e480c3b2ca7f843ff8a48dc82446b86ed4930552d736c0bac507498a52"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cfde464ca4af42a629648c0b0d79b8f295cf5b695412451716531d6916461628"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0fed465af2e0eb6357ba95795d003ac0bdb546305cc2366b1fc8f0ad67cc3fda"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b70913cbf2e14275013be98a06ef4b412329fe7b4f83d64eb70dce8269ed1e1a"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5635bcf1b75f0f6ef3c8a1ad07b500104a971e38d3683167b9454cb6465ac86"}, + {file = "multidict-5.2.0-cp310-cp310-win32.whl", hash = "sha256:77f0fb7200cc7dedda7a60912f2059086e29ff67cefbc58d2506638c1a9132d7"}, + {file = "multidict-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9416cf11bcd73c861267e88aea71e9fcc35302b3943e45e1dbb4317f91a4b34f"}, + {file = "multidict-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd77c8f3cba815aa69cb97ee2b2ef385c7c12ada9c734b0f3b32e26bb88bbf1d"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ec9aea6223adf46999f22e2c0ab6cf33f5914be604a404f658386a8f1fba37"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5283c0a00f48e8cafcecadebfa0ed1dac8b39e295c7248c44c665c16dc1138b"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f79c19c6420962eb17c7e48878a03053b7ccd7b69f389d5831c0a4a7f1ac0a1"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4a67f1080123de76e4e97a18d10350df6a7182e243312426d508712e99988d4"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:94b117e27efd8e08b4046c57461d5a114d26b40824995a2eb58372b94f9fca02"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2e77282fd1d677c313ffcaddfec236bf23f273c4fba7cdf198108f5940ae10f5"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:116347c63ba049c1ea56e157fa8aa6edaf5e92925c9b64f3da7769bdfa012858"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:dc3a866cf6c13d59a01878cd806f219340f3e82eed514485e094321f24900677"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac42181292099d91217a82e3fa3ce0e0ddf3a74fd891b7c2b347a7f5aa0edded"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f0bb0973f42ffcb5e3537548e0767079420aefd94ba990b61cf7bb8d47f4916d"}, + {file = "multidict-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:ea21d4d5104b4f840b91d9dc8cbc832aba9612121eaba503e54eaab1ad140eb9"}, + {file = "multidict-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6453f3cbeb78440747096f239d282cc57a2997a16b5197c9bc839099e1633d0"}, + {file = "multidict-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3def943bfd5f1c47d51fd324df1e806d8da1f8e105cc7f1c76a1daf0f7e17b0"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35591729668a303a02b06e8dba0eb8140c4a1bfd4c4b3209a436a02a5ac1de11"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8cacda0b679ebc25624d5de66c705bc53dcc7c6f02a7fb0f3ca5e227d80422"}, + {file = 
"multidict-5.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:baf1856fab8212bf35230c019cde7c641887e3fc08cadd39d32a421a30151ea3"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a43616aec0f0d53c411582c451f5d3e1123a68cc7b3475d6f7d97a626f8ff90d"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25cbd39a9029b409167aa0a20d8a17f502d43f2efebfe9e3ac019fe6796c59ac"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a2cbcfbea6dc776782a444db819c8b78afe4db597211298dd8b2222f73e9cd0"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d2d7d1fff8e09d99354c04c3fd5b560fb04639fd45926b34e27cfdec678a704"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a37e9a68349f6abe24130846e2f1d2e38f7ddab30b81b754e5a1fde32f782b23"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:637c1896497ff19e1ee27c1c2c2ddaa9f2d134bbb5e0c52254361ea20486418d"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9815765f9dcda04921ba467957be543423e5ec6a1136135d84f2ae092c50d87b"}, + {file = "multidict-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:8b911d74acdc1fe2941e59b4f1a278a330e9c34c6c8ca1ee21264c51ec9b67ef"}, + {file = "multidict-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:380b868f55f63d048a25931a1632818f90e4be71d2081c2338fcf656d299949a"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7d81ce5744757d2f05fc41896e3b2ae0458464b14b5a2c1e87a6a9d69aefaa8"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1d55cdf706ddc62822d394d1df53573d32a7a07d4f099470d3cb9323b721b6"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4771d0d0ac9d9fe9e24e33bed482a13dfc1256d008d101485fe460359476065"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da7d57ea65744d249427793c042094c4016789eb2562576fb831870f9c878d9e"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd68778f96216596218b4e8882944d24a634d984ee1a5a049b300377878fa7c"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecc99bce8ee42dcad15848c7885197d26841cb24fa2ee6e89d23b8993c871c64"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:067150fad08e6f2dd91a650c7a49ba65085303fcc3decbd64a57dc13a2733031"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78c106b2b506b4d895ddc801ff509f941119394b89c9115580014127414e6c2d"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6c4fa1ec16e01e292315ba76eb1d012c025b99d22896bd14a66628b245e3e01"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b227345e4186809d31f22087d0265655114af7cda442ecaf72246275865bebe4"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:06560fbdcf22c9387100979e65b26fba0816c162b888cb65b845d3def7a54c9b"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7878b61c867fb2df7a95e44b316f88d5a3742390c99dfba6c557a21b30180cac"}, + {file = 
"multidict-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:246145bff76cc4b19310f0ad28bd0769b940c2a49fc601b86bfd150cbd72bb22"}, + {file = "multidict-5.2.0-cp38-cp38-win32.whl", hash = "sha256:c30ac9f562106cd9e8071c23949a067b10211917fdcb75b4718cf5775356a940"}, + {file = "multidict-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:f19001e790013ed580abfde2a4465388950728861b52f0da73e8e8a9418533c0"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c1ff762e2ee126e6f1258650ac641e2b8e1f3d927a925aafcfde943b77a36d24"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd6c9c50bf2ad3f0448edaa1a3b55b2e6866ef8feca5d8dbec10ec7c94371d21"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc66d4016f6e50ed36fb39cd287a3878ffcebfa90008535c62e0e90a7ab713ae"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9acb76d5f3dd9421874923da2ed1e76041cb51b9337fd7f507edde1d86535d6"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfc924a7e946dd3c6360e50e8f750d51e3ef5395c95dc054bc9eab0f70df4f9c"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32fdba7333eb2351fee2596b756d730d62b5827d5e1ab2f84e6cbb287cc67fe0"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9aad49466b8d828b96b9e3630006234879c8d3e2b0a9d99219b3121bc5cdb17"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93de39267c4c676c9ebb2057e98a8138bade0d806aad4d864322eee0803140a0"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9bef5cff994ca3026fcc90680e326d1a19df9841c5e3d224076407cc21471a1"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5f841c4f14331fd1e36cbf3336ed7be2cb2a8f110ce40ea253e5573387db7621"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:38ba256ee9b310da6a1a0f013ef4e422fca30a685bcbec86a969bd520504e341"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3bc3b1621b979621cee9f7b09f024ec76ec03cc365e638126a056317470bde1b"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ee908c070020d682e9b42c8f621e8bb10c767d04416e2ebe44e37d0f44d9ad5"}, + {file = "multidict-5.2.0-cp39-cp39-win32.whl", hash = "sha256:1c7976cd1c157fa7ba5456ae5d31ccdf1479680dc9b8d8aa28afabc370df42b8"}, + {file = "multidict-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:c9631c642e08b9fff1c6255487e62971d8b8e821808ddd013d8ac058087591ac"}, + {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, +] +mypy = [ + {file = "mypy-0.920-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41f3575b20714171c832d8f6c7aaaa0d499c9a2d1b8adaaf837b4c9065c38540"}, + {file = "mypy-0.920-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:431be889ffc8d9681813a45575c42e341c19467cbfa6dd09bf41467631feb530"}, + {file = "mypy-0.920-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8b2059f73878e92eff7ed11a03515d6572f4338a882dd7547b5f7dd242118e6"}, + {file = "mypy-0.920-cp310-cp310-win_amd64.whl", hash = "sha256:9cd316e9705555ca6a50670ba5fb0084d756d1d8cb1697c83820b1456b0bc5f3"}, + {file = 
"mypy-0.920-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e091fe58b4475b3504dc7c3022ff7f4af2f9e9ddf7182047111759ed0973bbde"}, + {file = "mypy-0.920-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98b4f91a75fed2e4c6339e9047aba95968d3a7c4b91e92ab9dc62c0c583564f4"}, + {file = "mypy-0.920-cp36-cp36m-win_amd64.whl", hash = "sha256:562a0e335222d5bbf5162b554c3afe3745b495d67c7fe6f8b0d1b5bace0c1eeb"}, + {file = "mypy-0.920-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:618e677aabd21f30670bffb39a885a967337f5b112c6fb7c79375e6dced605d6"}, + {file = "mypy-0.920-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40cb062f1b7ff4cd6e897a89d8ddc48c6ad7f326b5277c93a8c559564cc1551c"}, + {file = "mypy-0.920-cp37-cp37m-win_amd64.whl", hash = "sha256:69b5a835b12fdbfeed84ef31152d41343d32ccb2b345256d8682324409164330"}, + {file = "mypy-0.920-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:993c2e52ea9570e6e872296c046c946377b9f5e89eeb7afea2a1524cf6e50b27"}, + {file = "mypy-0.920-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df0fec878ccfcb2d1d2306ba31aa757848f681e7bbed443318d9bbd4b0d0fe9a"}, + {file = "mypy-0.920-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:331a81d2c9bf1be25317260a073b41f4584cd11701a7c14facef0aa5a005e843"}, + {file = "mypy-0.920-cp38-cp38-win_amd64.whl", hash = "sha256:ffb1e57ec49a30e3c0ebcfdc910ae4aceb7afb649310b7355509df6b15bd75f6"}, + {file = "mypy-0.920-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:31895b0b3060baf15bf76e789d94722c026f673b34b774bba9e8772295edccff"}, + {file = "mypy-0.920-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:140174e872d20d4768124a089b9f9fc83abd6a349b7f8cc6276bc344eb598922"}, + {file = "mypy-0.920-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:13b3c110309b53f5a62aa1b360f598124be33a42563b790a2a9efaacac99f1fc"}, + {file = "mypy-0.920-cp39-cp39-win_amd64.whl", hash = "sha256:82e6c15675264e923b60a11d6eb8f90665504352e68edfbb4a79aac7a04caddd"}, + {file = "mypy-0.920-py3-none-any.whl", hash = "sha256:71c77bd885d2ce44900731d4652d0d1c174dc66a0f11200e0c680bdedf1a6b37"}, + {file = "mypy-0.920.tar.gz", hash = "sha256:a55438627f5f546192f13255a994d6d1cf2659df48adcf966132b4379fd9c86b"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pid = [ + {file = "pid-3.0.4-py2.py3-none-any.whl", hash 
= "sha256:af2bf11c5d637bba8a80ce3368279c5eca28f08e201ac828538e1b9ad9e35ef9"}, + {file = "pid-3.0.4.tar.gz", hash = "sha256:0e33670e83f6a33ebb0822e43a609c3247178d4a375ff50a4689e266d853eb66"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.24-py3-none-any.whl", hash = "sha256:e56f2ff799bacecd3e88165b1e2f5ebf9bcd59e80e06d395fa0cc4b8bd7bb506"}, + {file = "prompt_toolkit-3.0.24.tar.gz", hash = "sha256:1bb05628c7d87b645974a1bad3f17612be0c29fa39af9f7688030163f680bad6"}, +] +psutil = [ + {file = "psutil-5.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0066a82f7b1b37d334e68697faba68e5ad5e858279fd6351c8ca6024e8d6ba64"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ae6f386d8d297177fd288be6e8d1afc05966878704dad9847719650e44fc49c"}, + {file = "psutil-5.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:12d844996d6c2b1d3881cfa6fa201fd635971869a9da945cf6756105af73d2df"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:02b8292609b1f7fcb34173b25e48d0da8667bc85f81d7476584d889c6e0f2131"}, + {file = "psutil-5.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6ffe81843131ee0ffa02c317186ed1e759a145267d54fdef1bc4ea5f5931ab60"}, + {file = "psutil-5.8.0-cp27-none-win32.whl", hash = "sha256:ea313bb02e5e25224e518e4352af4bf5e062755160f77e4b1767dd5ccb65f876"}, + {file = "psutil-5.8.0-cp27-none-win_amd64.whl", hash = "sha256:5da29e394bdedd9144c7331192e20c1f79283fb03b06e6abd3a8ae45ffecee65"}, + {file = "psutil-5.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:74fb2557d1430fff18ff0d72613c5ca30c45cdbfcddd6a5773e9fc1fe9364be8"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:74f2d0be88db96ada78756cb3a3e1b107ce8ab79f65aa885f76d7664e56928f6"}, + {file = "psutil-5.8.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99de3e8739258b3c3e8669cb9757c9a861b2a25ad0955f8e53ac662d66de61ac"}, + {file = "psutil-5.8.0-cp36-cp36m-win32.whl", hash = "sha256:36b3b6c9e2a34b7d7fbae330a85bf72c30b1c827a4366a07443fc4b6270449e2"}, + {file = "psutil-5.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:52de075468cd394ac98c66f9ca33b2f54ae1d9bff1ef6b67a212ee8f639ec06d"}, + {file = "psutil-5.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a5fd10ce6b6344e616cf01cc5b849fa8103fbb5ba507b6b2dee4c11e84c935"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:61f05864b42fedc0771d6d8e49c35f07efd209ade09a5afe6a5059e7bb7bf83d"}, + {file = "psutil-5.8.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0dd4465a039d343925cdc29023bb6960ccf4e74a65ad53e768403746a9207023"}, + {file = "psutil-5.8.0-cp37-cp37m-win32.whl", hash = "sha256:1bff0d07e76114ec24ee32e7f7f8d0c4b0514b3fae93e3d2aaafd65d22502394"}, + {file = "psutil-5.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fcc01e900c1d7bee2a37e5d6e4f9194760a93597c97fee89c4ae51701de03563"}, + {file = "psutil-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6223d07a1ae93f86451d0198a0c361032c4c93ebd4bf6d25e2fb3edfad9571ef"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d225cd8319aa1d3c85bf195c4e07d17d3cd68636b8fc97e6cf198f782f99af28"}, + {file = "psutil-5.8.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:28ff7c95293ae74bf1ca1a79e8805fcde005c18a122ca983abf676ea3466362b"}, + {file = "psutil-5.8.0-cp38-cp38-win32.whl", hash = "sha256:ce8b867423291cb65cfc6d9c4955ee9bfc1e21fe03bb50e177f2b957f1c2469d"}, + {file = "psutil-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:90f31c34d25b1b3ed6c40cdd34ff122b1887a825297c017e4cbd6796dd8b672d"}, + 
{file = "psutil-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6323d5d845c2785efb20aded4726636546b26d3b577aded22492908f7c1bdda7"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:245b5509968ac0bd179287d91210cd3f37add77dad385ef238b275bad35fa1c4"}, + {file = "psutil-5.8.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:90d4091c2d30ddd0a03e0b97e6a33a48628469b99585e2ad6bf21f17423b112b"}, + {file = "psutil-5.8.0-cp39-cp39-win32.whl", hash = "sha256:ea372bcc129394485824ae3e3ddabe67dc0b118d262c568b4d2602a7070afdb0"}, + {file = "psutil-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f4634b033faf0d968bb9220dd1c793b897ab7f1189956e1aa9eae752527127d3"}, + {file = "psutil-5.8.0.tar.gz", hash = "sha256:0c9ccb99ab76025f2f0bbecf341d4656e9c1351db8cc8a03ccd62e318ab4b5c6"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pycountry = [ + {file = "pycountry-20.7.3.tar.gz", hash = "sha256:81084a53d3454344c0292deebc20fcd0a1488c136d4900312cbd465cf552cb42"}, +] +pygments = [ + {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, + {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, +] +pyipasnhistory = [ + {file = "pyipasnhistory-2.1-py3-none-any.whl", hash = "sha256:38f84f52270a7da276c602b3c7bd267c75a4f92fe7f9a78664fbe732b9a3fc56"}, + {file = "pyipasnhistory-2.1.tar.gz", hash = "sha256:df10cbdf9db2cba7a83ce23b93b12f066635b572f535ba65fb625075a68c917a"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, + {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, + {file = 
"pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, + {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +redis = [ + {file = "redis-4.0.2-py3-none-any.whl", hash = "sha256:c8481cf414474e3497ec7971a1ba9b998c8efad0f0d289a009a5bbef040894f9"}, + {file = "redis-4.0.2.tar.gz", hash = "sha256:ccf692811f2c1fc7a92b466aa2599e4a6d2d73d5f736a2c70be600657c0da34a"}, +] +requests = [ + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +soupsieve = [ + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, +] +tomli = [ + {file = "tomli-2.0.0-py3-none-any.whl", hash = "sha256:b5bde28da1fed24b9bd1d4d2b8cba62300bfb4ec9a6187a957e8ddb9434c5224"}, + {file = "tomli-2.0.0.tar.gz", hash = "sha256:c292c34f58502a1eb2bbb9f5bbc9a5ebc37bee10ffb8c2d6bbdfa8eb13cc14e1"}, +] +traitlets = [ + {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, + {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, +] +types-python-dateutil = [ + {file = "types-python-dateutil-2.8.3.tar.gz", hash = "sha256:d94e7c7ecd9f0e23b3a78087eae12c0d7aa4af9e067a8ea963ad03ed0abd1cb7"}, + {file = 
"types_python_dateutil-2.8.3-py3-none-any.whl", hash = "sha256:42262d0b8f8ecb06cdc5c458956685eb3b27c74f170adf541d1cc5ee4ff68bdc"}, +] +types-redis = [ + {file = "types-redis-4.0.3.tar.gz", hash = "sha256:c7a99bdfea2e4fdf57952159614d85f0d08991007f0666dfa3c4220c3baa8fba"}, + {file = "types_redis-4.0.3-py3-none-any.whl", hash = "sha256:0c980f5df48610b0933cfe914c75e7c7eed8cf10025c47391a016b844145233a"}, +] +types-requests = [ + {file = "types-requests-2.26.1.tar.gz", hash = "sha256:0893e112e1510bbb67f537941c92192de7472e51bf7f236e0e583866f0ed933e"}, + {file = "types_requests-2.26.1-py3-none-any.whl", hash = "sha256:853571b3accc188976c0f4feffcaebf6cdfc170082b5e43f3358aa78de61f531"}, +] +types-setuptools = [ + {file = "types-setuptools-57.4.4.tar.gz", hash = "sha256:a3cbcbf3f02142bb5d3b5c5f5918f453b8752362b96d58aba2a5cfa43ba6d209"}, + {file = "types_setuptools-57.4.4-py3-none-any.whl", hash = "sha256:9fe4180548e5cbb44cc0d343a47e4dc1d6769c31e16447994788c28df169011f"}, +] +typing-extensions = [ + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, +] +urllib3 = [ + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +werkzeug = [ + {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"}, + {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"}, +] +wrapt = [ + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = 
"wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, +] +yarl = [ + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, + {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, + {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, + {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, + {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, + {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", 
hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, + {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, + {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, + {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, + 
{file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, + {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, + {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, + {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, + {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, + {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, +] +zipp = [ + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 
0000000..85feb6b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,64 @@ +[tool.poetry] +name = "bgpranking" +version = "2.0" +description = "BGP Ranking is software that ranks AS numbers based on their malicious activities." +authors = ["Raphaël Vinot "] +license = "AGPLv3" + +[tool.poetry.scripts] +start = "bin.start:main" +stop = "bin.stop:main" +update = "bin.update:main" +shutdown = "bin.shutdown:main" +run_backend = "bin.run_backend:main" +start_website = "bin.start_website:main" + +archiver = "bin.archiver:main" +asn_descriptions = "bin.asn_descriptions:main" +dbinsert = "bin.dbinsert:main" +fetcher = "bin.fetcher:main" +parser = "bin.parser:main" +ranking = "bin.ranking:main" +sanitizer = "bin.sanitizer:main" +ssfetcher = "bin.ssfetcher:main" + + +[tool.poetry.dependencies] +python = "^3.8" +redis = {version = "^4.0.2", extras = ["hiredis"]} +flask-restx = "^0.5.1" +gunicorn = "^20.1.0" +python-dateutil = "^2.8.2" +pyipasnhistory = "^2.0" +pycountry = "^20.7.3" +beautifulsoup4 = "^4.10.0" +aiohttp = "^3.8.1" +Bootstrap-Flask = "^1.8.0" +pid = "^3.0.4" + +[tool.poetry.dev-dependencies] +ipython = "^7.23.0" +mypy = "^0.920" +types-setuptools = "^57.4.4" +types-redis = "^4.0.3" +types-requests = "^2.26.1" +types-python-dateutil = "^2.8.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.mypy] +python_version = 3.8 +check_untyped_defs = true +ignore_errors = false +ignore_missing_imports = false +strict_optional = true +no_implicit_optional = true +warn_unused_ignores = true +warn_redundant_casts = true +warn_unused_configs = true +warn_unreachable = true + +show_error_context = true +pretty = true diff --git a/ranking/kvrocks.conf b/ranking/kvrocks.conf new file mode 100644 index 0000000..6de8c63 --- /dev/null +++ b/ranking/kvrocks.conf @@ -0,0 +1,497 @@ +################################ GENERAL ##################################### + +# By default kvrocks listens for connections from all the network interfaces +# available on the server. It is possible to listen to just one or multiple +# interfaces using the "bind" configuration directive, followed by one or +# more IP addresses. +# +# Examples: +# +# bind 192.168.1.100 10.0.0.1 +# bind 127.0.0.1 +bind 0.0.0.0 + +# Accept connections on the specified port, default is 6666. +port 5189 + +# Close the connection after a client is idle for N seconds (0 to disable) +timeout 0 + +# The number of worker threads; increasing or decreasing it will affect the performance. +workers 8 + +# The number of replication worker threads; increasing or decreasing it will affect the replication performance. +# Default: 1 +repl-workers 1 + +# By default kvrocks does not run as a daemon. Use 'yes' if you need it. +# Note that kvrocks will write a pid file in /var/run/kvrocks.pid when daemonized. +daemonize no + +# Kvrocks implements a cluster solution that is similar to the redis cluster solution. +# You can get cluster information with the CLUSTER NODES|SLOTS|INFO commands, and it is +# also compatible with redis-cli, redis-benchmark, the redis cluster SDKs and the redis cluster proxy. +# But kvrocks nodes don't communicate with each other, so you must set the +# cluster topology with the CLUSTER SETNODES|SETNODEID commands, more details: #219. +# +# PLEASE NOTE: +# If you enable cluster mode, kvrocks will encode each key with its slot id, calculated by +# CRC16 modulo 16384; encoding keys with their slot id makes it efficient to +# migrate keys based on slot. So once cluster mode has been enabled, it must +# not be disabled after restarting, and vice versa. That is to say, data is not +# compatible between standalone mode and cluster mode; you must migrate the data +# if you want to change modes, otherwise kvrocks will corrupt it. +# +# Default: no +cluster-enabled no
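+ +# Example (not part of the upstream kvrocks defaults): kvrocks speaks the Redis +# protocol and, as noted above, is compatible with redis-cli, so the ranking +# instance configured here (bind 0.0.0.0, port 5189) can be checked with a plain +# Redis client once it has been started, for instance via the ranking/run_kvrocks.sh +# script added in this change (assuming redis-cli is installed locally): +# +#   redis-cli -p 5189 ping +# +# A healthy instance answers PONG.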
+ +# Set the max number of connected clients at the same time. By default +# this limit is set to 10000 clients, however, if the server is not +# able to configure the process file limit to allow for the specified limit, +# the max number of allowed clients is set to the current file limit +# +# Once the limit is reached the server will close all the new connections sending +# an error 'max number of clients reached'. +# +maxclients 10000 + +# Require clients to issue AUTH before processing any other +# commands. This might be useful in environments in which you do not trust +# others with access to the host running kvrocks. +# +# This should stay commented out for backward compatibility and because most +# people do not need auth (e.g. they run their own servers). +# +# Warning: since kvrocks is pretty fast an outside user can try up to +# 150k passwords per second against a good box. This means that you should +# use a very strong password otherwise it will be very easy to break. +# +# requirepass foobared + +# If the master is password protected (using the "masterauth" configuration +# directive below) it is possible to tell the slave to authenticate before +# starting the replication synchronization process, otherwise the master will +# refuse the slave request. +# +# masterauth foobared + +# Master-slave replication checks that the db name matches; if not, the slave +# refuses to sync the db from the master. Don't use the default value; set the db-name +# to identify the cluster. +db-name storage.db + +# The working directory +# +# The DB will be written inside this directory +# Note that you must specify a directory here, not a file name. +dir ./ + +# The server logs will be stored in this directory. If you don't specify +# a directory, logs are stored by default in the working directory set +# by 'dir' above. +# log-dir /tmp/kvrocks + +# When running daemonized, kvrocks writes a pid file in ${CONFIG_DIR}/kvrocks.pid by +# default. You can specify a custom pid file location here. +# pidfile /var/run/kvrocks.pid +pidfile storage.pid + +# You can configure a slave instance to accept writes or not. Writing against +# a slave instance may be useful to store some ephemeral data (because data +# written on a slave will be easily deleted after resync with the master) but +# may also cause problems if clients are writing to it because of a +# misconfiguration. +slave-read-only yes + +# The slave priority is an integer number published by Kvrocks in the INFO output. +# It is used by Redis Sentinel in order to select a slave to promote into a +# master if the master is no longer working correctly. +# +# A slave with a low priority number is considered better for promotion, so +# for instance if there are three slaves with priorities 10, 100, 25, Sentinel will +# pick the one with priority 10, as it is the lowest. +# +# However a special priority of 0 marks the replica as not able to perform the +# role of master, so a slave with priority of 0 will never be selected by +# Redis Sentinel for promotion. +# +# By default the priority is 100. +slave-priority 100 + +# TCP listen() backlog.
+# +# In high requests-per-second environments you need an high backlog in order +# to avoid slow clients connections issues. Note that the Linux kernel +# will silently truncate it to the value of /proc/sys/net/core/somaxconn so +# make sure to raise both the value of somaxconn and tcp_max_syn_backlog +# in order to Get the desired effect. +tcp-backlog 511 + +# If the master is an old version, it may have specified replication threads +# that use 'port + 1' as listening port, but in new versions, we don't use +# extra port to implement replication. In order to allow the new replicas to +# copy old masters, you should indicate that the master uses replication port +# or not. +# If yes, that indicates master uses replication port and replicas will connect +# to 'master's listening port + 1' when synchronization. +# If no, that indicates master doesn't use replication port and replicas will +# connect 'master's listening port' when synchronization. +master-use-repl-port no + +# Master-Slave replication. Use slaveof to make a kvrocks instance a copy of +# another kvrocks server. A few things to understand ASAP about kvrocks replication. +# +# 1) Kvrocks replication is asynchronous, but you can configure a master to +# stop accepting writes if it appears to be not connected with at least +# a given number of slaves. +# 2) Kvrocks slaves are able to perform a partial resynchronization with the +# master if the replication link is lost for a relatively small amount of +# time. You may want to configure the replication backlog size (see the next +# sections of this file) with a sensible value depending on your needs. +# 3) Replication is automatic and does not need user intervention. After a +# network partition slaves automatically try to reconnect to masters +# and resynchronize with them. +# +# slaveof +# slaveof 127.0.0.1 6379 + +# When a slave loses its connection with the master, or when the replication +# is still in progress, the slave can act in two different ways: +# +# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will +# still reply to client requests, possibly with out of date data, or the +# data set may just be empty if this is the first synchronization. +# +# 2) if slave-serve-stale-data is set to 'no' the slave will reply with +# an error "SYNC with master in progress" to all the kind of commands +# but to INFO and SLAVEOF. +# +slave-serve-stale-data yes + +# To guarantee slave's data safe and serve when it is in full synchronization +# state, slave still keep itself data. But this way needs to occupy much disk +# space, so we provide a way to reduce disk occupation, slave will delete itself +# entire database before fetching files from master during full synchronization. +# If you want to enable this way, you can set 'slave-delete-db-before-fullsync' +# to yes, but you must know that database will be lost if master is down during +# full synchronization, unless you have a backup of database. +# +# This option is similar redis replicas RDB diskless load option: +# repl-diskless-load on-empty-db +# +# Default: no +slave-empty-db-before-fullsync no + +# If replicas need full synchronization with master, master need to create +# checkpoint for feeding replicas, and replicas also stage a checkpoint of +# the master. If we also keep the backup, it maybe occupy extra disk space. +# You can enable 'purge-backup-on-fullsync' if disk is not sufficient, but +# that may cause remote backup copy failing. 
+# +# Default: no +purge-backup-on-fullsync no + +# The maximum allowed rate (in MB/s) that should be used by Replication. +# If the rate exceeds max-replication-mb, replication will slow down. +# Default: 0 (i.e. no limit) +max-replication-mb 0 + +# The maximum allowed aggregated write rate of flush and compaction (in MB/s). +# If the rate exceeds max-io-mb, io will slow down. +# 0 is no limit +# Default: 500 +max-io-mb 500 + +# The maximum allowed space (in GB) that should be used by RocksDB. +# If the total size of the SST files exceeds max_allowed_space, writes to RocksDB will fail. +# Please see: https://github.com/facebook/rocksdb/wiki/Managing-Disk-Space-Utilization +# Default: 0 (i.e. no limit) +max-db-size 0 + +# The maximum backup to keep, server cron would run every minutes to check the num of current +# backup, and purge the old backup if exceed the max backup num to keep. If max-backup-to-keep +# is 0, no backup would be keep. But now, we only support 0 or 1. +max-backup-to-keep 1 + +# The maximum hours to keep the backup. If max-backup-keep-hours is 0, wouldn't purge any backup. +# default: 1 day +max-backup-keep-hours 24 + +# Ratio of the samples would be recorded when the profiling was enabled. +# we simply use the rand to determine whether to record the sample or not. +# +# Default: 0 +profiling-sample-ratio 0 + +# There is no limit to this length. Just be aware that it will consume memory. +# You can reclaim memory used by the perf log with PERFLOG RESET. +# +# Default: 256 +profiling-sample-record-max-len 256 + +# profiling-sample-record-threshold-ms use to tell the kvrocks when to record. +# +# Default: 100 millisecond +profiling-sample-record-threshold-ms 100 + +################################## SLOW LOG ################################### + +# The Kvrocks Slow Log is a system to log queries that exceeded a specified +# execution time. The execution time does not include the I/O operations +# like talking with the client, sending the reply and so forth, +# but just the time needed to actually execute the command (this is the only +# stage of command execution where the thread is blocked and can not serve +# other requests in the meantime). +# +# You can configure the slow log with two parameters: one tells Kvrocks +# what is the execution time, in microseconds, to exceed in order for the +# command to get logged, and the other parameter is the length of the +# slow log. When a new command is logged the oldest one is removed from the +# queue of logged commands. + +# The following time is expressed in microseconds, so 1000000 is equivalent +# to one second. Note that -1 value disables the slow log, while +# a value of zero forces the logging of every command. +slowlog-log-slower-than 100000 + +# There is no limit to this length. Just be aware that it will consume memory. +# You can reclaim memory used by the slow log with SLOWLOG RESET. +slowlog-max-len 128 + +# If you run kvrocks from upstart or systemd, kvrocks can interact with your +# supervision tree. Options: +# supervised no - no supervision interaction +# supervised upstart - signal upstart by putting kvrocks into SIGSTOP mode +# supervised systemd - signal systemd by writing READY=1 to $NOTIFY_SOCKET +# supervised auto - detect upstart or systemd method based on +# UPSTART_JOB or NOTIFY_SOCKET environment variables +# Note: these supervision methods only signal "process is ready." +# They do not enable continuous liveness pings back to your supervisor. 
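+# Note: in this repository the ranking kvrocks instance is normally started
+# directly via ranking/run_kvrocks.sh rather than through an init system,
+# which is presumably why supervision is left disabled here.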
+supervised no + +################################## CRON ################################### + +# Compact Scheduler, auto compact at schedule time +# time expression format is the same as crontab(currently only support * and int) +# e.g. compact-cron 0 3 * * * 0 4 * * * +# would compact the db at 3am and 4am everyday +# compact-cron 0 3 * * * + +# The hour range that compaction checker would be active +# e.g. compaction-checker-range 0-7 means compaction checker would be worker between +# 0-7am every day. +compaction-checker-range 0-7 + +# Bgsave scheduler, auto bgsave at schedule time +# time expression format is the same as crontab(currently only support * and int) +# e.g. bgsave-cron 0 3 * * * 0 4 * * * +# would bgsave the db at 3am and 4am everyday + +# Command renaming. +# +# It is possible to change the name of dangerous commands in a shared +# environment. For instance the KEYS command may be renamed into something +# hard to guess so that it will still be available for internal-use tools +# but not available for general clients. +# +# Example: +# +# rename-command KEYS b840fc02d524045429941cc15f59e41cb7be6c52 +# +# It is also possible to completely kill a command by renaming it into +# an empty string: +# +# rename-command KEYS "" + +# The key-value size may so be quite different in many scenes, and use 256MiB as SST file size +# may cause data loading(large index/filter block) ineffective when the key-value was too small. +# kvrocks supports user-defined SST file in config(rocksdb.target_file_size_base), +# but it still too trivial and inconvenient to adjust the different sizes for different instances. +# so we want to periodic auto-adjust the SST size in-flight with user avg key-value size. +# +# If enabled, kvrocks will auto resize rocksdb.target_file_size_base +# and rocksdb.write_buffer_size in-flight with user avg key-value size. +# Please see #118. +# +# Default: yes +auto-resize-block-and-sst yes + +################################ ROCKSDB ##################################### + +# Specify the capacity of metadata column family block cache. Larger block cache +# may make request faster while more keys would be cached. Max Size is 200*1024. +# Default: 2048MB +rocksdb.metadata_block_cache_size 2048 + +# Specify the capacity of subkey column family block cache. Larger block cache +# may make request faster while more keys would be cached. Max Size is 200*1024. +# Default: 2048MB +rocksdb.subkey_block_cache_size 2048 + +# Metadata column family and subkey column family will share a single block cache +# if set 'yes'. The capacity of shared block cache is +# metadata_block_cache_size + subkey_block_cache_size +# +# Default: yes +rocksdb.share_metadata_and_subkey_block_cache yes + +# Number of open files that can be used by the DB. You may need to +# increase this if your database has a large working set. Value -1 means +# files opened are always kept open. You can estimate number of files based +# on target_file_size_base and target_file_size_multiplier for level-based +# compaction. For universal-style compaction, you can usually set it to -1. +# Default: 4096 +rocksdb.max_open_files 8096 + +# Amount of data to build up in memory (backed by an unsorted log +# on disk) before converting to a sorted on-disk file. +# +# Larger values increase performance, especially during bulk loads. +# Up to max_write_buffer_number write buffers may be held in memory +# at the same time, +# so you may wish to adjust this parameter to control memory usage. 
+# Also, a larger write buffer will result in a longer recovery time +# the next time the database is opened. +# +# Note that write_buffer_size is enforced per column family. +# See db_write_buffer_size for sharing memory across column families. + +# default is 64MB +rocksdb.write_buffer_size 16 + +# Target file size for compaction, target file size for Leve N can be caculated +# by target_file_size_base * (target_file_size_multiplier ^ (L-1)) +# +# Default: 128MB +rocksdb.target_file_size_base 16 + +# The maximum number of write buffers that are built up in memory. +# The default and the minimum number is 2, so that when 1 write buffer +# is being flushed to storage, new writes can continue to the other +# write buffer. +# If max_write_buffer_number > 3, writing will be slowed down to +# options.delayed_write_rate if we are writing to the last write buffer +# allowed. +rocksdb.max_write_buffer_number 4 + +# Maximum number of concurrent background compaction jobs, submitted to +# the default LOW priority thread pool. +rocksdb.max_background_compactions 4 + +# Maximum number of concurrent background memtable flush jobs, submitted by +# default to the HIGH priority thread pool. If the HIGH priority thread pool +# is configured to have zero threads, flush jobs will share the LOW priority +# thread pool with compaction jobs. +rocksdb.max_background_flushes 4 + +# This value represents the maximum number of threads that will +# concurrently perform a compaction job by breaking it into multiple, +# smaller ones that are run simultaneously. +# Default: 2 (i.e. no subcompactions) +rocksdb.max_sub_compactions 2 + +# In order to limit the size of WALs, RocksDB uses DBOptions::max_total_wal_size +# as the trigger of column family flush. Once WALs exceed this size, RocksDB +# will start forcing the flush of column families to allow deletion of some +# oldest WALs. This config can be useful when column families are updated at +# non-uniform frequencies. If there's no size limit, users may need to keep +# really old WALs when the infrequently-updated column families hasn't flushed +# for a while. +# +# In kvrocks, we use multiple column families to store metadata, subkeys, etc. +# If users always use string type, but use list, hash and other complex data types +# infrequently, there will be a lot of old WALs if we don't set size limit +# (0 by default in rocksdb), because rocksdb will dynamically choose the WAL size +# limit to be [sum of all write_buffer_size * max_write_buffer_number] * 4 if set to 0. +# +# Moreover, you should increase this value if you already set rocksdb.write_buffer_size +# to a big value, to avoid influencing the effect of rocksdb.write_buffer_size and +# rocksdb.max_write_buffer_number. +# +# default is 512MB +rocksdb.max_total_wal_size 512 + +# We impl the repliction with rocksdb WAL, it would trigger full sync when the seq was out of range. +# wal_ttl_seconds and wal_size_limit_mb would affect how archived logswill be deleted. +# If WAL_ttl_seconds is not 0, then WAL files will be checked every WAL_ttl_seconds / 2 and those that +# are older than WAL_ttl_seconds will be deleted# +# +# Default: 3 Hours +rocksdb.wal_ttl_seconds 10800 + +# If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0, +# WAL files will be checked every 10 min and if total size is greater +# then WAL_size_limit_MB, they will be deleted starting with the +# earliest until size_limit is met. 
All empty files will be deleted +# Default: 16GB +rocksdb.wal_size_limit_mb 16384 + +# Approximate size of user data packed per block. Note that the +# block size specified here corresponds to uncompressed data. The +# actual size of the unit read from disk may be smaller if +# compression is enabled. +# +# Default: 4KB +rocksdb.block_size 2048 + +# Indicating if we'd put index/filter blocks to the block cache +# +# Default: no +rocksdb.cache_index_and_filter_blocks yes + +# Specify the compression to use. +# Accept value: "no", "snappy" +# default snappy +rocksdb.compression snappy + +# If non-zero, we perform bigger reads when doing compaction. If you're +# running RocksDB on spinning disks, you should set this to at least 2MB. +# That way RocksDB's compaction is doing sequential instead of random reads. +# When non-zero, we also force new_table_reader_for_compaction_inputs to +# true. +# +# Default: 2 MB +rocksdb.compaction_readahead_size 2097152 + +# he limited write rate to DB if soft_pending_compaction_bytes_limit or +# level0_slowdown_writes_trigger is triggered. + +# If the value is 0, we will infer a value from `rater_limiter` value +# if it is not empty, or 16MB if `rater_limiter` is empty. Note that +# if users change the rate in `rate_limiter` after DB is opened, +# `delayed_write_rate` won't be adjusted. +# +rocksdb.delayed_write_rate 0 +# If enable_pipelined_write is true, separate write thread queue is +# maintained for WAL write and memtable write. +# +# Default: no +rocksdb.enable_pipelined_write no + +# Soft limit on number of level-0 files. We start slowing down writes at this +# point. A value <0 means that no writing slow down will be triggered by +# number of files in level-0. +# +# Default: 20 +rocksdb.level0_slowdown_writes_trigger 20 + +# Maximum number of level-0 files. We stop writes at this point. +# +# Default: 40 +rocksdb.level0_stop_writes_trigger 40 + +# if not zero, dump rocksdb.stats to LOG every stats_dump_period_sec +# +# Default: 0 +rocksdb.stats_dump_period_sec 0 + +# if yes, the auto compaction would be disabled, but the manual compaction remain works +# +# Default: no +rocksdb.disable_auto_compactions no +################################ NAMESPACE ##################################### +# namespace.test change.me + + +backup-dir .//backup +log-dir ./ diff --git a/ranking/run_kvrocks.sh b/ranking/run_kvrocks.sh new file mode 100755 index 0000000..6df689d --- /dev/null +++ b/ranking/run_kvrocks.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -e +set -x + +../../kvrocks/src/kvrocks -c kvrocks.conf diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 77613e7..0000000 --- a/requirements.txt +++ /dev/null @@ -1,39 +0,0 @@ --i https://pypi.org/simple --e . 
--e ./client --e git+https://github.com/D4-project/IPASN-History.git/@283539cfbbde4bb54497726634407025f7d685c2#egg=pyipasnhistory&subdirectory=client --e git+https://github.com/MISP/PyMISPGalaxies.git@a59a5c18024aedda0c1306c917e09bdb8596ef48#egg=pymispgalaxies --e git+https://github.com/MISP/PyTaxonomies.git@f28fc11bd682aba35778efbcd8a65e68d1225a3f#egg=pytaxonomies --e git+https://github.com/trbs/pid.git/@240d6e848fcb0ebbf80c88b9d724dcb85978a019#egg=pid -aiohttp==3.5.4 -async-timeout==3.0.1 -attrs==19.1.0 -beautifulsoup4==4.8.0 -certifi==2019.6.16 -chardet==3.0.4 -click==7.0 -dominate==2.4.0 -flask-bootstrap==3.3.7.1 -flask==1.1.1 -gevent==1.4.0 -greenlet==0.4.15 ; platform_python_implementation == 'CPython' -gunicorn[gevent]==19.9.0 -hiredis==1.0.0 -idna-ssl==1.1.0 ; python_version < '3.7' -idna==2.8 -itsdangerous==1.1.0 -jinja2==2.10.1 -markupsafe==1.1.1 -multidict==4.5.2 -pycountry==19.8.18 -python-dateutil==2.8.0 -redis==3.3.8 -requests==2.22.0 -simplejson==3.16.0 -six==1.12.0 -soupsieve==1.9.3 -typing-extensions==3.7.4 ; python_version < '3.7' -urllib3==1.25.3 -visitor==0.1.3 -werkzeug==0.15.5 -yarl==1.3.0 diff --git a/storage/ardb.conf b/storage/ardb.conf deleted file mode 100644 index 99b055e..0000000 --- a/storage/ardb.conf +++ /dev/null @@ -1,468 +0,0 @@ -# Ardb configuration file example, modified from redis's conf file. - -# Home dir for ardb instance, it can be referenced by ${ARDB_HOME} in this config file -home . - -# Note on units: when memory size is needed, it is possible to specify -# it in the usual form of 1k 5GB 4M and so forth: -# -# 1k => 1000 bytes -# 1kb => 1024 bytes -# 1m => 1000000 bytes -# 1mb => 1024*1024 bytes -# 1g => 1000000000 bytes -# 1gb => 1024*1024*1024 bytes -# -# units are case insensitive so 1GB 1Gb 1gB are all the same. - -# By default Ardb does not run as a daemon. Use 'yes' if you need it. -daemonize yes - -# When running daemonized, Ardb writes a pid file in ${ARDB_HOME}/ardb.pid by -# default. You can specify a custom pid file location here. -pidfile ${ARDB_HOME}/ardb.pid - -# The thread pool size for the corresponding all listen servers, -1 means current machine's cpu number -thread-pool-size -1 - -#Accept connections on the specified host&port/unix socket, default is 0.0.0.0:16379. -#server[0].listen 0.0.0.0:16579 -# If current qps exceed the limit, Ardb would return an error. -#server[0].qps-limit 1000 - -#listen on unix socket -server[0].listen storage.sock -server[0].unixsocketperm 755 -#server[1].qps-limit 1000 - -# 'qps-limit-per-host' used to limit the request per second from same host -# 'qps-limit-per-connection' used to limit the request per second from same connection -qps-limit-per-host 0 -qps-limit-per-connection 0 - -# Specify the optimized RocksDB compaction strategies. -# If anything other than none is set then the rocksdb.options will not be used. -# The property can one of: -# OptimizeLevelStyleCompaction -# OptimizeUniversalStyleCompaction -# none -# -rocksdb.compaction OptimizeLevelStyleCompaction - -# Enable this to indicate that hsca/sscan/zscan command use total order mode for rocksdb engine -rocksdb.scan-total-order false - -# Disable RocksDB WAL may improve the write performance but -# data in the un-flushed memtables might be lost in case of a RocksDB shutdown. -# Disabling WAL provides similar guarantees as Redis. 
-rocksdb.disableWAL false - -#rocksdb's options -rocksdb.options write_buffer_size=1024M;max_write_buffer_number=5;min_write_buffer_number_to_merge=3;compression=kSnappyCompression;\ - bloom_locality=1;memtable_prefix_bloom_size_ratio=0.1;\ - block_based_table_factory={block_cache=512M;filter_policy=bloomfilter:10:true};\ - create_if_missing=true;max_open_files=10000;rate_limiter_bytes_per_sec=50M;\ - use_direct_io_for_flush_and_compaction=true;use_adaptive_mutex=true - -#leveldb's options -leveldb.options block_cache_size=512M,write_buffer_size=128M,max_open_files=5000,block_size=4k,block_restart_interval=16,\ - bloom_bits=10,compression=snappy,logenable=yes,max_file_size=2M - -#lmdb's options -lmdb.options database_maxsize=10G,database_maxdbs=4096,readahead=no,batch_commit_watermark=1024 - -#perconaft's options -perconaft.options cache_size=128M,compression=snappy - -#wiredtiger's options -wiredtiger.options cache_size=512M,session_max=8k,chunk_size=100M,block_size=4k,bloom_bits=10,\ - mmap=false,compressor=snappy - -#forestdb's options -forestdb.options chunksize=8,blocksize=4K - -# Close the connection after a client is idle for N seconds (0 to disable) -timeout 0 - -# TCP keepalive. -# -# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence -# of communication. This is useful for two reasons: -# -# 1) Detect dead peers. -# 2) Take the connection alive from the point of view of network -# equipment in the middle. -# -# On Linux, the specified value (in seconds) is the period used to send ACKs. -# Note that to close the connection the double of the time is needed. -# On other kernels the period depends on the kernel configuration. -# -# A reasonable value for this option is 60 seconds. -tcp-keepalive 0 - -# Specify the server verbosity level. -# This can be one of: -# error -# warn -# info -# debug -# trace -loglevel info - -# Specify the log file name. Also 'stdout' can be used to force -# Redis to log on the standard output. Note that if you use standard -# output for logging but daemonize, logs will be sent to /dev/null -#logfile ${ARDB_HOME}/log/ardb-server.log -logfile stdout - - -# The working data directory. -# -# The DB will be written inside this directory, with the filename specified -# above using the 'dbfilename' configuration directive. -# -# The Append Only File will also be created inside this directory. -# -# Note that you must specify a directory here, not a file name. -data-dir ${ARDB_HOME}/data - -################################# REPLICATION ################################# - -# Master-Slave replication. Use slaveof to make a Ardb instance a copy of -# another Ardb server. Note that the configuration is local to the slave -# so for example it is possible to configure the slave to save the DB with a -# different interval, or to listen to another port, and so on. -# -# slaveof : -#slaveof 127.0.0.1:6379 - - -# By default, ardb use 2 threads to execute commands synced from master. -# -1 means use current CPU number threads instead. -slave-workers 2 - -# Max synced command queue size in memory. -max-slave-worker-queue 1024 - -# The directory for replication. -repl-dir ${ARDB_HOME}/repl - - -# When a slave loses its connection with the master, or when the replication -# is still in progress, the slave can act in two different ways: -# -# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will -# still reply to client requests, possibly with out of date data, or the -# data set may just be empty if this is the first synchronization. 
-# -# 2) if slave-serve-stale-data is set to 'no' the slave will reply with -# an error "SYNC with master in progress" to all the kind of commands -# but to INFO and SLAVEOF. -# -slave-serve-stale-data yes - -# The slave priority is an integer number published by Ardb/Redis in the INFO output. -# It is used by Redis Sentinel in order to select a slave to promote into a -# master if the master is no longer working correctly. -# -# A slave with a low priority number is considered better for promotion, so -# for instance if there are three slaves with priority 10, 100, 25 Sentinel will -# pick the one with priority 10, that is the lowest. -# -# However a special priority of 0 marks the slave as not able to perform the -# role of master, so a slave with priority of 0 will never be selected by -# Redis Sentinel for promotion. -# -# By default the priority is 100. -slave-priority 100 - -# You can configure a slave instance to accept writes or not. Writing against -# a slave instance may be useful to store some ephemeral data (because data -# written on a slave will be easily deleted after resync with the master) but -# may also cause problems if clients are writing to it because of a -# misconfiguration. -# -# Note: read only slaves are not designed to be exposed to untrusted clients -# on the internet. It's just a protection layer against misuse of the instance. -# Still a read only slave exports by default all the administrative commands -# such as CONFIG, DEBUG, and so forth. To a limited extent you can improve -# security of read only slaves using 'rename-command' to shadow all the -# administrative / dangerous commands. -# -# Note: any requests processed by non read only slaves would no write to replication -# log and sync to connected slaves. -slave-read-only yes - -# The directory for backup. -backup-dir ${ARDB_HOME}/backup -# -# You can configure the backup file format as 'redis' or 'ardb'. The 'ardb' format -# can only used by ardb instance, while 'redis' format file can be used by redis -# and ardb instance. -backup-file-format ardb - - -# Slaves send PINGs to server in a predefined interval. It's possible to change -# this interval with the repl_ping_slave_period option. The default value is 10 -# seconds. -# -# repl-ping-slave-period 10 - -# The following option sets a timeout for both Bulk transfer I/O timeout and -# master data or ping response timeout. The default value is 60 seconds. -# -# It is important to make sure that this value is greater than the value -# specified for repl-ping-slave-period otherwise a timeout will be detected -# every time there is low traffic between the master and the slave. -# -# repl-timeout 60 - -# Disable TCP_NODELAY on the slave socket after SYNC? -# -# If you select "yes" Ardb will use a smaller number of TCP packets and -# less bandwidth to send data to slaves. But this can add a delay for -# the data to appear on the slave side, up to 40 milliseconds with -# Linux kernels using a default configuration. -# -# If you select "no" the delay for data to appear on the slave side will -# be reduced but more bandwidth will be used for replication. -# -# By default we optimize for low latency, but in very high traffic conditions -# or when the master and slaves are many hops away, turning this to "yes" may -# be a good idea. -repl-disable-tcp-nodelay no - -# Set the replication backlog size. 
The backlog is a buffer that accumulates -# slave data when slaves are disconnected for some time, so that when a slave -# wants to reconnect again, often a full resync is not needed, but a partial -# resync is enough, just passing the portion of data the slave missed while -# disconnected. -# -# The biggest the replication backlog, the longer the time the slave can be -# disconnected and later be able to perform a partial resynchronization. -# -# If the size is configured by 0, then Ardb instance can NOT serve as a master. -# -# repl-backlog-size 500m -repl-backlog-size 1G -repl-backlog-cache-size 100M -snapshot-max-lag-offset 500M - -# Set the max number of snapshots. By default this limit is set to 10 snapshot. -# Once the limit is reached Ardb would try to remove the oldest snapshots -maxsnapshots 10 - -# It is possible for a master to stop accepting writes if there are less than -# N slaves connected, having a lag less or equal than M seconds. -# -# The N slaves need to be in "online" state. -# -# The lag in seconds, that must be <= the specified value, is calculated from -# the last ping received from the slave, that is usually sent every second. -# -# This option does not GUARANTEE that N replicas will accept the write, but -# will limit the window of exposure for lost writes in case not enough slaves -# are available, to the specified number of seconds. -# -# For example to require at least 3 slaves with a lag <= 10 seconds use: -# -# min-slaves-to-write 3 -# min-slaves-max-lag 10 - -# When a slave loses its connection with the master, or when the replication -# is still in progress, the slave can act in two different ways: -# -# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will -# still reply to client requests, possibly with out of date data, or the -# data set may just be empty if this is the first synchronization. -# -# 2) if slave-serve-stale-data is set to 'no' the slave will reply with -# an error "SYNC with master in progress" to all the kind of commands -# but to INFO and SLAVEOF. -# -slave-serve-stale-data yes - -# After a master has no longer connected slaves for some time, the backlog -# will be freed. The following option configures the amount of seconds that -# need to elapse, starting from the time the last slave disconnected, for -# the backlog buffer to be freed. -# -# A value of 0 means to never release the backlog. -# -# repl-backlog-ttl 3600 - -# Slave clear current data store before full resync to master. -# It make sure that slave keep consistent with master's data. But slave may cost a -# long time to delete data, it depends on -# If set by no, then slave may have different data with master. -slave-cleardb-before-fullresync yes - -# Master/Slave instance would persist sync state every 'repl-backlog-sync-period' secs. -repl-backlog-sync-period 5 - -# Slave would ignore any 'expire' setting from replication command if set by 'yes'. -# It could be used if master is redis instance serve hot data with expire setting, slave is -# ardb instance which persist all data. -# Since master redis instance would generate a 'del' for each expired key, slave should ignore -# all 'del' command too by setting 'slave-ignore-del' to 'yes' for this scenario. -slave-ignore-expire no -slave-ignore-del no - -# After a master has no longer connected slaves for some time, the backlog -# will be freed. 
The following option configures the amount of seconds that -# need to elapse, starting from the time the last slave disconnected, for -# the backlog buffer to be freed. -# -# A value of 0 means to never release the backlog. -# -# repl-backlog-ttl 3600 - - -################################## SECURITY ################################### - -# Require clients to issue AUTH before processing any other -# commands. This might be useful in environments in which you do not trust -# others with access to the host running redis-server. -# -# This should stay commented out for backward compatibility and because most -# people do not need auth (e.g. they run their own servers). -# -# Warning: since Redis is pretty fast an outside user can try up to -# 150k passwords per second against a good box. This means that you should -# use a very strong password otherwise it will be very easy to break. -# -# requirepass foobared - -# Command renaming. -# -# It is possible to change the name of dangerous commands in a shared -# environment. For instance the CONFIG command may be renamed into something -# hard to guess so that it will still be available for internal-use tools -# but not available for general clients. -# -# Example: -# -# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52 -# -# It is also possible to completely kill a command by renaming it into -# an empty string: -# -# rename-command CONFIG "" -# -# Please note that changing the name of commands that are logged into the -# AOF file or transmitted to slaves may cause problems. - -################################ CLUSTER ############################### - -# Max execution time of a Lua script in milliseconds. -#zookeeper-servers 127.0.0.1:2181,127.0.0.1:2182,127.0.0.1:2183 -#zk-recv-timeout 10000 -#zk-clientid-file ${ARDB_HOME}/ardb.zkclientid -cluster-name ardb-cluster - - -################################### LIMITS #################################### - -# Set the max number of connected clients at the same time. By default -# this limit is set to 10000 clients, however if the Redis server is not -# able to configure the process file limit to allow for the specified limit -# the max number of allowed clients is set to the current file limit -# minus 32 (as Redis reserves a few file descriptors for internal uses). -# -# Once the limit is reached Redis will close all the new connections sending -# an error 'max number of clients reached'. -# -# maxclients 10000 - - -# The client output buffer limits can be used to force disconnection of clients -# that are not reading data from the server fast enough for some reason (a -# common reason is that a Pub/Sub/Slave client can't consume messages as fast as the -# publisher can produce them). -slave-client-output-buffer-limit 256mb -pubsub-client-output-buffer-limit 32mb - -################################## SLOW LOG ################################### - -# The Redis Slow Log is a system to log queries that exceeded a specified -# execution time. The execution time does not include the I/O operations -# like talking with the client, sending the reply and so forth, -# but just the time needed to actually execute the command (this is the only -# stage of command execution where the thread is blocked and can not serve -# other requests in the meantime). -# -# You can configure the slow log with two parameters: one tells Redis -# what is the execution time, in microseconds, to exceed in order for the -# command to get logged, and the other parameter is the length of the -# slow log. 
When a new command is logged the oldest one is removed from the -# queue of logged commands. - -# The following time is expressed in microseconds, so 1000000 is equivalent -# to one second. Note that a negative number disables the slow log, while -# a value of zero forces the logging of every command. -slowlog-log-slower-than 10000 - -# There is no limit to this length. Just be aware that it will consume memory. -# You can reclaim memory used by the slow log with SLOWLOG RESET. -slowlog-max-len 128 - -################################ LUA SCRIPTING ############################### - -# Max execution time of a Lua script in milliseconds. -# -# If the maximum execution time is reached Redis will log that a script is -# still in execution after the maximum allowed time and will start to -# reply to queries with an error. -# -# When a long running script exceed the maximum execution time only the -# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be -# used to stop a script that did not yet called write commands. The second -# is the only way to shut down the server in the case a write commands was -# already issue by the script but the user don't want to wait for the natural -# termination of the script. -# -# Set it to 0 or a negative value for unlimited execution without warnings. -lua-time-limit 5000 - -############################### ADVANCED CONFIG ############################### -## Since some redis clients would check info command's output, this configuration -## would be set in 'misc' section of 'info's output -#additional-misc-info redis_version:2.8.9\nredis_trick:yes - - -# HyperLogLog sparse representation bytes limit. The limit includes the -# 16 bytes header. When an HyperLogLog using the sparse representation crosses -# this limit, it is convereted into the dense representation. -# -# A value greater than 16000 is totally useless, since at that point the -# dense representation is more memory efficient. -# -# The suggested value is ~ 3000 in order to have the benefits of -# the space efficient encoding without slowing down too much PFADD, -# which is O(N) with the sparse encoding. Thev value can be raised to -# ~ 10000 when CPU is not a concern, but space is, and the data set is -# composed of many HyperLogLogs with cardinality in the 0 - 15000 range. -hll-sparse-max-bytes 3000 - -#trusted-ip 10.10.10.10 -#trusted-ip 10.10.10.* - -# By default Ardb would not compact whole db after loading a snapshot, which may happens -# when slave syncing from master, processing 'import' command from client. -# This configuration only works with rocksdb engine. -# If ardb dord not compact data after loading snapshot file, there would be poor read performance before rocksdb -# completes the next compaction task internally. While the compaction task would cost very long time for a huge data set. -compact-after-snapshot-load false - -# Ardb would store cursor in memory -scan-redis-compatible yes -scan-cursor-expire-after 60 - -redis-compatible-mode yes -redis-compatible-version 2.8.0 - -statistics-log-period 600 - - -# Range deletion min size trigger -range-delete-min-size 100 diff --git a/storage/kvrocks.conf b/storage/kvrocks.conf new file mode 100644 index 0000000..91c3957 --- /dev/null +++ b/storage/kvrocks.conf @@ -0,0 +1,497 @@ +################################ GENERAL ##################################### + +# By default kvrocks listens for connections from all the network interfaces +# available on the server. 
It is possible to listen to just one or multiple +# interfaces using the "bind" configuration directive, followed by one or +# more IP addresses. +# +# Examples: +# +# bind 192.168.1.100 10.0.0.1 +# bind 127.0.0.1 +bind 0.0.0.0 + +# Accept connections on the specified port, default is 6666. +port 5188 + +# Close the connection after a client is idle for N seconds (0 to disable) +timeout 0 + +# The number of worker's threads, increase or decrease it would effect the performance. +workers 8 + +# The number of replication worker's threads, increase or decrease it would effect the replication performance. +# Default: 1 +repl-workers 1 + +# By default kvrocks does not run as a daemon. Use 'yes' if you need it. +# Note that kvrocks will write a pid file in /var/run/kvrocks.pid when daemonized. +daemonize no + +# Kvrocks implements cluster solution that is similar with redis cluster sulution. +# You can get cluster information by CLUSTER NODES|SLOTS|INFO command, it also is +# adapted to redis-cli, redis-benchmark, redis cluster SDK and redis cluster proxy. +# But kvrocks doesn't support to communicate with each others, so you must set +# cluster topology by CLUSTER SETNODES|SETNODEID commands, more details: #219. +# +# PLEASE NOTE: +# If you enable cluster, kvrocks will encode key with its slot id calculated by +# CRC16 and modulo 16384, endoding key with its slot id makes it efficient to +# migrate keys based on slot. So if you enabled at first time, cluster mode must +# not be disabled after restarting, and vice versa. That is to say, data is not +# compatible between standalone mode with cluster mode, you must migrate data +# if you want to change mode, otherwise, kvrocks will make data corrupt. +# +# Default: no +cluster-enabled no + +# Set the max number of connected clients at the same time. By default +# this limit is set to 10000 clients, however if the server is not +# able to configure the process file limit to allow for the specified limit +# the max number of allowed clients is set to the current file limit +# +# Once the limit is reached the server will close all the new connections sending +# an error 'max number of clients reached'. +# +maxclients 10000 + +# Require clients to issue AUTH before processing any other +# commands. This might be useful in environments in which you do not trust +# others with access to the host running kvrocks. +# +# This should stay commented out for backward compatibility and because most +# people do not need auth (e.g. they run their own servers). +# +# Warning: since kvrocks is pretty fast an outside user can try up to +# 150k passwords per second against a good box. This means that you should +# use a very strong password otherwise it will be very easy to break. +# +# requirepass foobared + +# If the master is password protected (using the "masterauth" configuration +# directive below) it is possible to tell the slave to authenticate before +# starting the replication synchronization process, otherwise the master will +# refuse the slave request. +# +# masterauth foobared + +# Master-Salve replication would check db name is matched. if not, the slave should +# refuse to sync the db from master. Don't use default value, set the db-name to identify +# the cluster. +db-name storage.db + +# The working directory +# +# The DB will be written inside this directory +# Note that you must specify a directory here, not a file name. +dir ./ + +# The logs of server will be stored in this directory. 
If you don't specify +# one directory, by default, we store logs in the working directory that set +# by 'dir' above. +# log-dir /tmp/kvrocks + +# When running daemonized, kvrocks writes a pid file in ${CONFIG_DIR}/kvrocks.pid by +# default. You can specify a custom pid file location here. +# pidfile /var/run/kvrocks.pid +pidfile storage.pid + +# You can configure a slave instance to accept writes or not. Writing against +# a slave instance may be useful to store some ephemeral data (because data +# written on a slave will be easily deleted after resync with the master) but +# may also cause problems if clients are writing to it because of a +# misconfiguration. +slave-read-only yes + +# The slave priority is an integer number published by Kvrocks in the INFO output. +# It is used by Redis Sentinel in order to select a slave to promote into a +# master if the master is no longer working correctly. +# +# A slave with a low priority number is considered better for promotion, so +# for instance if there are three slave with priority 10, 100, 25 Sentinel will +# pick the one with priority 10, that is the lowest. +# +# However a special priority of 0 marks the replica as not able to perform the +# role of master, so a slave with priority of 0 will never be selected by +# Redis Sentinel for promotion. +# +# By default the priority is 100. +slave-priority 100 + +# TCP listen() backlog. +# +# In high requests-per-second environments you need an high backlog in order +# to avoid slow clients connections issues. Note that the Linux kernel +# will silently truncate it to the value of /proc/sys/net/core/somaxconn so +# make sure to raise both the value of somaxconn and tcp_max_syn_backlog +# in order to Get the desired effect. +tcp-backlog 511 + +# If the master is an old version, it may have specified replication threads +# that use 'port + 1' as listening port, but in new versions, we don't use +# extra port to implement replication. In order to allow the new replicas to +# copy old masters, you should indicate that the master uses replication port +# or not. +# If yes, that indicates master uses replication port and replicas will connect +# to 'master's listening port + 1' when synchronization. +# If no, that indicates master doesn't use replication port and replicas will +# connect 'master's listening port' when synchronization. +master-use-repl-port no + +# Master-Slave replication. Use slaveof to make a kvrocks instance a copy of +# another kvrocks server. A few things to understand ASAP about kvrocks replication. +# +# 1) Kvrocks replication is asynchronous, but you can configure a master to +# stop accepting writes if it appears to be not connected with at least +# a given number of slaves. +# 2) Kvrocks slaves are able to perform a partial resynchronization with the +# master if the replication link is lost for a relatively small amount of +# time. You may want to configure the replication backlog size (see the next +# sections of this file) with a sensible value depending on your needs. +# 3) Replication is automatic and does not need user intervention. After a +# network partition slaves automatically try to reconnect to masters +# and resynchronize with them. 
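+# Note: this setup runs two standalone kvrocks instances, storage on port 5188
+# (this file) and ranking on port 5189 (ranking/kvrocks.conf); neither is a
+# replica of the other, so slaveof is left commented out below.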
+# +# slaveof +# slaveof 127.0.0.1 6379 + +# When a slave loses its connection with the master, or when the replication +# is still in progress, the slave can act in two different ways: +# +# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will +# still reply to client requests, possibly with out of date data, or the +# data set may just be empty if this is the first synchronization. +# +# 2) if slave-serve-stale-data is set to 'no' the slave will reply with +# an error "SYNC with master in progress" to all the kind of commands +# but to INFO and SLAVEOF. +# +slave-serve-stale-data yes + +# To guarantee slave's data safe and serve when it is in full synchronization +# state, slave still keep itself data. But this way needs to occupy much disk +# space, so we provide a way to reduce disk occupation, slave will delete itself +# entire database before fetching files from master during full synchronization. +# If you want to enable this way, you can set 'slave-delete-db-before-fullsync' +# to yes, but you must know that database will be lost if master is down during +# full synchronization, unless you have a backup of database. +# +# This option is similar redis replicas RDB diskless load option: +# repl-diskless-load on-empty-db +# +# Default: no +slave-empty-db-before-fullsync no + +# If replicas need full synchronization with master, master need to create +# checkpoint for feeding replicas, and replicas also stage a checkpoint of +# the master. If we also keep the backup, it maybe occupy extra disk space. +# You can enable 'purge-backup-on-fullsync' if disk is not sufficient, but +# that may cause remote backup copy failing. +# +# Default: no +purge-backup-on-fullsync no + +# The maximum allowed rate (in MB/s) that should be used by Replication. +# If the rate exceeds max-replication-mb, replication will slow down. +# Default: 0 (i.e. no limit) +max-replication-mb 0 + +# The maximum allowed aggregated write rate of flush and compaction (in MB/s). +# If the rate exceeds max-io-mb, io will slow down. +# 0 is no limit +# Default: 500 +max-io-mb 500 + +# The maximum allowed space (in GB) that should be used by RocksDB. +# If the total size of the SST files exceeds max_allowed_space, writes to RocksDB will fail. +# Please see: https://github.com/facebook/rocksdb/wiki/Managing-Disk-Space-Utilization +# Default: 0 (i.e. no limit) +max-db-size 0 + +# The maximum backup to keep, server cron would run every minutes to check the num of current +# backup, and purge the old backup if exceed the max backup num to keep. If max-backup-to-keep +# is 0, no backup would be keep. But now, we only support 0 or 1. +max-backup-to-keep 1 + +# The maximum hours to keep the backup. If max-backup-keep-hours is 0, wouldn't purge any backup. +# default: 1 day +max-backup-keep-hours 24 + +# Ratio of the samples would be recorded when the profiling was enabled. +# we simply use the rand to determine whether to record the sample or not. +# +# Default: 0 +profiling-sample-ratio 0 + +# There is no limit to this length. Just be aware that it will consume memory. +# You can reclaim memory used by the perf log with PERFLOG RESET. +# +# Default: 256 +profiling-sample-record-max-len 256 + +# profiling-sample-record-threshold-ms use to tell the kvrocks when to record. 
+# +# Default: 100 millisecond +profiling-sample-record-threshold-ms 100 + +################################## SLOW LOG ################################### + +# The Kvrocks Slow Log is a system to log queries that exceeded a specified +# execution time. The execution time does not include the I/O operations +# like talking with the client, sending the reply and so forth, +# but just the time needed to actually execute the command (this is the only +# stage of command execution where the thread is blocked and can not serve +# other requests in the meantime). +# +# You can configure the slow log with two parameters: one tells Kvrocks +# what is the execution time, in microseconds, to exceed in order for the +# command to get logged, and the other parameter is the length of the +# slow log. When a new command is logged the oldest one is removed from the +# queue of logged commands. + +# The following time is expressed in microseconds, so 1000000 is equivalent +# to one second. Note that -1 value disables the slow log, while +# a value of zero forces the logging of every command. +slowlog-log-slower-than 100000 + +# There is no limit to this length. Just be aware that it will consume memory. +# You can reclaim memory used by the slow log with SLOWLOG RESET. +slowlog-max-len 128 + +# If you run kvrocks from upstart or systemd, kvrocks can interact with your +# supervision tree. Options: +# supervised no - no supervision interaction +# supervised upstart - signal upstart by putting kvrocks into SIGSTOP mode +# supervised systemd - signal systemd by writing READY=1 to $NOTIFY_SOCKET +# supervised auto - detect upstart or systemd method based on +# UPSTART_JOB or NOTIFY_SOCKET environment variables +# Note: these supervision methods only signal "process is ready." +# They do not enable continuous liveness pings back to your supervisor. +supervised no + +################################## CRON ################################### + +# Compact Scheduler, auto compact at schedule time +# time expression format is the same as crontab(currently only support * and int) +# e.g. compact-cron 0 3 * * * 0 4 * * * +# would compact the db at 3am and 4am everyday +# compact-cron 0 3 * * * + +# The hour range that compaction checker would be active +# e.g. compaction-checker-range 0-7 means compaction checker would be worker between +# 0-7am every day. +compaction-checker-range 0-7 + +# Bgsave scheduler, auto bgsave at schedule time +# time expression format is the same as crontab(currently only support * and int) +# e.g. bgsave-cron 0 3 * * * 0 4 * * * +# would bgsave the db at 3am and 4am everyday + +# Command renaming. +# +# It is possible to change the name of dangerous commands in a shared +# environment. For instance the KEYS command may be renamed into something +# hard to guess so that it will still be available for internal-use tools +# but not available for general clients. +# +# Example: +# +# rename-command KEYS b840fc02d524045429941cc15f59e41cb7be6c52 +# +# It is also possible to completely kill a command by renaming it into +# an empty string: +# +# rename-command KEYS "" + +# The key-value size may so be quite different in many scenes, and use 256MiB as SST file size +# may cause data loading(large index/filter block) ineffective when the key-value was too small. +# kvrocks supports user-defined SST file in config(rocksdb.target_file_size_base), +# but it still too trivial and inconvenient to adjust the different sizes for different instances. 
+# so we want to periodic auto-adjust the SST size in-flight with user avg key-value size. +# +# If enabled, kvrocks will auto resize rocksdb.target_file_size_base +# and rocksdb.write_buffer_size in-flight with user avg key-value size. +# Please see #118. +# +# Default: yes +auto-resize-block-and-sst yes + +################################ ROCKSDB ##################################### + +# Specify the capacity of metadata column family block cache. Larger block cache +# may make request faster while more keys would be cached. Max Size is 200*1024. +# Default: 2048MB +rocksdb.metadata_block_cache_size 2048 + +# Specify the capacity of subkey column family block cache. Larger block cache +# may make request faster while more keys would be cached. Max Size is 200*1024. +# Default: 2048MB +rocksdb.subkey_block_cache_size 2048 + +# Metadata column family and subkey column family will share a single block cache +# if set 'yes'. The capacity of shared block cache is +# metadata_block_cache_size + subkey_block_cache_size +# +# Default: yes +rocksdb.share_metadata_and_subkey_block_cache yes + +# Number of open files that can be used by the DB. You may need to +# increase this if your database has a large working set. Value -1 means +# files opened are always kept open. You can estimate number of files based +# on target_file_size_base and target_file_size_multiplier for level-based +# compaction. For universal-style compaction, you can usually set it to -1. +# Default: 4096 +rocksdb.max_open_files 8096 + +# Amount of data to build up in memory (backed by an unsorted log +# on disk) before converting to a sorted on-disk file. +# +# Larger values increase performance, especially during bulk loads. +# Up to max_write_buffer_number write buffers may be held in memory +# at the same time, +# so you may wish to adjust this parameter to control memory usage. +# Also, a larger write buffer will result in a longer recovery time +# the next time the database is opened. +# +# Note that write_buffer_size is enforced per column family. +# See db_write_buffer_size for sharing memory across column families. + +# default is 64MB +rocksdb.write_buffer_size 16 + +# Target file size for compaction, target file size for Leve N can be caculated +# by target_file_size_base * (target_file_size_multiplier ^ (L-1)) +# +# Default: 128MB +rocksdb.target_file_size_base 16 + +# The maximum number of write buffers that are built up in memory. +# The default and the minimum number is 2, so that when 1 write buffer +# is being flushed to storage, new writes can continue to the other +# write buffer. +# If max_write_buffer_number > 3, writing will be slowed down to +# options.delayed_write_rate if we are writing to the last write buffer +# allowed. +rocksdb.max_write_buffer_number 4 + +# Maximum number of concurrent background compaction jobs, submitted to +# the default LOW priority thread pool. +rocksdb.max_background_compactions 4 + +# Maximum number of concurrent background memtable flush jobs, submitted by +# default to the HIGH priority thread pool. If the HIGH priority thread pool +# is configured to have zero threads, flush jobs will share the LOW priority +# thread pool with compaction jobs. +rocksdb.max_background_flushes 4 + +# This value represents the maximum number of threads that will +# concurrently perform a compaction job by breaking it into multiple, +# smaller ones that are run simultaneously. +# Default: 2 (i.e. 
no subcompactions) +rocksdb.max_sub_compactions 2 + +# In order to limit the size of WALs, RocksDB uses DBOptions::max_total_wal_size +# as the trigger of column family flush. Once WALs exceed this size, RocksDB +# will start forcing the flush of column families to allow deletion of some +# oldest WALs. This config can be useful when column families are updated at +# non-uniform frequencies. If there's no size limit, users may need to keep +# really old WALs when the infrequently-updated column families hasn't flushed +# for a while. +# +# In kvrocks, we use multiple column families to store metadata, subkeys, etc. +# If users always use string type, but use list, hash and other complex data types +# infrequently, there will be a lot of old WALs if we don't set size limit +# (0 by default in rocksdb), because rocksdb will dynamically choose the WAL size +# limit to be [sum of all write_buffer_size * max_write_buffer_number] * 4 if set to 0. +# +# Moreover, you should increase this value if you already set rocksdb.write_buffer_size +# to a big value, to avoid influencing the effect of rocksdb.write_buffer_size and +# rocksdb.max_write_buffer_number. +# +# default is 512MB +rocksdb.max_total_wal_size 512 + +# We impl the repliction with rocksdb WAL, it would trigger full sync when the seq was out of range. +# wal_ttl_seconds and wal_size_limit_mb would affect how archived logswill be deleted. +# If WAL_ttl_seconds is not 0, then WAL files will be checked every WAL_ttl_seconds / 2 and those that +# are older than WAL_ttl_seconds will be deleted# +# +# Default: 3 Hours +rocksdb.wal_ttl_seconds 10800 + +# If WAL_ttl_seconds is 0 and WAL_size_limit_MB is not 0, +# WAL files will be checked every 10 min and if total size is greater +# then WAL_size_limit_MB, they will be deleted starting with the +# earliest until size_limit is met. All empty files will be deleted +# Default: 16GB +rocksdb.wal_size_limit_mb 16384 + +# Approximate size of user data packed per block. Note that the +# block size specified here corresponds to uncompressed data. The +# actual size of the unit read from disk may be smaller if +# compression is enabled. +# +# Default: 4KB +rocksdb.block_size 2048 + +# Indicating if we'd put index/filter blocks to the block cache +# +# Default: no +rocksdb.cache_index_and_filter_blocks yes + +# Specify the compression to use. +# Accept value: "no", "snappy" +# default snappy +rocksdb.compression snappy + +# If non-zero, we perform bigger reads when doing compaction. If you're +# running RocksDB on spinning disks, you should set this to at least 2MB. +# That way RocksDB's compaction is doing sequential instead of random reads. +# When non-zero, we also force new_table_reader_for_compaction_inputs to +# true. +# +# Default: 2 MB +rocksdb.compaction_readahead_size 2097152 + +# he limited write rate to DB if soft_pending_compaction_bytes_limit or +# level0_slowdown_writes_trigger is triggered. + +# If the value is 0, we will infer a value from `rater_limiter` value +# if it is not empty, or 16MB if `rater_limiter` is empty. Note that +# if users change the rate in `rate_limiter` after DB is opened, +# `delayed_write_rate` won't be adjusted. +# +rocksdb.delayed_write_rate 0 +# If enable_pipelined_write is true, separate write thread queue is +# maintained for WAL write and memtable write. +# +# Default: no +rocksdb.enable_pipelined_write no + +# Soft limit on number of level-0 files. We start slowing down writes at this +# point. 
A value <0 means that no writing slow down will be triggered by +# number of files in level-0. +# +# Default: 20 +rocksdb.level0_slowdown_writes_trigger 20 + +# Maximum number of level-0 files. We stop writes at this point. +# +# Default: 40 +rocksdb.level0_stop_writes_trigger 40 + +# if not zero, dump rocksdb.stats to LOG every stats_dump_period_sec +# +# Default: 0 +rocksdb.stats_dump_period_sec 0 + +# if yes, the auto compaction would be disabled, but the manual compaction remain works +# +# Default: no +rocksdb.disable_auto_compactions no +################################ NAMESPACE ##################################### +# namespace.test change.me + + +backup-dir .//backup +log-dir ./ diff --git a/storage/run_ardb.sh b/storage/run_ardb.sh deleted file mode 100755 index dc211d9..0000000 --- a/storage/run_ardb.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -set -e -set -x - -../../ardb/src/ardb-server ardb.conf diff --git a/storage/run_kvrocks.sh b/storage/run_kvrocks.sh new file mode 100755 index 0000000..6df689d --- /dev/null +++ b/storage/run_kvrocks.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -e +set -x + +../../kvrocks/src/kvrocks -c kvrocks.conf diff --git a/storage/shutdown_ardb.sh b/storage/shutdown_ardb.sh deleted file mode 100755 index d888a20..0000000 --- a/storage/shutdown_ardb.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -set -e -set -x - -../../redis/src/redis-cli -s ./storage.sock shutdown save diff --git a/tools/3rdparty.py b/tools/3rdparty.py new file mode 100755 index 0000000..852b35f --- /dev/null +++ b/tools/3rdparty.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import requests + +from bgpranking.default import get_homedir + +d3js_version = '7' +bootstrap_select_version = "1.13.18" + + +if __name__ == '__main__': + dest_dir = get_homedir() / 'website' / 'web' / 'static' + + d3 = requests.get(f'https://d3js.org/d3.v{d3js_version}.min.js') + with (dest_dir / f'd3.v{d3js_version}.min.js').open('wb') as f: + f.write(d3.content) + print(f'Downloaded d3js v{d3js_version}.') + + bootstrap_select_js = requests.get(f'https://cdn.jsdelivr.net/npm/bootstrap-select@{bootstrap_select_version}/dist/js/bootstrap-select.min.js') + with (dest_dir / 'bootstrap-select.min.js').open('wb') as f: + f.write(bootstrap_select_js.content) + print(f'Downloaded bootstrap_select js v{bootstrap_select_version}.') + + bootstrap_select_css = requests.get(f'https://cdn.jsdelivr.net/npm/bootstrap-select@{bootstrap_select_version}/dist/css/bootstrap-select.min.css') + with (dest_dir / 'bootstrap-select.min.css').open('wb') as f: + f.write(bootstrap_select_css.content) + print(f'Downloaded bootstrap_select css v{bootstrap_select_version}.') + + print('All 3rd party modules for the website were downloaded.') diff --git a/tools/migrate.py b/tools/migrate.py new file mode 100644 index 0000000..3d1a34f --- /dev/null +++ b/tools/migrate.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +from datetime import datetime +from typing import Set + +from redis import Redis + +redis_src = Redis(unix_socket_path='../storage/storage.sock') +redis_dst = Redis('127.0.0.1', 5188) + +chunk_size = 100000 + + +def process_chunk(src: Redis, dst: Redis, keys: Set[str]): + src_pipeline = src.pipeline() + [src_pipeline.type(key) for key in keys] + to_process = {key: key_type for key, key_type in zip(keys, src_pipeline.execute())} + + src_pipeline = src.pipeline() + for key, key_type in to_process.items(): + if key_type == b"string": + src_pipeline.get(key) + elif key_type == b"list": 
+ raise Exception('Lists should not be used.') + elif key_type == b"set": + src_pipeline.smembers(key) + elif key_type == b"zset": + src_pipeline.zrangebyscore(key, '-Inf', '+Inf', withscores=True) + elif key_type == b"hash": + src_pipeline.hgetall(key) + else: + raise Exception(f'{key_type} not supported {key}.') + + dest_pipeline = dst.pipeline() + for key, content in zip(to_process.keys(), src_pipeline.execute()): + if to_process[key] == b"string": + dest_pipeline.set(key, content) + elif to_process[key] == b"set": + dest_pipeline.sadd(key, *content) + elif to_process[key] == b"zset": + dest_pipeline.zadd(key, {value: rank for value, rank in content}) + elif to_process[key] == b"hash": + dest_pipeline.hmset(key, content) + + dest_pipeline.execute() + + +def migrate(src: Redis, dst: Redis): + keys = set() + pos = 0 + for key in src.scan_iter(count=chunk_size, match='2017*'): + keys.add(key) + + if len(keys) == chunk_size: + process_chunk(src, dst, keys) + pos += len(keys) + print(f'{datetime.now()} - {pos} keys done.') + keys = set() + + # migrate remaining keys + process_chunk(src, dst, keys) + pos += len(keys) + print(f'{datetime.now()} - {pos} keys done.') + + +if __name__ == '__main__': + migrate(redis_src, redis_dst) diff --git a/bgpranking/monitor.py b/tools/monitoring.py old mode 100644 new mode 100755 similarity index 52% rename from bgpranking/monitor.py rename to tools/monitoring.py index a34ed74..3de448e --- a/bgpranking/monitor.py +++ b/tools/monitoring.py @@ -1,20 +1,18 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -try: - import simplejson as json -except ImportError: - import json +import json -from redis import StrictRedis -from .libs.helpers import get_socket_path, get_ipasn +from redis import Redis +from bgpranking.default import get_socket_path +from bgpranking.helpers import get_ipasn class Monitor(): def __init__(self): - self.intake = StrictRedis(unix_socket_path=get_socket_path('intake'), db=0, decode_responses=True) - self.sanitize = StrictRedis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True) - self.cache = StrictRedis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) + self.intake = Redis(unix_socket_path=get_socket_path('intake'), db=0, decode_responses=True) + self.sanitize = Redis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True) + self.cache = Redis(unix_socket_path=get_socket_path('cache'), db=1, decode_responses=True) self.ipasn = get_ipasn() def get_values(self): @@ -26,5 +24,11 @@ class Monitor(): if len(ipasn_meta['cached_dates']['caida']['v6']['cached']) > 15: ipasn_meta['cached_dates']['caida']['v6']['cached'] = 'Too many entries' return json.dumps({'Non-parsed IPs': ips_in_intake, 'Parsed IPs': ready_to_insert, - 'running': self.cache.hgetall('running'), 'IPASN History': ipasn_meta}, + 'running': self.cache.zrangebyscore('running', '-inf', '+inf', withscores=True), + 'IPASN History': ipasn_meta}, indent=2) + + +if __name__ == '__main__': + m = Monitor() + print(m.get_values()) diff --git a/tools/validate_config_files.py b/tools/validate_config_files.py new file mode 100755 index 0000000..13af602 --- /dev/null +++ b/tools/validate_config_files.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import json +import logging +import argparse + +from bgpranking.default import get_homedir + + +def validate_generic_config_file(): + user_config = get_homedir() / 'config' / 'generic.json' + with user_config.open() as f: + generic_config = json.load(f) + with 
(get_homedir() / 'config' / 'generic.json.sample').open() as f: + generic_config_sample = json.load(f) + # Check documentation + for key in generic_config_sample.keys(): + if key == '_notes': + continue + if key not in generic_config_sample['_notes']: + raise Exception(f'###### - Documentation missing for {key}') + + # Check all entries in the sample files are in the user file, and they have the same type + for key in generic_config_sample.keys(): + if key == '_notes': + continue + if generic_config.get(key) is None: + logger.warning(f'Entry missing in user config file: {key}. Will default to: {generic_config_sample[key]}') + continue + if not isinstance(generic_config[key], type(generic_config_sample[key])): + raise Exception(f'Invalid type for {key}. Got: {type(generic_config[key])} ({generic_config[key]}), expected: {type(generic_config_sample[key])} ({generic_config_sample[key]})') + + if isinstance(generic_config[key], dict): + # Check entries + for sub_key in generic_config_sample[key].keys(): + if sub_key not in generic_config[key]: + raise Exception(f'{sub_key} is missing in generic_config[key]. Default from sample file: {generic_config_sample[key][sub_key]}') + if not isinstance(generic_config[key][sub_key], type(generic_config_sample[key][sub_key])): + raise Exception(f'Invalid type for {sub_key} in {key}. Got: {type(generic_config[key][sub_key])} ({generic_config[key][sub_key]}), expected: {type(generic_config_sample[key][sub_key])} ({generic_config_sample[key][sub_key]})') + + # Make sure the user config file doesn't have entries missing in the sample config + for key in generic_config.keys(): + if key not in generic_config_sample: + raise Exception(f'{key} is missing in the sample config file. You need to compare {user_config} with {user_config}.sample.') + + return True + + +def update_user_configs(): + for file_name in ['generic']: + with (get_homedir() / 'config' / f'{file_name}.json').open() as f: + try: + generic_config = json.load(f) + except Exception: + generic_config = {} + with (get_homedir() / 'config' / f'{file_name}.json.sample').open() as f: + generic_config_sample = json.load(f) + + has_new_entry = False + for key in generic_config_sample.keys(): + if key == '_notes': + continue + if generic_config.get(key) is None: + print(f'{key} was missing in {file_name}, adding it.') + print(f"Description: {generic_config_sample['_notes'][key]}") + generic_config[key] = generic_config_sample[key] + has_new_entry = True + elif isinstance(generic_config[key], dict): + for sub_key in generic_config_sample[key].keys(): + if sub_key not in generic_config[key]: + print(f'{sub_key} was missing in {key} from {file_name}, adding it.') + generic_config[key][sub_key] = generic_config_sample[key][sub_key] + has_new_entry = True + if has_new_entry: + with (get_homedir() / 'config' / f'{file_name}.json').open('w') as fw: + json.dump(generic_config, fw, indent=2, sort_keys=True) + return has_new_entry + + +if __name__ == '__main__': + logger = logging.getLogger('Config validator') + parser = argparse.ArgumentParser(description='Check the config files.') + parser.add_argument('--check', default=False, action='store_true', help='Check if the sample config and the user config are in-line') + parser.add_argument('--update', default=False, action='store_true', help='Update the user config with the entries from the sample config if entries are missing') + args = parser.parse_args() + + if args.check: + if validate_generic_config_file(): + print(f"The entries in {get_homedir() / 'config' / 
'generic.json'} are valid.") + + if args.update: + if not update_user_configs(): + print(f"No updates needed in {get_homedir() / 'config' / 'generic.json'}.") diff --git a/website/3drparty.sh b/website/3drparty.sh deleted file mode 100755 index faf0c5d..0000000 --- a/website/3drparty.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -set -e -set -x - -mkdir -p web/static/ - -wget https://d3js.org/d3.v5.js -O web/static/d3.v5.js - -BOOTSTRAP_SELECT="1.13.5" - -wget https://cdnjs.cloudflare.com/ajax/libs/bootstrap-select/${BOOTSTRAP_SELECT}/css/bootstrap-select.min.css -O web/static/bootstrap-select.min.css -wget https://cdnjs.cloudflare.com/ajax/libs/bootstrap-select/${BOOTSTRAP_SELECT}/js/bootstrap-select.min.js -O web/static/bootstrap-select.min.js diff --git a/website/web/__init__.py b/website/web/__init__.py index a57c4b3..22b6283 100644 --- a/website/web/__init__.py +++ b/website/web/__init__.py @@ -1,40 +1,40 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +import json +import pkg_resources + +from collections import defaultdict +from datetime import date, timedelta from urllib.parse import urljoin -try: - import simplejson as json -except ImportError: - import json - -import os +from typing import Dict, Any, Tuple, List, Optional, Union +import pycountry # type: ignore import requests from flask import Flask, render_template, request, session, Response, redirect, url_for -from flask_bootstrap import Bootstrap +from flask_bootstrap import Bootstrap # type: ignore +from flask_restx import Api # type: ignore -from bgpranking.querying import Querying -from bgpranking.libs.exceptions import MissingConfigEntry -from bgpranking.libs.helpers import load_general_config, get_homedir, get_ipasn -from datetime import date, timedelta -import pycountry -from collections import defaultdict +from bgpranking.bgpranking import BGPRanking +from bgpranking.default import get_config +from bgpranking.helpers import get_ipasn + +from .genericapi import api as generic_api +from .helpers import get_secret_key +from .proxied import ReverseProxied app = Flask(__name__) -secret_file_path = get_homedir() / 'website' / 'secret_key' +app.wsgi_app = ReverseProxied(app.wsgi_app) # type: ignore -if not secret_file_path.exists() or secret_file_path.stat().st_size < 64: - with open(secret_file_path, 'wb') as f: - f.write(os.urandom(64)) - -with open(secret_file_path, 'rb') as f: - app.config['SECRET_KEY'] = f.read() +app.config['SECRET_KEY'] = get_secret_key() Bootstrap(app) app.config['BOOTSTRAP_SERVE_LOCAL'] = True +bgpranking = BGPRanking() + # ############# Helpers ############# @@ -42,7 +42,7 @@ def load_session(): if request.method == 'POST': d = request.form elif request.method == 'GET': - d = request.args + d = request.args # type: ignore for key in d: if '_all' in d.getlist(key): @@ -79,27 +79,25 @@ def index(): # Just returns ack if the webserver is running return 'Ack' load_session() - q = Querying() - sources = q.get_sources(date=session['date'])['response'] + sources = bgpranking.get_sources(date=session['date'])['response'] session.pop('asn', None) session.pop('country', None) - ranks = q.asns_global_ranking(limit=100, **session)['response'] - r = [(asn, rank, q.get_asn_descriptions(int(asn))['response']) for asn, rank in ranks] + ranks = bgpranking.asns_global_ranking(limit=100, **session)['response'] + r = [(asn, rank, bgpranking.get_asn_descriptions(int(asn))['response']) for asn, rank in ranks] return render_template('index.html', ranks=r, sources=sources, countries=get_country_codes(), **session) 
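The index() view above now goes through a single module-level BGPRanking instance instead of instantiating Querying on every request. For readers following the migration, here is a minimal, hypothetical sketch (not part of this patch) of making the same calls from a standalone script; the method names and the {'response': ...} envelope come from the hunk above, while the script itself, the date value, and its ISO format are assumptions.

#!/usr/bin/env python3
# Hypothetical standalone query script; mirrors what index() does above.
from bgpranking.bgpranking import BGPRanking

if __name__ == '__main__':
    bgpranking = BGPRanking()
    day = '2021-12-06'  # assumed ISO date string, as used by the website session
    sources = bgpranking.get_sources(date=day)['response']
    print(f'Sources enabled on {day}: {sources}')
    # Top 10 ASNs for that day, with their description.
    for asn, rank in bgpranking.asns_global_ranking(limit=10, date=day)['response']:
        description = bgpranking.get_asn_descriptions(int(asn))['response']
        print(asn, rank, description)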
@app.route('/asn', methods=['GET', 'POST']) def asn_details(): load_session() - q = Querying() if 'asn' not in session: return redirect(url_for('/')) - asn_descriptions = q.get_asn_descriptions(asn=session['asn'], all_descriptions=True)['response'] - sources = q.get_sources(date=session['date'])['response'] + asn_descriptions = bgpranking.get_asn_descriptions(asn=session['asn'], all_descriptions=True)['response'] + sources = bgpranking.get_sources(date=session['date'])['response'] prefix = session.pop('prefix', None) - ranks = q.asn_details(**session)['response'] + ranks = bgpranking.asn_details(**session)['response'] if prefix: - prefix_ips = q.get_prefix_ips(prefix=prefix, **session)['response'] + prefix_ips = bgpranking.get_prefix_ips(prefix=prefix, **session)['response'] prefix_ips = [(ip, sorted(sources)) for ip, sources in prefix_ips.items()] prefix_ips.sort(key=lambda entry: len(entry[1]), reverse=True) else: @@ -111,20 +109,20 @@ def asn_details(): @app.route('/country', methods=['GET', 'POST']) def country(): load_session() - q = Querying() - sources = q.get_sources(date=session['date'])['response'] + sources = bgpranking.get_sources(date=session['date'])['response'] return render_template('country.html', sources=sources, countries=get_country_codes(), **session) @app.route('/country_history_callback', methods=['GET', 'POST']) def country_history_callback(): - history_data = request.get_json(force=True) + history_data: Dict[str, Tuple[str, str, List[Any]]] + history_data = request.get_json(force=True) # type: ignore to_display = [] - mapping = defaultdict(dict) + mapping: Dict[str, Any] = defaultdict(dict) dates = [] all_asns = set([]) - for country, data in history_data.items(): - for d, r_sum, details in data: + for country, foo in history_data.items(): + for d, r_sum, details in foo: dates.append(d) for detail in details: asn, r = detail @@ -146,7 +144,7 @@ def country_history_callback(): @app.route('/ipasn', methods=['GET', 'POST']) def ipasn(): - d = None + d: Optional[Dict] = None if request.method == 'POST': d = request.form elif request.method == 'GET': @@ -160,12 +158,11 @@ def ipasn(): else: ip = d['ip'] ipasn = get_ipasn() - q = Querying() response = ipasn.query(first=(date.today() - timedelta(days=60)).isoformat(), aggregate=True, ip=ip) for r in response['response']: r['asn_descriptions'] = [] - asn_descriptions = q.get_asn_descriptions(asn=r['asn'], all_descriptions=True)['response'] + asn_descriptions = bgpranking.get_asn_descriptions(asn=r['asn'], all_descriptions=True)['response'] for timestamp in sorted(asn_descriptions.keys()): if r['first_seen'] <= timestamp <= r['last_seen']: r['asn_descriptions'].append(asn_descriptions[timestamp]) @@ -185,16 +182,11 @@ def ipasn(): @app.route('/ipasn_history/', defaults={'path': ''}, methods=['GET', 'POST']) @app.route('/ipasn_history/', methods=['GET', 'POST']) def ipasn_history_proxy(path): - config, general_config_file = load_general_config() - if 'ipasnhistory_url' not in config: - raise MissingConfigEntry(f'"ipasnhistory_url" is missing in {general_config_file}.') path_for_ipasnhistory = request.full_path.replace('/ipasn_history', '') if '/?' 
in path_for_ipasnhistory: path_for_ipasnhistory = path_for_ipasnhistory.replace('/?', '/ip?') - print(path_for_ipasnhistory) - proxied_url = urljoin(config['ipasnhistory_url'], path_for_ipasnhistory) - print(proxied_url) + proxied_url = urljoin(get_config('generic', 'ipasnhistory_url'), path_for_ipasnhistory) if request.method in ['GET', 'HEAD']: to_return = requests.get(proxied_url).json() elif request.method == 'POST': @@ -206,17 +198,17 @@ def ipasn_history_proxy(path): def json_asn(): # TODO # * Filter on date => if only returning one descr, return the desription at that date - query = request.get_json(force=True) - to_return = {'meta': query, 'response': {}} + query: Dict[str, Any] = request.get_json(force=True) # type: ignore + to_return: Dict[str, Union[str, Dict[str, Any]]] = {'meta': query, 'response': {}} if 'asn' not in query: to_return['error'] = f'You need to pass an asn - {query}' return Response(json.dumps(to_return), mimetype='application/json') - q = Querying() asn_description_query = {'asn': query['asn']} if 'all_descriptions' in query: asn_description_query['all_descriptions'] = query['all_descriptions'] - to_return['response']['asn_description'] = q.get_asn_descriptions(**asn_description_query)['response'] + responses = bgpranking.get_asn_descriptions(**asn_description_query)['response'] + to_return['response']['asn_description'] = responses # type: ignore asn_rank_query = {'asn': query['asn']} if 'date' in query: @@ -228,60 +220,65 @@ def json_asn(): if 'ipversion' in query: asn_rank_query['ipversion'] = query['ipversion'] - to_return['response']['ranking'] = q.asn_rank(**asn_rank_query)['response'] + to_return['response']['ranking'] = bgpranking.asn_rank(**asn_rank_query)['response'] # type: ignore return Response(json.dumps(to_return), mimetype='application/json') @app.route('/json/asn_descriptions', methods=['POST']) def asn_description(): - query = request.get_json(force=True) - to_return = {'meta': query, 'response': {}} + query: Dict = request.get_json(force=True) # type: ignore + to_return: Dict[str, Union[str, Dict[str, Any]]] = {'meta': query, 'response': {}} if 'asn' not in query: to_return['error'] = f'You need to pass an asn - {query}' return Response(json.dumps(to_return), mimetype='application/json') - q = Querying() - to_return['response']['asn_descriptions'] = q.get_asn_descriptions(**query)['response'] + to_return['response']['asn_descriptions'] = bgpranking.get_asn_descriptions(**query)['response'] # type: ignore return Response(json.dumps(to_return), mimetype='application/json') @app.route('/json/asn_history', methods=['GET', 'POST']) def asn_history(): - q = Querying() if request.method == 'GET': load_session() if 'asn' in session: - return Response(json.dumps(q.get_asn_history(**session)), mimetype='application/json') + return Response(json.dumps(bgpranking.get_asn_history(**session)), mimetype='application/json') - query = request.get_json(force=True) - to_return = {'meta': query, 'response': {}} + query: Dict = request.get_json(force=True) # type: ignore + to_return: Dict[str, Union[str, Dict[str, Any]]] = {'meta': query, 'response': {}} if 'asn' not in query: to_return['error'] = f'You need to pass an asn - {query}' return Response(json.dumps(to_return), mimetype='application/json') - to_return['response']['asn_history'] = q.get_asn_history(**query)['response'] + to_return['response']['asn_history'] = bgpranking.get_asn_history(**query)['response'] # type: ignore return Response(json.dumps(to_return), mimetype='application/json') 
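Because the view relies on request.get_json(force=True), any client can POST a JSON body directly. A hypothetical client call (not part of this patch) could look as follows; the accepted keys ('asn', 'all_descriptions', 'date', 'source', 'ipversion') come from json_asn() above, while the base URL and the '/json/asn' route path are assumptions about the deployment.

import requests

# Hypothetical client for the JSON endpoint handled by json_asn() above.
BASE_URL = 'http://127.0.0.1:5005'  # assumed local website instance

query = {'asn': 1234, 'date': '2021-12-06', 'ipversion': 'v4', 'all_descriptions': True}
r = requests.post(f'{BASE_URL}/json/asn', json=query)
result = r.json()
print(result['response'].get('asn_description'))
print(result['response'].get('ranking'))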
@app.route('/json/country_history', methods=['GET', 'POST']) def country_history(): - q = Querying() if request.method == 'GET': load_session() - return Response(json.dumps(q.country_history(**session)), mimetype='application/json') + return Response(json.dumps(bgpranking.country_history(**session)), mimetype='application/json') - query = request.get_json(force=True) - to_return = {'meta': query, 'response': {}} - to_return['response']['country_history'] = q.country_history(**query)['response'] + query: Dict = request.get_json(force=True) # type: ignore + to_return: Dict[str, Union[str, Dict[str, Any]]] = {'meta': query, 'response': {}} + to_return['response']['country_history'] = bgpranking.country_history(**query)['response'] # type: ignore return Response(json.dumps(to_return), mimetype='application/json') @app.route('/json/asns_global_ranking', methods=['POST']) def json_asns_global_ranking(): - query = request.get_json(force=True) - to_return = {'meta': query, 'response': {}} - q = Querying() - to_return['response'] = q.asns_global_ranking(**query)['response'] + query: Dict = request.get_json(force=True) # type: ignore + to_return: Dict[str, Union[str, Dict[str, Any]]] = {'meta': query, 'response': {}} + to_return['response'] = bgpranking.asns_global_ranking(**query)['response'] return Response(json.dumps(to_return), mimetype='application/json') # ############# Json outputs ############# + + +# Query API + +api = Api(app, title='BGP Ranking API', + description='API to query BGP Ranking.', + version=pkg_resources.get_distribution('bgpranking').version) + +api.add_namespace(generic_api) diff --git a/website/web/genericapi.py b/website/web/genericapi.py new file mode 100644 index 0000000..5145375 --- /dev/null +++ b/website/web/genericapi.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import pkg_resources + +from flask import Flask +from flask_restx import Api, Resource # type: ignore + +from bgpranking.bgpranking import BGPRanking + +from .helpers import get_secret_key +from .proxied import ReverseProxied + +app: Flask = Flask(__name__) + +app.wsgi_app = ReverseProxied(app.wsgi_app) # type: ignore + +app.config['SECRET_KEY'] = get_secret_key() + +api = Api(app, title='BGP Ranking API', + description='API to query BGP Ranking.', + version=pkg_resources.get_distribution('bgpranking').version) + +bgpranking: BGPRanking = BGPRanking() + + +@api.route('/redis_up') +@api.doc(description='Check if redis is up and running') +class RedisUp(Resource): + + def get(self): + return bgpranking.check_redis_up() diff --git a/website/web/helpers.py b/website/web/helpers.py new file mode 100644 index 0000000..f5fcb3c --- /dev/null +++ b/website/web/helpers.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +from functools import lru_cache +from pathlib import Path + +from bgpranking.default import get_homedir + + +def src_request_ip(request) -> str: + # NOTE: X-Real-IP is the IP passed by the reverse proxy in the headers. 
+ real_ip = request.headers.get('X-Real-IP') + if not real_ip: + real_ip = request.remote_addr + return real_ip + + +@lru_cache(64) +def get_secret_key() -> bytes: + secret_file_path: Path = get_homedir() / 'secret_key' + if not secret_file_path.exists() or secret_file_path.stat().st_size < 64: + if not secret_file_path.exists() or secret_file_path.stat().st_size < 64: + with secret_file_path.open('wb') as f: + f.write(os.urandom(64)) + with secret_file_path.open('rb') as f: + return f.read() diff --git a/website/web/proxied.py b/website/web/proxied.py new file mode 100644 index 0000000..1175294 --- /dev/null +++ b/website/web/proxied.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from typing import Any, MutableMapping + + +class ReverseProxied(): + def __init__(self, app: Any) -> None: + self.app = app + + def __call__(self, environ: MutableMapping[str, Any], start_response: Any) -> Any: + scheme = environ.get('HTTP_X_FORWARDED_PROTO') + if not scheme: + scheme = environ.get('HTTP_X_SCHEME') + + if scheme: + environ['wsgi.url_scheme'] = scheme + return self.app(environ, start_response) diff --git a/website/web/static/linegraph_country.js b/website/web/static/linegraph_country.js index 6acc867..cb4f701 100644 --- a/website/web/static/linegraph_country.js +++ b/website/web/static/linegraph_country.js @@ -1,3 +1,5 @@ +"use strict"; + function linegraph(call_path) { var svg = d3.select("svg"), margin = {top: 20, right: 80, bottom: 30, left: 50}, @@ -16,12 +18,11 @@ function linegraph(call_path) { .x(function(d) { return x(d.date); }) .y(function(d) { return y(d.rank); }); - d3.json(call_path, {credentials: 'same-origin'}).then(function(data) { - - var country_ranks = d3.entries(data.response).map(function(country_rank) { + d3.json(call_path, {credentials: 'same-origin'}).then(data => { + var country_ranks = $.map(data.response, function(value, key) { return { - country: country_rank.key, - values: d3.values(country_rank.value).map(function(d) { + country: key, + values: $.map(value, function(d) { return {date: parseTime(d[0]), rank: d[1]}; }) }; @@ -72,8 +73,9 @@ function linegraph(call_path) { {credentials: 'same-origin', method: 'POST', body: JSON.stringify(data.response), - }).then(function(data) { + }) + .then(function(data) { d3.select('#asn_details').html(data); - }); + }); }); }; diff --git a/website/web/templates/main.html b/website/web/templates/main.html index 45a1aa8..ef8420c 100644 --- a/website/web/templates/main.html +++ b/website/web/templates/main.html @@ -1,21 +1,28 @@ -{% extends "bootstrap/base.html" %} - -{% block scripts %} - {{ super() }} - - - -{% endblock %} - -{% block head %} - - {{ super() }} -{% endblock %} - -{% block content %} - {{ super() }} -{% endblock %} + + + + {% block head %} + + + {% block styles %} + {{ bootstrap.load_css() }} + + {% endblock %} + {% endblock %} + + +
+ {% block content %}{% endblock%} +
+ {% block scripts %} + {{ bootstrap.load_js() }} + + + + {% endblock %} + +
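With flask_restx wired in, the website now exposes a small generic API next to the HTML views. Below is a hypothetical smoke test (not part of this patch) for the '/redis_up' resource added in website/web/genericapi.py; the route comes from the diff above, while the host and port are assumptions about a local setup.

import requests

# Hypothetical check of the new generic API; prints whatever check_redis_up() returns.
BASE_URL = 'http://127.0.0.1:5005'  # assumed local website instance

response = requests.get(f'{BASE_URL}/redis_up')
print(response.status_code, response.json())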