fix: do not keep broken IPs forever in redis

pull/19/head
Raphaël Vinot 2022-12-12 11:41:13 +01:00
parent f49bbb87b6
commit 79102a23eb
4 changed files with 23 additions and 3 deletions

View File

@@ -74,6 +74,7 @@ class DBInsertManager(AbstractManager):
             data = sanitized_data[i]
             if not data:
                 self.logger.warning(f'No data for UUID {uuid}. This should not happen, but lets move on.')
+                done.append(uuid)
                 continue
             routing_info = responses['responses'][i]['response']  # our queries are on one single date, not a range
             # Data gathered from IPASN History:
@@ -81,6 +82,7 @@ class DBInsertManager(AbstractManager):
             # * AS number
             if 'error' in routing_info:
                 self.logger.warning(f"Unable to find routing information for {data['ip']} - {data['datetime']}: {routing_info['error']}")
+                done.append(uuid)
                 continue
             # Single date query, getting from the object
             datetime_routing = list(routing_info.keys())[0]
@@ -91,9 +93,11 @@ class DBInsertManager(AbstractManager):
                 continue
             if 'asn' in entry and entry['asn'] in [None, '0']:
                 self.logger.warning(f"Unable to find the AS number associated to {data['ip']} - {data['datetime']} (got {entry['asn']}).")
+                done.append(uuid)
                 continue
             if 'prefix' in entry and entry['prefix'] in [None, '0.0.0.0/0', '::/0']:
                 self.logger.warning(f"Unable to find the prefix associated to {data['ip']} - {data['datetime']} (got {entry['prefix']}).")
+                done.append(uuid)
                 continue
             # Format: <YYYY-MM-DD>|sources -> set([<source>, ...])

4
poetry.lock generated
View File

@@ -818,8 +818,8 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"]

[metadata]
lock-version = "1.1"
-python-versions = "^3.8"
-content-hash = "a7b14e4e425b55589575ff0b0b2e8ec99f3d32ef32c8c3ac90dcf7ec2afb9fee"
+python-versions = "^3.8.1"
+content-hash = "00557ce6a5aa68b4251296d5d45716a49467cecf5cde1a0cab3c00e2f1d721ff"

[metadata.files]
aiohttp = [

View File

@@ -24,7 +24,7 @@ ssfetcher = "bin.ssfetcher:main"

[tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.8.1"
redis = {version = "^4.4.0", extras = ["hiredis"]}
flask-restx = "^0.5.1"
gunicorn = "^20.1.0"

16
tools/clear_prepare_db.py Normal file
View File

@ -0,0 +1,16 @@
#!/usr/bin/env python3

"""Maintenance tool for the 'prepare' redis database.

Scans every HASH key in the sanitized-data DB and prints the keys that
look like entry UUIDs but are not queued in the 'to_insert' set — i.e.
entries that would otherwise sit in redis forever.
"""

import uuid

from redis import Redis

from bgpranking.default import get_socket_path

redis_sanitized = Redis(unix_socket_path=get_socket_path('prepare'), db=0,
                        decode_responses=True)

# scan_iter() returns a generator; it must be iterated with `for`.
# (A `while name := scan_iter(...)` loop binds the generator object itself,
# which is always truthy, and never yields a single key.)
for name in redis_sanitized.scan_iter(_type='HASH', count=10):
    try:
        # Keys in this DB are expected to be UUIDs; skip anything else.
        # uuid.UUID is the constructor — the module has no `uuid.uuid`.
        uuid.UUID(name)
    except ValueError:
        continue
    # sismember requires both the set name and the member to test.
    if not redis_sanitized.sismember('to_insert', name):
        print(name)