#!/usr/bin/env python
# -*- coding: utf-8 -*-

import ipaddress
import logging
from datetime import timezone

from dateutil import parser
from redis import StrictRedis

from .libs.helpers import shutdown_requested, set_running, unset_running, get_socket_path, get_ipasn, sanity_check_ipasn

class Sanitizer():
    """Pop raw entries from the 'intake' queue, validate them, and push
    sanitized records into the 'prepare' database for further insertion.

    Each intake entry is a redis hash keyed by UUID, expected to carry at
    least the fields 'ip', 'source' and 'datetime'.
    """

    def __init__(self, loglevel: int=logging.DEBUG):
        self.__init_logger(loglevel)
        # Raw submissions land in 'intake'; sanitized ones go to 'prepare'.
        self.redis_intake = StrictRedis(unix_socket_path=get_socket_path('intake'), db=0, decode_responses=True)
        self.redis_sanitized = StrictRedis(unix_socket_path=get_socket_path('prepare'), db=0, decode_responses=True)
        self.ipasn = get_ipasn()
        self.logger.debug('Starting import')

    def __init_logger(self, loglevel):
        # One logger per class name so each component logs under its own tag.
        self.logger = logging.getLogger(f'{self.__class__.__name__}')
        self.logger.setLevel(loglevel)

    def sanitize(self):
        """Process intake entries in batches of 100 until the queue is empty,
        a shutdown is requested, or IPASN History becomes unavailable.
        """
        ready, message = sanity_check_ipasn(self.ipasn)
        if not ready:
            # IPASN History isn't usable yet; try again on the next run.
            self.logger.warning(message)
            return
        self.logger.debug(message)

        set_running(self.__class__.__name__)
        while True:
            if shutdown_requested() or not self.ipasn.is_up:
                break
            uuids = self.redis_intake.spop('intake', 100)
            if not uuids:
                break
            for_cache = []
            pipeline = self.redis_sanitized.pipeline(transaction=False)
            for uuid in uuids:
                data = self.redis_intake.hgetall(uuid)
                try:
                    # KeyError: malformed entry with no 'ip' field at all.
                    ip = ipaddress.ip_address(data['ip'])
                    if isinstance(ip, ipaddress.IPv6Address):
                        address_family = 'v6'
                    else:
                        address_family = 'v4'
                except (KeyError, ValueError):
                    # data.get so a missing key can't re-raise in the handler.
                    self.logger.info(f"Invalid IP address: {data.get('ip')}")
                    continue
                if not ip.is_global:
                    self.logger.info(f"The IP address {data['ip']} is not global")
                    continue

                # Normalize to a naive UTC datetime so downstream comparisons
                # never mix aware and naive values.
                event_time = parser.parse(data['datetime'])
                if event_time.tzinfo:
                    event_time = event_time.astimezone(timezone.utc).replace(tzinfo=None)

                # NOTE(review): source is hard-coded to 'caida' here while the
                # stored record below uses data['source'] — confirm intended.
                for_cache.append({'ip': str(ip), 'address_family': address_family, 'source': 'caida',
                                  'date': event_time.isoformat(), 'precision_delta': {'days': 3}})

                # Add to temporary DB for further processing
                pipeline.hmset(uuid, {'ip': str(ip), 'source': data['source'], 'address_family': address_family,
                                      'date': event_time.date().isoformat(), 'datetime': event_time.isoformat()})
                pipeline.sadd('to_insert', uuid)
            pipeline.execute()

            try:
                # Just cache everything so the lookup scripts can do their thing.
                self.ipasn.mass_cache(for_cache)
            except Exception:
                self.logger.exception('Mass cache in IPASN History failed, trying again later.')
                # Rollback the spop so the batch is retried on the next run.
                self.redis_intake.sadd('intake', *uuids)
                break
            # Drop the raw entries only once the batch fully succeeded:
            # deleting them before mass_cache would make the rollback above
            # re-queue UUIDs whose backing hashes are already gone, crashing
            # the retry with a KeyError on an empty hgetall result.
            self.redis_intake.delete(*uuids)
        unset_running(self.__class__.__name__)