new: Force usage of redis-py 3+

pull/12/head
Raphaël Vinot 2018-11-26 14:59:53 +01:00
parent aae34e324d
commit 367f2f8421
3 changed files with 17 additions and 11 deletions


@@ -11,7 +11,7 @@ from uuid import uuid4
 from io import BytesIO
 import importlib
-from typing import List, Union, Tuple
+from typing import List
 import types
 from .libs.helpers import safe_create_dir, set_running, unset_running, get_socket_path
@@ -74,9 +74,9 @@ class RawFilesParser():
         else:
             datetime = self.datetime
         uuid = uuid4()
-        p.hmset(uuid, {'ip': ip, 'source': self.source,
-                       'datetime': datetime.isoformat()})
-        p.sadd('intake', uuid)
+        p.hmset(str(uuid), {'ip': ip, 'source': self.source,
+                            'datetime': datetime.isoformat()})
+        p.sadd('intake', str(uuid))
     p.execute()
     self._archive(filepath)
 except Exception as e:
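The str() wrapping is the behavioural change behind this hunk: redis-py 3 only accepts bytes, str, int and float as keys and values, and rejects anything else (such as a UUID object) with redis.exceptions.DataError, where redis-py 2 silently fell back to str(). A minimal sketch of that difference, not part of the commit, assuming a throwaway Redis instance reachable with default settings:

from uuid import uuid4

import redis

r = redis.Redis()  # hypothetical local instance, not the project's socket setup
uuid = uuid4()

try:
    r.sadd('intake', uuid)         # accepted by redis-py 2, raises DataError on redis-py 3
except redis.exceptions.DataError:
    r.sadd('intake', str(uuid))    # explicit conversion works on both major versions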


@@ -51,13 +51,13 @@ class Ranking():
                    for ip_ts in self.storage.smembers(f'{day}|{source}|{asn}|{prefix}')])
         py_prefix = ip_network(prefix)
         prefix_rank = float(len(ips)) / py_prefix.num_addresses
-        r_pipeline.zadd(f'{day}|{source}|{asn}|v{py_prefix.version}|prefixes', prefix_rank, prefix)
+        r_pipeline.zadd(f'{day}|{source}|{asn}|v{py_prefix.version}|prefixes', {prefix: prefix_rank})
         if py_prefix.version == 4:
             asn_rank_v4 += len(ips) * self.config_files[source]['impact']
-            r_pipeline.zincrby(prefixes_aggregation_key_v4, prefix, prefix_rank * self.config_files[source]['impact'])
+            r_pipeline.zincrby(prefixes_aggregation_key_v4, prefix_rank * self.config_files[source]['impact'], prefix)
         else:
             asn_rank_v6 += len(ips) * self.config_files[source]['impact']
-            r_pipeline.zincrby(prefixes_aggregation_key_v6, prefix, prefix_rank * self.config_files[source]['impact'])
+            r_pipeline.zincrby(prefixes_aggregation_key_v6, prefix_rank * self.config_files[source]['impact'], prefix)
     v4info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v4', date=day)
     v6info = self.ipasn.asn_meta(asn=asn, source='caida', address_family='v6', date=day)
     ipasnhistory_date_v4 = list(v4info['response'].keys())[0]
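The zadd rewrite reflects redis-py 3's new signature: scores and members are no longer passed positionally, but as a dict mapping each member to its score. A sketch with a made-up key, ASN and prefixes, not taken from the repository:

import redis

r = redis.Redis()  # hypothetical local instance

# redis-py 2 (StrictRedis): r.zadd(key, 0.5, '192.0.2.0/24')
# redis-py 3:               r.zadd(key, {'192.0.2.0/24': 0.5})
key = '2018-11-26|example|64511|v4|prefixes'
r.zadd(key, {'192.0.2.0/24': 0.5, '198.51.100.0/24': 0.25})
print(r.zscore(key, '192.0.2.0/24'))  # 0.5

Passing a dict also allows several members to be added in a single call, which is why the example inserts two prefixes at once.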
@@ -68,14 +68,14 @@ class Ranking():
     asn_rank_v4 /= float(v4count)
     if asn_rank_v4:
         r_pipeline.set(f'{day}|{source}|{asn}|v4', asn_rank_v4)
-        r_pipeline.zincrby(asns_aggregation_key_v4, asn, asn_rank_v4)
-        r_pipeline.zadd(source_aggregation_key_v4, asn_rank_v4, asn)
+        r_pipeline.zincrby(asns_aggregation_key_v4, asn_rank_v4, asn)
+        r_pipeline.zadd(source_aggregation_key_v4, {asn: asn_rank_v4})
 if v6count:
     asn_rank_v6 /= float(v6count)
     if asn_rank_v6:
         r_pipeline.set(f'{day}|{source}|{asn}|v6', asn_rank_v6)
-        r_pipeline.zincrby(asns_aggregation_key_v6, asn, asn_rank_v6)
-        r_pipeline.zadd(source_aggregation_key_v6, asn_rank_v4, asn)
+        r_pipeline.zincrby(asns_aggregation_key_v6, asn_rank_v6, asn)
+        r_pipeline.zadd(source_aggregation_key_v6, {asn: asn_rank_v6})
 self.ranking.delete(*to_delete)
 r_pipeline.execute()
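The zincrby arguments are swapped for the same reason: redis-py 2's StrictRedis expected zincrby(name, value, amount), while redis-py 3 follows the Redis command order, zincrby(name, amount, value). A sketch with a made-up key and ASN, not taken from the repository:

import redis

r = redis.Redis()  # hypothetical local instance

# redis-py 2 (StrictRedis): r.zincrby('asns|v4', '64511', 0.75)
# redis-py 3:               r.zincrby('asns|v4', 0.75, '64511')
r.zincrby('asns|v4', 0.75, '64511')
print(r.zscore('asns|v4', '64511'))  # 0.75 after a single increment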


@@ -4,6 +4,12 @@
 from subprocess import Popen
 from bgpranking.libs.helpers import get_homedir
+import redis
+import sys
+
+if redis.VERSION < (3, ):
+    print('redis-py >= 3 is required.')
+    sys.exit()
 
 if __name__ == '__main__':
     # Just fail if the env isn't set.
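The guard added to the startup script relies on redis.VERSION, which redis-py exposes as a tuple (for example (3, 0, 1)), so a plain tuple comparison against (3, ) rejects any 2.x installation before the incompatible calls above are ever reached. A quick illustrative check, run against whatever redis-py is installed locally:

import redis

print(redis.VERSION)          # e.g. (3, 0, 1)
print(redis.VERSION < (3, ))  # True only on a 2.x installation, which triggers the exit above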