new: Add Python client, rework API accordingly.
parent 794e050c16
commit 36003a6a02
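The new `pybgpranking` package added below exposes a `BGPRanking` class whose `query()` posts to the reworked `/json/asn` endpoint and returns the new `{'meta': ..., 'response': ...}` envelope. A minimal usage sketch based on the code in this commit (the ASN is a placeholder value; the URL is the client's default instance):

# Sketch only: mirrors the bin/bgpranking script from this commit; AS 5577 is a placeholder.
from datetime import date, timedelta

from pybgpranking import BGPRanking

bgpranking = BGPRanking('https://bgpranking-ng.circl.lu/')
if bgpranking.is_up:
    # /json/asn wraps its payload in a {'meta': ..., 'response': ...} envelope.
    result = bgpranking.query('5577', date=(date.today() - timedelta(1)).isoformat())
    print(result['response']['ranking'])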
@@ -42,7 +42,7 @@ class Querying():
             except ValueError:
                 raise InvalidDateFormat('Unable to parse the date. Should be YYYY-MM-DD.')
 
-    def ranking_cache_wrapper(self, key):
+    def _ranking_cache_wrapper(self, key):
         if not self.cache.exists(key):
             if self.ranking.exists(key):
                 key_dump = self.ranking.dump(key)

@@ -52,13 +52,17 @@ class Querying():
     def asns_global_ranking(self, date: Dates=datetime.date.today(), source: Union[list, str]='',
                             ipversion: str='v4', limit: int=100):
         '''Aggregated ranking of all the ASNs known in the system, weighted by source.'''
+        to_return = {'meta': {'ipversion': ipversion, 'limit': limit}, 'source': source,
+                     'response': set()}
 
         d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
         if source:
             if isinstance(source, list):
                 keys = []
                 for s in source:
                     key = f'{d}|{s}|asns|{ipversion}'
-                    self.ranking_cache_wrapper(key)
+                    self._ranking_cache_wrapper(key)
                     keys.append(key)
                 # union the ranked sets
                 key = '|'.join(sorted(source)) + f'|{d}|asns|{ipversion}'

@@ -68,19 +72,24 @@ class Querying():
                 key = f'{d}|{source}|asns|{ipversion}'
         else:
             key = f'{d}|asns|{ipversion}'
-        self.ranking_cache_wrapper(key)
+        self._ranking_cache_wrapper(key)
-        return self.cache.zrevrange(key, start=0, end=limit, withscores=True)
+        to_return['response'] = self.cache.zrevrange(key, start=0, end=limit, withscores=True)
+        return to_return
 
     def asn_details(self, asn: int, date: Dates= datetime.date.today(), source: Union[list, str]='',
                     ipversion: str='v4'):
         '''Aggregated ranking of all the prefixes anounced by the given ASN, weighted by source.'''
+        to_return = {'meta': {'asn': asn, 'ipversion': ipversion, 'source': source},
+                     'response': set()}
 
         d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
         if source:
             if isinstance(source, list):
                 keys = []
                 for s in source:
                     key = f'{d}|{s}|{asn}|{ipversion}|prefixes'
-                    self.ranking_cache_wrapper(key)
+                    self._ranking_cache_wrapper(key)
                     keys.append(key)
                 # union the ranked sets
                 key = '|'.join(sorted(source)) + f'|{d}|{asn}|{ipversion}'

@@ -90,84 +99,122 @@ class Querying():
                 key = f'{d}|{source}|{asn}|{ipversion}|prefixes'
         else:
             key = f'{d}|{asn}|{ipversion}'
-        self.ranking_cache_wrapper(key)
+        self._ranking_cache_wrapper(key)
-        return self.cache.zrevrange(key, start=0, end=-1, withscores=True)
+        to_return['response'] = self.cache.zrevrange(key, start=0, end=-1, withscores=True)
+        return to_return
 
     def asn_rank(self, asn: int, date: Dates=datetime.date.today(), source: Union[list, str]='',
-                 ipversion: str='v4'):
+                 ipversion: str='v4', with_position: bool=False):
         '''Get the rank of a single ASN, weighted by source.'''
+        to_return = {'meta': {'asn': asn, 'ipversion': ipversion,
+                              'source': source, 'with_position': with_position},
+                     'response': 0.0}
 
         d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
         if source:
+            to_return['meta']['source'] = source
             if isinstance(source, list):
                 keys = []
                 for s in source:
                     key = f'{d}|{s}|{asn}|{ipversion}'
-                    self.ranking_cache_wrapper(key)
+                    self._ranking_cache_wrapper(key)
                     keys.append(key)
                 r = sum(float(self.cache.get(key)) for key in keys if self.cache.exists(key))
             else:
                 key = f'{d}|{source}|{asn}|{ipversion}'
-                self.ranking_cache_wrapper(key)
+                self._ranking_cache_wrapper(key)
                 r = self.cache.get(key)
         else:
             key = f'{d}|asns|{ipversion}'
-            self.ranking_cache_wrapper(key)
+            self._ranking_cache_wrapper(key)
             r = self.cache.zscore(key, asn)
-        if r:
+        if not r:
-            return float(r)
+            r = 0
-        return 0
+        if with_position and not source:
+            to_return['response'] = {'rank': float(r), 'position': self.cache.zrank(key, asn),
+                                     'total_known_asns': self.cache.zcard(key)}
+        else:
+            to_return['response'] = float(r)
+        return to_return
 
     def get_sources(self, date: Dates=datetime.date.today()):
         '''Get the sources availables for a specific day (default: today).'''
+        to_return = {'meta': {}, 'response': set()}
 
         d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
         key = f'{d}|sources'
-        return self.storage.smembers(key)
+        to_return['response'] = self.storage.smembers(key)
+        return to_return
 
     def get_asn_descriptions(self, asn: int, all_descriptions=False):
+        to_return = {'meta': {'asn': asn, 'all_descriptions': all_descriptions},
+                     'response': []}
         descriptions = self.asn_meta.hgetall(f'{asn}|descriptions')
         if all_descriptions or not descriptions:
-            return descriptions
+            to_return['response'] = descriptions
-        return descriptions[sorted(descriptions.keys(), reverse=True)[0]]
+        else:
+            to_return['response'] = descriptions[sorted(descriptions.keys(), reverse=True)[0]]
+        return to_return
 
     def get_prefix_ips(self, asn: int, prefix: str, date: Dates=datetime.date.today(),
                        source: Union[list, str]='', ipversion: str='v4'):
+        to_return = {'meta': {'asn': asn, 'prefix': prefix, 'ipversion': ipversion,
+                              'source': source},
+                     'response': defaultdict(list)}
 
+        d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
 
         if source:
+            to_return['meta']['source'] = source
             if isinstance(source, list):
                 sources = source
             else:
                 sources = [source]
         else:
-            sources = self.get_sources(date)
+            sources = self.get_sources(d)['response']
-        prefix_ips = defaultdict(list)
-        d = self.__normalize_date(date)
         for source in sources:
             ips = set([ip_ts.split('|')[0]
                        for ip_ts in self.storage.smembers(f'{d}|{source}|{asn}|{prefix}')])
-            [prefix_ips[ip].append(source) for ip in ips]
+            [to_return['response'][ip].append(source) for ip in ips]
-        return prefix_ips
+        return to_return
 
     def get_asn_history(self, asn: int, period: int=100, source: Union[list, str]='',
                         ipversion: str='v4', date: Dates=datetime.date.today()):
-        to_return = []
+        to_return = {'meta': {'asn': asn, 'period': period, 'ipversion': ipversion,
+                              'source': source},
+                     'response': []}
 
         if isinstance(date, str):
             date = parse(date).date()
         if date + timedelta(days=period / 3) > datetime.date.today():
             # the period to display will be around the date passed at least 2/3 before the date, at most 1/3 after
+            # FIXME: That is not doing what it is supposed to...
             date = datetime.date.today()
 
+        to_return['meta']['date'] = date.isoformat()
 
         for i in range(period):
             d = date - timedelta(days=i)
             rank = self.asn_rank(asn, d, source, ipversion)
-            if rank is None:
+            if 'response' not in rank:
                 rank = 0
-            to_return.insert(0, (d.isoformat(), rank))
+            to_return['response'].insert(0, (d.isoformat(), rank['response']))
         return to_return
 
     def country_rank(self, country: str, date: Dates=datetime.date.today(), source: Union[list, str]='',
                      ipversion: str='v4'):
-        ripe = StatsRIPE()
+        to_return = {'meta': {'country': country, 'ipversion': ipversion,
+                              'source': source},
+                     'response': []}
 
         d = self.__normalize_date(date)
+        to_return['meta']['date'] = d
 
+        ripe = StatsRIPE()
         response = ripe.country_asns(country, query_time=d, details=1)
         if (not response.get('data') or not response['data'].get('countries') or not
                 response['data']['countries'][0].get('routed')):

@@ -175,14 +222,17 @@ class Querying():
             # FIXME: return something
             return 0, [(0, 0)]
         routed_asns = response['data']['countries'][0]['routed']
-        ranks = [self.asn_rank(asn, d, source, ipversion) for asn in routed_asns]
+        ranks = [self.asn_rank(asn, d, source, ipversion)['response'] for asn in routed_asns]
-        to_return = zip(routed_asns, ranks)
+        print(ranks)
-        daily_sum = sum(ranks)
+        to_return['response'] = [sum(ranks), zip(routed_asns, ranks)]
-        return daily_sum, to_return
+        return to_return
 
     def country_history(self, country: Union[list, str], period: int=30, source: Union[list, str]='',
                         ipversion: str='v4', date: Dates=datetime.date.today()):
         to_return = {}
+        to_return = {'meta': {'country': country, 'ipversion': ipversion,
+                              'source': source},
+                     'response': defaultdict(list)}
 
         if isinstance(date, str):
             date = parse(date).date()

@@ -193,13 +243,12 @@ class Querying():
         if isinstance(country, str):
             country = [country]
         for c in country:
-            to_return[c] = []
             for i in range(period):
                 d = date - timedelta(days=i)
-                rank, details = self.country_rank(c, d, source, ipversion)
+                rank, details = self.country_rank(c, d, source, ipversion)['response']
                 if rank is None:
                     rank = 0
-                to_return[c].insert(0, (d.isoformat(), rank, list(details)))
+                to_return['response'][c].insert(0, (d.isoformat(), rank, list(details)))
         return to_return
 
     def get_source_config(self):
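Note on the rework above: every `Querying` method now wraps its result in a `{'meta': ..., 'response': ...}` dict instead of returning a bare value. Roughly, `asn_rank(asn, with_position=True)` now hands back a structure like the sketch below (the key names come from the diff above; the values are illustrative only):

# Illustrative shape only; all values are made up, the keys come from asn_rank() above.
example_rank = {
    'meta': {'asn': 65536, 'ipversion': 'v4', 'source': '',
             'with_position': True, 'date': '2018-04-10'},
    'response': {'rank': 0.0004, 'position': 42, 'total_known_asns': 60000},
}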
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import argparse
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
+from urllib.parse import urljoin
+from pybgpranking import BGPRanking
+from pyipasnhistory import IPASNHistory
+from datetime import date, timedelta
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Run a query against BGP Ranking')
+    parser.add_argument('--url', type=str, help='URL of the instance.')
+    group = parser.add_mutually_exclusive_group(required=True)
+
+    group.add_argument('--asn', help='ASN to lookup')
+    group.add_argument('--ip', help='IP to lookup')
+
+    args = parser.parse_args()
+
+    if args.url:
+        bgpranking = BGPRanking(args.url)
+        ipasn = IPASNHistory(urljoin(args.url, 'ipasn_history'))
+    else:
+        bgpranking = BGPRanking()
+        ipasn = IPASNHistory()
+
+    if args.ip:
+        response = ipasn.query(args.ip)
+        print(json.dumps(response, indent=2))
+        if 'response' in response and response['response']:
+            asn = response['response'][list(response['response'].keys())[0]]['asn']
+    else:
+        asn = args.asn
+
+    response = bgpranking.query(asn, date=(date.today() - timedelta(1)).isoformat())
+    print(json.dumps(response, indent=2))

@@ -0,0 +1 @@
+from .api import BGPRanking # noqa

@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
+from typing import Union
+
+import requests
+from urllib.parse import urljoin
+
+
+class BGPRanking():
+
+    def __init__(self, root_url: str='https://bgpranking-ng.circl.lu/'):
+        self.root_url = root_url
+        if not self.root_url.endswith('/'):
+            self.root_url += '/'
+        self.session = requests.session()
+
+    @property
+    def is_up(self):
+        r = self.session.head(self.root_url)
+        return r.status_code == 200
+
+    def query(self, asn: str, address_family: str='v4', date: str=None,
+              source: Union[list, str]=''):
+        '''Launch a query.
+        :param asn: ASN to lookup
+        :param address_family: v4 or v6
+        :param date: Exact date to lookup. Fallback to most recent available.
+        :param source: Source to query. Can be a list of sources.
+        '''
+        to_query = {'asn': asn, 'address_family': address_family}
+        if date:
+            to_query['date'] = date
+        if source:
+            to_query['source'] = source
+        r = self.session.post(urljoin(self.root_url, '/json/asn'), data=json.dumps(to_query))
+        return r.json()

@@ -0,0 +1 @@
+requests

@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from setuptools import setup
+
+
+setup(
+    name='pybgpranking',
+    version='0.1',
+    author='Raphaël Vinot',
+    author_email='raphael.vinot@circl.lu',
+    maintainer='Raphaël Vinot',
+    url='https://github.com/D4-project/BGP-Ranking/client',
+    description='Python client for BGP Ranking',
+    packages=['pybgpranking'],
+    scripts=['bin/bgpranking'],
+    classifiers=[
+        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
+        'Development Status :: 3 - Alpha',
+        'Environment :: Console',
+        'Operating System :: POSIX :: Linux',
+        'Intended Audience :: Science/Research',
+        'Intended Audience :: Telecommunications Industry',
+        'Intended Audience :: Information Technology',
+        'Programming Language :: Python :: 3',
+        'Topic :: Security',
+        'Topic :: Internet',
+    ]
+)
@@ -27,6 +27,8 @@ Bootstrap(app)
 app.config['BOOTSTRAP_SERVE_LOCAL'] = True
 
 
+# ############# Helpers #############
+
 def get_request_parameter(parameter):
     if request.method == 'POST':
         d = request.form

@@ -72,20 +74,10 @@ def get_country_codes():
     for c in pycountry.countries:
         yield c.alpha_2, c.name
 
+# ############# Helpers ######################
 
-@app.route('/ipasn_history/', defaults={'path': ''}, methods=['GET', 'POST'])
-@app.route('/ipasn_history/<path:path>', methods=['GET', 'POST'])
-def ipasn_history_proxy(path):
-    config, general_config_file = load_general_config()
-    if 'ipasnhistory_url' not in config:
-        raise MissingConfigEntry(f'"ipasnhistory_url" is missing in {general_config_file}.')
-    proxied_url = urljoin(config['ipasnhistory_url'], request.full_path.replace('/ipasn_history', ''))
-    if request.method in ['GET', 'HEAD']:
-        to_return = requests.get(proxied_url).json()
-    elif request.method == 'POST':
-        to_return = requests.post(proxied_url, data=request.data).json()
-    return Response(json.dumps(to_return), mimetype='application/json')
 
+# ############# Web UI #############
 
 @app.route('/', methods=['GET', 'POST'])
 def index():

@@ -94,12 +86,11 @@ def index():
         return 'Ack'
     load_session()
     q = Querying()
-    sources = q.get_sources(date=session['date'])
+    sources = q.get_sources(date=session['date'])['response']
     session.pop('asn', None)
     session.pop('country', None)
-    ranks = q.asns_global_ranking(limit=100, **session)
+    ranks = q.asns_global_ranking(limit=100, **session)['response']
-    descriptions = [q.get_asn_descriptions(int(asn)) for asn, rank in ranks]
+    r = [(asn, rank, q.get_asn_descriptions(int(asn))['response']) for asn, rank in ranks]
-    r = zip(ranks, descriptions)
     return render_template('index.html', ranks=r, sources=sources, countries=get_country_codes(), **session)
 
 
@@ -109,47 +100,31 @@ def asn_details():
     q = Querying()
     if 'asn' not in session:
         return redirect(url_for('/'))
-    asn_descriptions = q.get_asn_descriptions(asn=session['asn'], all_descriptions=True)
+    asn_descriptions = q.get_asn_descriptions(asn=session['asn'], all_descriptions=True)['response']
-    sources = q.get_sources(date=session['date'])
+    sources = q.get_sources(date=session['date'])['response']
-    ranks = q.asn_details(**session)
+    ranks = q.asn_details(**session)['response']
     prefix = get_request_parameter('prefix')
     if prefix:
-        prefix_ips = q.get_prefix_ips(prefix=prefix, **session)
+        prefix_ips = q.get_prefix_ips(prefix=prefix, **session)['response']
         prefix_ips = [(ip, sorted(sources)) for ip, sources in prefix_ips.items()]
         prefix_ips.sort(key=lambda entry: len(entry[1]), reverse=True)
     else:
        prefix_ips = []
-    return render_template('asn.html', sources=sources, ranks=ranks, prefix_ips=prefix_ips, asn_descriptions=asn_descriptions, **session)
+    return render_template('asn.html', sources=sources, ranks=ranks,
+                           prefix_ips=prefix_ips, asn_descriptions=asn_descriptions, **session)
 
 
-@app.route('/asn_description', methods=['POST'])
+@app.route('/country', methods=['GET', 'POST'])
-def asn_description():
+def country():
-    load_session()
-    asn = None
-    if request.form.get('asn'):
-        asn = request.form.get('asn')
-    elif session.get('asn'):
-        asn = session.get('asn')
-    else:
-        to_return = {'error': 'asn required'}
-    if asn:
-        q = Querying()
-        to_return = q.get_asn_descriptions(asn, session.get('all_descriptions'))
-    return Response(json.dumps(to_return), mimetype='application/json')
 
 
-@app.route('/asn_history', methods=['GET', 'POST'])
-def asn_history():
     load_session()
     q = Querying()
-    if 'asn' in session:
+    sources = q.get_sources(date=session['date'])['response']
-        return Response(json.dumps(q.get_asn_history(**session)), mimetype='application/json')
+    return render_template('country.html', sources=sources, countries=get_country_codes(), **session)
-    return Response(json.dumps({'error': f'asn key is required: {session}'}), mimetype='application/json')
 
 
 @app.route('/country_history_callback', methods=['GET', 'POST'])
 def country_history_callback():
-    history_data = json.loads(request.data)
+    history_data = request.get_json(force=True)
     to_display = []
     mapping = defaultdict(dict)
     dates = []

@@ -172,19 +147,86 @@ def country_history_callback():
             line.append('N/A')
         to_display_temp.append(line)
     to_display.append(to_display_temp)
-    return json.dumps(render_template('country_asn_map.html', to_display=to_display))
+    return render_template('country_asn_map.html', to_display=to_display)
 
+# ############# Web UI #############
 
 
-@app.route('/country_history', methods=['GET', 'POST'])
+# ############# Json outputs #############
 
+@app.route('/ipasn_history/', defaults={'path': ''}, methods=['GET', 'POST'])
+@app.route('/ipasn_history/<path:path>', methods=['GET', 'POST'])
+def ipasn_history_proxy(path):
+    config, general_config_file = load_general_config()
+    if 'ipasnhistory_url' not in config:
+        raise MissingConfigEntry(f'"ipasnhistory_url" is missing in {general_config_file}.')
+    proxied_url = urljoin(config['ipasnhistory_url'], request.full_path.replace('/ipasn_history', ''))
+    if request.method in ['GET', 'HEAD']:
+        to_return = requests.get(proxied_url).json()
+    elif request.method == 'POST':
+        to_return = requests.post(proxied_url, data=request.data).json()
+    return Response(json.dumps(to_return), mimetype='application/json')
 
 
+@app.route('/json/asn', methods=['POST'])
+def json_asn():
+    # TODO
+    # * Filter on date => if only returning one descr, return the desription at that date
+    query = request.get_json(force=True)
+    to_return = {'meta': query, 'response': {}}
+    if 'asn' not in query:
+        to_return['error'] = f'You need to pass an asn - {query}'
+        return to_return
+
+    q = Querying()
+    asn_description_query = {'asn': query['asn']}
+    if 'all_descriptions' in query:
+        asn_description_query['all_descriptions'] = query['all_descriptions']
+    to_return['response']['asn_description'] = q.get_asn_descriptions(**asn_description_query)['response']
+
+    asn_rank_query = {'asn': query['asn']}
+    if 'date' in query:
+        asn_rank_query['date'] = query['date']
+    if 'source' in query:
+        asn_rank_query['source'] = query['source']
+    else:
+        asn_rank_query['with_position'] = True
+    if 'ipversion' in query:
+        asn_rank_query['ipversion'] = query['ipversion']
+
+    to_return['response']['ranking'] = q.asn_rank(**asn_rank_query)['response']
+    return Response(json.dumps(to_return), mimetype='application/json')
 
 
+@app.route('/json/asn_description', methods=['POST'])
+def asn_description():
+    load_session()
+    asn = None
+    if request.form.get('asn'):
+        asn = request.form.get('asn')
+    elif session.get('asn'):
+        asn = session.get('asn')
+    else:
+        to_return = {'error': 'asn required'}
+    if asn:
+        q = Querying()
+        to_return = q.get_asn_descriptions(asn, session.get('all_descriptions'))
+    return Response(json.dumps(to_return), mimetype='application/json')
 
 
+@app.route('/json/asn_history', methods=['GET', 'POST'])
+def asn_history():
+    load_session()
+    q = Querying()
+    if 'asn' in session:
+        return Response(json.dumps(q.get_asn_history(**session)), mimetype='application/json')
+    return Response(json.dumps({'error': f'asn key is required: {session}'}), mimetype='application/json')
 
 
+@app.route('/json/country_history', methods=['GET', 'POST'])
 def country_history():
     load_session()
     q = Querying()
     return Response(json.dumps(q.country_history(**session)), mimetype='application/json')
 
+# ############# Json outputs #############
-@app.route('/country', methods=['GET', 'POST'])
-def country():
-    load_session()
-    q = Querying()
-    sources = q.get_sources(date=session['date'])
-    return render_template('country.html', sources=sources, countries=get_country_codes(), **session)
@@ -25,14 +25,14 @@ function linegraph(call_path) {
 
   // Get the data
   d3.json(call_path, {credentials: 'same-origin'}).then(function(data) {
-    x.domain(d3.extent(data, function(d) { return parseTime(d[0]); }));
+    x.domain(d3.extent(data.response, function(d) { return parseTime(d[0]); }));
-    y.domain(d3.extent(data, function(d) { return d[1]; }));
+    y.domain(d3.extent(data.response, function(d) { return d[1]; }));
 
     xAxis();
     yAxis();
 
     context.beginPath();
-    line(data);
+    line(data.response);
     context.lineWidth = 1.5;
     context.strokeStyle = "steelblue";
     context.stroke();
 

@@ -18,7 +18,7 @@ function linegraph(call_path) {
 
   d3.json(call_path, {credentials: 'same-origin'}).then(function(data) {
 
-    var country_ranks = d3.entries(data).map(function(country_rank) {
+    var country_ranks = d3.entries(data.response).map(function(country_rank) {
       return {
        country: country_rank.key,
        values: d3.values(country_rank.value).map(function(d) {

@@ -68,11 +68,10 @@ function linegraph(call_path) {
       .style("font", "10px sans-serif")
       .text(function(d) { return d.id; });
 
-    d3.json(call_path + '_callback',
+    d3.text('/country_history_callback',
            {credentials: 'same-origin',
             method: 'POST',
-            body: JSON.stringify(data),
+            body: JSON.stringify(data.response),
-            // headers: {'Content-Type': 'application/json'}
            }).then(function(data) {
      d3.select('#asn_details').html(data);
    });
 
@@ -12,7 +12,7 @@
 {% block scripts %}
   {{ super() }}
   <script src='{{ url_for('static', filename='linegraph.js') }}'></script>
-  <script>linegraph('/asn_history');</script>
+  <script>linegraph('/json/asn_history');</script>
 {% endblock %}
 
 {% block content %}

@@ -11,7 +11,7 @@
 {% block scripts %}
   {{ super() }}
   <script src='{{ url_for('static', filename='linegraph_country.js') }}'></script>
-  <script>linegraph('/country_history');</script>
+  <script>linegraph('/json/country_history');</script>
 {% endblock %}
 
 {% block content %}

@@ -18,7 +18,7 @@
     <th>Rank</th>
     <th>Description</th>
   </tr>
-  {% for (asn, rank), description in ranks %}
+  {% for asn, rank, description in ranks %}
   <tr>
     <td><a href="{{ url_for('asn_details', asn=asn) }}">{{ asn }}</a></td>
     <td>{{ rank }}</td>