# lookyloo/tools/stats.py — print weekly and monthly capture statistics.
from lookyloo.lookyloo import Lookyloo
import calendar
import datetime
from urllib.parse import urlparse
# 2021-01-25 13:14:33 +01:00
from typing import Dict, Any, Union, Set
# 2020-11-20 12:17:54 +01:00
# Single Lookyloo instance used to enumerate and read captures.
lookyloo = Lookyloo()

# Per-year, per-month aggregates: stats[year][month] -> counters dict.
stats: Dict[Union[str, int], Any] = {}

today = datetime.date.today()
calendar_week = today.isocalendar()[1]

# Fresh counter buckets for the current ISO week and the previous one.
weeks_stats: Dict[int, Dict[str, Union[int, Set[str]]]] = {
    week: {'analysis': 0, 'analysis_with_redirects': 0, 'redirects': 0, 'uniq_urls': set()}
    for week in (calendar_week - 1, calendar_week)
}
def uniq_domains(uniq_urls):
    """Return the set of hostnames extracted from the given URLs."""
    return {urlparse(url).hostname for url in uniq_urls}
# 2020-11-20 12:17:54 +01:00
def _ingest(counters: Dict[str, Union[int, Set[str]]], cache) -> None:
    """Fold one capture into a counters bucket (mutates *counters* in place).

    The bucket layout is the one created above:
    {'analysis', 'analysis_with_redirects', 'redirects', 'uniq_urls'}.
    """
    counters['analysis'] += 1  # type: ignore
    if len(cache.redirects) > 0:
        counters['analysis_with_redirects'] += 1  # type: ignore
    counters['redirects'] += len(cache.redirects)  # type: ignore
    counters['uniq_urls'].update(cache.redirects)  # type: ignore
    counters['uniq_urls'].add(cache.url)  # type: ignore


# Aggregate every cached capture into monthly and last-two-weeks buckets.
for uuid in lookyloo.capture_uuids:
    cache = lookyloo.capture_cache(uuid)
    # Skip captures without a usable cache entry or timestamp.
    if not cache or not hasattr(cache, 'timestamp'):
        continue
    date = cache.timestamp
    if date.year not in stats:
        stats[date.year] = {}
    if date.month not in stats[date.year]:
        stats[date.year][date.month] = {'analysis': 0, 'analysis_with_redirects': 0, 'redirects': 0, 'uniq_urls': set()}
    _ingest(stats[date.year][date.month], cache)
    # Hoisted: the original recomputed date.isocalendar()[1] six times per capture.
    week = date.isocalendar()[1]
    if week in weeks_stats:
        _ingest(weeks_stats[week], cache)
# Report the two tracked ISO weeks (previous and current).
print('Statistics for the last two weeks:')
for week_number, week_stat in weeks_stats.items():
    print(f'Week {week_number}:')
    print(' Number of analysis:', week_stat['analysis'])
    print(' Number of analysis with redirects:', week_stat['analysis_with_redirects'])
    print(' Number of redirects:', week_stat['redirects'])
    print(' Number of unique URLs:', len(week_stat['uniq_urls']))  # type: ignore
    print(' Number of unique domains:', len(uniq_domains(week_stat['uniq_urls'])))
# 2020-11-20 12:17:54 +01:00
# Report monthly breakdowns and yearly totals.
for year, data in stats.items():
    print('Year:', year)
    yearly_analysis = 0
    yearly_redirects = 0
    for month in sorted(data.keys()):
        # Fix: the original rebound the module-level ``stats`` name here while
        # the outer loop was iterating ``stats.items()``; use a dedicated local.
        month_stats = data[month]
        print(' ', calendar.month_name[month])
        print("\tNumber of analysis :", month_stats['analysis'])
        print("\tNumber of analysis with redirects:", month_stats['analysis_with_redirects'])
        print("\tNumber of redirects :", month_stats['redirects'])
        print('\tNumber of unique URLs:', len(month_stats['uniq_urls']))
        domains = uniq_domains(month_stats['uniq_urls'])
        print('\tNumber of unique domains:', len(domains))
        yearly_analysis += month_stats['analysis']
        yearly_redirects += month_stats['redirects']
    print(" Sum analysis:", yearly_analysis)
    print(" Sum redirects:", yearly_redirects)