diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index d422a91d..3cecb779 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -931,6 +931,13 @@ def get_crawlers_stats_by_month(domain_type, date=None):
         stats.append(get_crawlers_stats_by_day(date, domain_type))
     return stats
 
+def get_crawlers_stats_up_down_by_month(domain_type, date=None):
+    stats = {'down': 0, 'up': 0}
+    for date in Date.get_month_dates(date=date):
+        day = get_crawlers_stats_by_day(date, domain_type)
+        stats['down'] += day.get('down', 0)
+        stats['up'] += day.get('up', 0)
+    return stats
 
 def get_crawlers_stats(domain_type=None):
     stats = {}
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index 937ec732..666645cb 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -316,6 +316,19 @@ def crawlers_last_domains_month_json():
     stats = crawlers.get_crawlers_stats_by_month(domain_type)
     return jsonify(stats)
 
+@crawler_splash.route('/crawlers/last/domains/status/month/json')
+@login_required
+@login_read_only
+def crawlers_last_domains_status_month_json():
+    domain_type = request.args.get('type')
+    if domain_type not in crawlers.get_crawler_all_types():
+        return jsonify({'error': 'Invalid domain type'}), 400
+    stats = crawlers.get_crawlers_stats_up_down_by_month(domain_type)
+    data = []
+    for key in stats:
+        data.append({'name': key, 'value': stats[key]})
+    return jsonify(data)
+
 
 #### Domains ####
diff --git a/var/www/templates/crawler/crawler_splash/last_crawled.html b/var/www/templates/crawler/crawler_splash/last_crawled.html
index d4a9363b..e26710bf 100644
--- a/var/www/templates/crawler/crawler_splash/last_crawled.html
+++ b/var/www/templates/crawler/crawler_splash/last_crawled.html
@@ -17,6 +17,7 @@
+
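
A minimal sketch of how the new /crawlers/last/domains/status/month/json route could be exercised, assuming a pytest-style `client` fixture wrapping an authenticated Flask test client for the AIL web app; the fixture, the test module, and the 'onion' type value are illustrative assumptions, not part of this diff.

# Hypothetical test sketch (not part of this diff): assumes a pytest
# fixture `client` providing an authenticated Flask test client, and
# that 'onion' is one of the types returned by crawlers.get_crawler_all_types().
def test_crawlers_last_domains_status_month_json(client):
    resp = client.get('/crawlers/last/domains/status/month/json?type=onion')
    assert resp.status_code == 200

    # The route flattens {'down': x, 'up': y} into a list of
    # {'name': ..., 'value': ...} pairs, the shape commonly fed to
    # pie/donut chart series on the front end.
    data = resp.get_json()
    assert sorted(entry['name'] for entry in data) == ['down', 'up']
    assert all(isinstance(entry['value'], int) for entry in data)

    # An unknown domain type is rejected with HTTP 400.
    resp = client.get('/crawlers/last/domains/status/month/json?type=unknown')
    assert resp.status_code == 400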