diff --git a/bin/lib/objects/Domains.py b/bin/lib/objects/Domains.py
index 2cf796e6..8b3e40f7 100755
--- a/bin/lib/objects/Domains.py
+++ b/bin/lib/objects/Domains.py
@@ -531,16 +531,19 @@ def get_domains_by_daterange(date_from, date_to, domain_type, up=True, down=Fals
             domains.extend(get_domains_down_by_date(date, domain_type))
     return domains
 
-def get_domains_dates_by_daterange(date_from, date_to, domain_type, up=True, down=False):
+def get_domains_dates_by_daterange(date_from, date_to, domain_types, up=True, down=False):
+    if not domain_types:
+        domain_types = get_all_domains_types()
     date_domains = {}
     for date in Date.substract_date(date_from, date_to):
         domains = []
-        if up:
-            domains.extend(get_domains_up_by_date(date, domain_type))
-        if down:
-            domains.extend(get_domains_down_by_date(date, domain_type))
-        if domains:
-            date_domains[date] = list(domains)
+        for domain_type in domain_types:
+            if up:
+                domains.extend(get_domains_up_by_date(date, domain_type))
+            if down:
+                domains.extend(get_domains_down_by_date(date, domain_type))
+        if domains:
+            date_domains[date] = list(domains)
     return date_domains
 
 def get_domains_meta(domains):
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index c0b36c79..52ac5be9 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -68,6 +68,7 @@ def crawlers_dashboard():
 
     return render_template("dashboard_crawler.html", date=date,
                            is_manager_connected=is_manager_connected, crawlers_status=crawlers_status,
+                           filter_up=True,
                            crawlers_latest_stats=crawlers_latest_stats)
 
 
@@ -471,6 +472,9 @@ def domains_search_name():
     except:
         page = 1
 
+    if not name:
+        return create_json_response({'error': 'Mandatory args name not provided'}, 400)
+
     domains_types = request.args.getlist('domain_types')
     if domains_types:
         domains_types = domains_types[0].split(',')
@@ -487,13 +491,25 @@ def domains_search_date():
     # TODO sanitize type + date
-    domain_type = request.args.get('type')
+    dom_types = request.args.get('type')
     date_from = request.args.get('date_from')
     date_to = request.args.get('date_to')
+    down = bool(request.args.get('down', False))
+    up = bool(request.args.get('up'))
     # page = request.args.get('page')
 
+    all_types = Domains.get_all_domains_types()
+    if dom_types == 'all':
+        domain_types = all_types
+    elif dom_types in Domains.get_all_domains_types():
+        domain_types = [dom_types]
+    else:
+        dom_types = 'all'
+        domain_types = all_types
+
     date = Date.sanitise_date_range(date_from, date_to)
-    domains_date = Domains.get_domains_dates_by_daterange(date['date_from'], date['date_to'], domain_type)
+    domains_date = Domains.get_domains_dates_by_daterange(date['date_from'], date['date_to'], domain_types,
+                                                          up=up, down=down)
     dict_domains = {}
     for d in domains_date:
         dict_domains[d] = Domains.get_domains_meta(domains_date[d])
 
@@ -502,7 +518,8 @@ def domains_search_date():
 
     return render_template("domains_daterange.html", date_from=date_from, date_to=date_to,
                            bootstrap_label=bootstrap_label,
-                           dict_domains=dict_domains, type=domain_type)
+                           filter_down=down, filter_up=up,
+                           dict_domains=dict_domains, type=dom_types)
 
 
@@ -512,7 +529,10 @@ def domains_search_date_post():
     domain_type = request.form.get('type')
     date_from = request.form.get('date_from')
     date_to = request.form.get('date_to')
-    return redirect(url_for('crawler_splash.domains_search_date', date_from=date_from, date_to=date_to, type=domain_type))
+    down = request.form.get('down')
+    up = request.form.get('up')
+    return redirect(url_for('crawler_splash.domains_search_date', date_from=date_from, date_to=date_to,
+                            type=domain_type, down=down, up=up))
 
 ##-- --##
 
diff --git a/var/www/templates/crawler/crawler_splash/dashboard_crawler.html b/var/www/templates/crawler/crawler_splash/dashboard_crawler.html
index 65056494..4ef8d865 100644
--- a/var/www/templates/crawler/crawler_splash/dashboard_crawler.html
+++ b/var/www/templates/crawler/crawler_splash/dashboard_crawler.html
@@ -39,11 +39,11 @@
Onions Crawlers
- {{ crawlers_latest_stats['onion']['up'] }} UP - {{ crawlers_latest_stats['onion']['down'] }} DOWN + {{ crawlers_latest_stats['onion']['up'] }} UP + {{ crawlers_latest_stats['onion']['down'] }} DOWN
- {{ crawlers_latest_stats['onion']['crawled'] }} Crawled + {{ crawlers_latest_stats['onion']['crawled'] }} Crawled {{ crawlers_latest_stats['onion']['queue'] }} Queue
@@ -57,11 +57,11 @@
Web Crawlers
- {{ crawlers_latest_stats['web']['up'] }} UP - {{ crawlers_latest_stats['web']['down'] }} DOWN + {{ crawlers_latest_stats['web']['up'] }} UP + {{ crawlers_latest_stats['web']['down'] }} DOWN
- {{ crawlers_latest_stats['web']['crawled'] }} Crawled + {{ crawlers_latest_stats['web']['crawled'] }} Crawled {{ crawlers_latest_stats['web']['queue'] }} Queue
diff --git a/var/www/templates/crawler/show_domains_by_daterange.html b/var/www/templates/crawler/show_domains_by_daterange.html index 313e91a6..2440e02d 100644 --- a/var/www/templates/crawler/show_domains_by_daterange.html +++ b/var/www/templates/crawler/show_domains_by_daterange.html @@ -3,11 +3,11 @@
- {{ crawler_stats[type]['crawled'] }} Crawled + {{ crawler_stats[type]['crawled'] }} Crawled {{ crawler_stats[type]['queue'] }} Queue
@@ -18,7 +18,7 @@
- +
@@ -30,13 +30,13 @@
- +
- + diff --git a/var/www/templates/domains/block_domains_name_search.html b/var/www/templates/domains/block_domains_name_search.html index 6fabc144..84e2be27 100644 --- a/var/www/templates/domains/block_domains_name_search.html +++ b/var/www/templates/domains/block_domains_name_search.html @@ -31,20 +31,19 @@