fix: [search domains] fix search domains by daterange

pull/594/head
Terrtia 2023-04-24 13:35:55 +02:00
parent 10fbf228c1
commit 8f65fd5fe7
No known key found for this signature in database
GPG Key ID: 1E1B1F50D84613D0
5 changed files with 55 additions and 33 deletions

View File

@ -531,16 +531,19 @@ def get_domains_by_daterange(date_from, date_to, domain_type, up=True, down=Fals
domains.extend(get_domains_down_by_date(date, domain_type))
return domains
def get_domains_dates_by_daterange(date_from, date_to, domain_types, up=True, down=False):
    """Map each date of a date range to the domains crawled on that date.

    :param date_from: start date of the range, inclusive (YYYYMMDD string)
    :param date_to: end date of the range, inclusive (YYYYMMDD string)
    :param domain_types: iterable of domain types to query ('onion', 'web', ...);
                         any falsy value means "all known types"
    :param up: include domains that were UP on the date
    :param down: include domains that were DOWN on the date
    :return: dict {date: [domain, ...]}; dates with no matching domain are omitted
    """
    if not domain_types:
        # Fall back to every registered domain type when the caller gives none.
        domain_types = get_all_domains_types()
    date_domains = {}
    for date in Date.substract_date(date_from, date_to):
        domains = []
        # Aggregate across ALL requested types for this date (the old code
        # only queried a single type, which broke multi-type date searches).
        for domain_type in domain_types:
            if up:
                domains.extend(get_domains_up_by_date(date, domain_type))
            if down:
                domains.extend(get_domains_down_by_date(date, domain_type))
        if domains:
            date_domains[date] = list(domains)
    return date_domains
def get_domains_meta(domains):

View File

@ -68,6 +68,7 @@ def crawlers_dashboard():
return render_template("dashboard_crawler.html", date=date,
is_manager_connected=is_manager_connected,
crawlers_status=crawlers_status,
filter_up=True,
crawlers_latest_stats=crawlers_latest_stats)
@ -471,6 +472,9 @@ def domains_search_name():
except:
page = 1
if not name:
return create_json_response({'error': 'Mandatory args name not provided'}, 400)
domains_types = request.args.getlist('domain_types')
if domains_types:
domains_types = domains_types[0].split(',')
@ -487,13 +491,25 @@ def domains_search_name():
@login_analyst
def domains_search_date():
# TODO sanitize type + date
domain_type = request.args.get('type')
dom_types = request.args.get('type')
date_from = request.args.get('date_from')
date_to = request.args.get('date_to')
down = bool(request.args.get('down', False))
up = bool(request.args.get('up'))
# page = request.args.get('page')
all_types = Domains.get_all_domains_types()
if dom_types == 'all':
domain_types = all_types
elif dom_types in Domains.get_all_domains_types():
domain_types = [dom_types]
else:
dom_types = 'all'
domain_types = all_types
date = Date.sanitise_date_range(date_from, date_to)
domains_date = Domains.get_domains_dates_by_daterange(date['date_from'], date['date_to'], domain_type)
domains_date = Domains.get_domains_dates_by_daterange(date['date_from'], date['date_to'], domain_types,
up=up, down=down)
dict_domains = {}
for d in domains_date:
dict_domains[d] = Domains.get_domains_meta(domains_date[d])
@ -502,7 +518,8 @@ def domains_search_date():
return render_template("domains_daterange.html", date_from=date_from, date_to=date_to,
bootstrap_label=bootstrap_label,
dict_domains=dict_domains, type=domain_type)
filter_down=down, filter_up=up,
dict_domains=dict_domains, type=dom_types)
@crawler_splash.route('/domains/date/post', methods=['POST'])
@ -512,7 +529,10 @@ def domains_search_date_post():
domain_type = request.form.get('type')
date_from = request.form.get('date_from')
date_to = request.form.get('date_to')
return redirect(url_for('crawler_splash.domains_search_date', date_from=date_from, date_to=date_to, type=domain_type))
down = request.form.get('down')
up = request.form.get('up')
return redirect(url_for('crawler_splash.domains_search_date', date_from=date_from, date_to=date_to,
type=domain_type, down=down, up=up))
##-- --##

View File

@ -39,11 +39,11 @@
<h5><a class="text-info" href="{{ url_for('crawler_splash.crawlers_last_domains')}}?type=onion"><i class="fas fa-user-secret"></i> Onions Crawlers</a></h5>
<div class="row">
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=onion&domains_up=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_onion_domain_up">{{ crawlers_latest_stats['onion']['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=onion&domains_down=True&date_from={{date}}&date_to={{date}}" class="badge badge-danger ml-md-3" id="stat_onion_domain_down">{{ crawlers_latest_stats['onion']['down'] }}</a> DOWN
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=onion&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_onion_domain_up">{{ crawlers_latest_stats['onion']['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=onion&down=True&up=False&date_from={{date}}&date_to={{date}}" class="badge badge-danger ml-md-3" id="stat_onion_domain_down">{{ crawlers_latest_stats['onion']['down'] }}</a> DOWN
</div>
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=onion&domains_up=True&domains_down=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_onion_total">{{ crawlers_latest_stats['onion']['crawled'] }}</a> Crawled
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=onion&up=True&down=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_onion_total">{{ crawlers_latest_stats['onion']['crawled'] }}</a> Crawled
<span class="badge badge-warning ml-md-3" id="stat_onion_queue">{{ crawlers_latest_stats['onion']['queue'] }}</span> Queue
</div>
</div>
@ -57,11 +57,11 @@
<h5><a class="text-info" href="{{ url_for('crawler_splash.crawlers_last_domains')}}?type=web"><i class="fab fa-html5"></i> Web Crawlers</a></h5>
<div class="row">
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=web&domains_up=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_web_domain_up">{{ crawlers_latest_stats['web']['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=web&domains_down=True&date_from={{date}}&date_to={{date}}" class="badge badge-danger ml-md-3" id="stat_web_domain_down">{{ crawlers_latest_stats['web']['down'] }}</a> DOWN
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=web&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_web_domain_up">{{ crawlers_latest_stats['web']['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=web&down=True&up=False&date_from={{date}}&date_to={{date}}" class="badge badge-danger ml-md-3" id="stat_web_domain_down">{{ crawlers_latest_stats['web']['down'] }}</a> DOWN
</div>
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type=web&domains_up=True&domains_down=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_web_total">{{ crawlers_latest_stats['web']['crawled'] }}</a> Crawled
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type=web&up=True&down=True&date_from={{date}}&date_to={{date}}" class="badge badge-success" id="stat_web_total">{{ crawlers_latest_stats['web']['crawled'] }}</a> Crawled
<span class="badge badge-warning ml-md-3" id="stat_web_queue">{{ crawlers_latest_stats['web']['queue'] }}</span> Queue
</div>
</div>

View File

@ -3,11 +3,11 @@
<div class="card-header">
<div class="row">
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type={{type}}&domains_up=True&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-success">{{ crawler_stats[type]['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type={{type}}&domains_down=True&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-danger ml-md-3">{{ crawler_stats[type]['down'] }}</a> DOWN
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type={{type}}&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-success">{{ crawler_stats[type]['up'] }}</a> UP
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type={{type}}&down=True&up=False&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-danger ml-md-3">{{ crawler_stats[type]['down'] }}</a> DOWN
</div>
<div class="col-6">
<a href="{{ url_for('crawler_splash.domains_search_date') }}?service_type={{type}}&domains_up=True&domains_down=True&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-success">{{ crawler_stats[type]['crawled'] }}</a> Crawled
<a href="{{ url_for('crawler_splash.domains_search_date') }}?type={{type}}&up=True&down=True&date_from={{date_from}}&date_to={{date_to}}" class="badge badge-success">{{ crawler_stats[type]['crawled'] }}</a> Crawled
<span class="badge badge-warning ml-md-3">{{ crawler_stats[type]['queue'] }}</span> Queue
</div>
</div>
@ -18,7 +18,7 @@
<form action="{{ url_for('crawler_splash.domains_search_date_post') }}" id="hash_selector_form" method='post'>
<div class="row">
<div class="col-6">
<input type="hidden" id="type" name="type" value="{{type}}">
<input type="hidden" id="type" name="type" value="{% if type %}{{type}}{% else %}all{% endif %}">
<div class="input-group" id="date-range-from">
<div class="input-group-prepend"><span class="input-group-text"><i class="far fa-calendar-alt" aria-hidden="true"></i></span></div>
<input class="form-control" id="date-range-from-input" placeholder="yyyy-mm-dd" value="{{ date_from }}" name="date_from" autocomplete="off">
@ -30,13 +30,13 @@
</div>
<div class="col-6">
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domains_up" value="True" id="domains_up_id" checked>
<input class="custom-control-input" type="checkbox" name="up" value="True" id="domains_up_id" {% if filter_up is none or filter_up %}checked{% endif %}>
<label class="custom-control-label" for="domains_up_id">
<span class="badge badge-success"><i class="fas fa-check-circle"></i> Domains UP </span>
</label>
</div>
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domains_down" value="True" id="domains_down_id">
<input class="custom-control-input" type="checkbox" name="down" value="True" id="domains_down_id" {% if filter_down %}checked{% endif %}>
<label class="custom-control-label" for="domains_down_id">
<span class="badge badge-danger"><i class="fas fa-times-circle"></i> Domains DOWN</span>
</label>

View File

@ -31,20 +31,19 @@
</div>
<script>
function searchDomainName() {
    var all_domain_types = ['onion', 'regular'] // TODO: load from flask
    // FIX: keep the accumulator declared locally. The commented-out
    // declaration left `l_domains_types` undeclared, which throws a
    // ReferenceError in strict mode (or accumulates stale types across
    // repeated searches if it leaked as a global).
    var l_domains_types = [];
    console.log(document.getElementById('in_search_name'));
    var data = document.getElementById('in_search_name').value;
    // Collect every domain type whose toggle switch is checked.
    for (var i = 0; i < all_domain_types.length; i++) {
        if (document.getElementById('domain_'+ all_domain_types[i] +'_switch').checked) {
            l_domains_types.push(all_domain_types[i])
        }
    }
    var parameter = "?name=" + data + "&domain_types=" + l_domains_types +"{%if page%}&page={{ page }}{%endif%}";
    window.location.href = "{{ url_for('crawler_splash.domains_search_name') }}" + parameter;
}
</script>