From 6b9ba9d37714f0417f33a22063b45f51800d9533 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 8 Nov 2019 09:25:09 +0100
Subject: [PATCH] chg: [Domain] Show last origin

---
 bin/lib/Domain.py                          | 29 +++++++++++++++++++++++++
 var/www/blueprints/crawler_splash.py       |  5 +++--
 .../crawler/crawler_splash/showDomain.html | 12 +++++++-----
 3 files changed, 39 insertions(+), 7 deletions(-)

diff --git a/bin/lib/Domain.py b/bin/lib/Domain.py
index f305afa6..962c2e19 100755
--- a/bin/lib/Domain.py
+++ b/bin/lib/Domain.py
@@ -154,6 +154,21 @@ def get_domain_last_check(domain, domain_type=None, r_format="str"):
         last_check = '{}/{}/{}'.format(last_check[0:4], last_check[4:6], last_check[6:8])
     return last_check
 
+def get_domain_last_origin(domain, domain_type):
+    '''
+    Get domain last origin
+
+    :param domain: crawled domain
+    :type domain: str
+    :param domain_type: domain type
+    :type domain_type: str
+
+    :return: last origin item_id
+    :rtype: str
+    '''
+    origin_item = r_serv_onion.hget('{}_metadata:{}'.format(domain_type, domain), 'paste_parent')
+    return origin_item
+
 def get_domain_tags(domain):
     '''
     Retun all tags of a given domain.
@@ -280,6 +295,20 @@ class Domain(object):
         '''
         return get_domain_last_check(self.domain, domain_type=self.type)
 
+    def get_domain_last_origin(self):
+        '''
+        Get domain last origin
+
+        :param domain: crawled domain
+        :type domain: str
+        :param domain_type: domain type
+        :type domain_type: str
+
+        :return: last origin item_id
+        :rtype: str
+        '''
+        return get_domain_last_origin(self.domain, self.type)
+
     def is_domain_up(self): # # TODO: handle multiple ports
         '''
         Return True if this domain is UP
diff --git a/var/www/blueprints/crawler_splash.py b/var/www/blueprints/crawler_splash.py
index 6977aa4b..2f142a9c 100644
--- a/var/www/blueprints/crawler_splash.py
+++ b/var/www/blueprints/crawler_splash.py
@@ -46,8 +46,8 @@ def api_validator(api_response):
 # ============= ROUTES ==============
 # add route : /crawlers/show_domain
 @crawler_splash.route('/crawlers/showDomain')
-#@login_required
-#@login_analyst
+@login_required
+@login_analyst
 def showDomain():
     domain_name = request.args.get('domain')
     epoch = request.args.get('epoch')
@@ -63,6 +63,7 @@ def showDomain():
     dict_domain['domain'] = domain_name
     if domain.is_domain_up():
         dict_domain = {**dict_domain, **domain.get_domain_correlation()}
+        dict_domain['origin_item'] = domain.get_domain_last_origin()
         dict_domain['tags'] = domain.get_domain_tags()
         dict_domain['history'] = domain.get_domain_history_with_status()
         dict_domain['crawler_history'] = domain.get_domain_items_crawled(items_link=True, epoch=epoch, item_screenshot=True, item_tag=True) # # TODO: handle multiple port
diff --git a/var/www/templates/crawler/crawler_splash/showDomain.html b/var/www/templates/crawler/crawler_splash/showDomain.html
index 11b21440..8e95dcb5 100644
--- a/var/www/templates/crawler/crawler_splash/showDomain.html
+++ b/var/www/templates/crawler/crawler_splash/showDomain.html
@@ -78,11 +78,6 @@
-        {% if origin_paste_name=='manual' or origin_paste_name=='auto' %}
-          {{ origin_paste_name }}
-        {%else%}
-          {{ origin_paste_name }}
-        {%endif%}
         {% for tag in dict_domain['tags'] %}
@@ -96,6 +91,13 @@
+        {% if dict_domain['origin_item']=='manual' or dict_domain['origin_item']=='auto' %}
+          {{ dict_domain['origin_item'] }}
+        {%else%}
+          Last Origin:
+          {{ dict_domain['origin_item'] }}
+        {%endif%}
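
Review note: a minimal standalone sketch of the lookup the new get_domain_last_origin() helper performs. It assumes a local Redis instance reachable on localhost:6379/db 0 and hand-writes the '<type>_metadata:<domain>' hash that the AIL crawler normally fills; in AIL itself, r_serv_onion comes from the ConfigLoader, and the .onion address and item id below are made-up placeholders.

import redis

# Standalone connection for illustration only; AIL builds r_serv_onion
# from its configuration, not from a hard-coded host/port/db.
r_serv_onion = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)

# Simulate what the crawler stores: the parent item that led to this domain.
r_serv_onion.hset('onion_metadata:examplexxxxxxxxxxxxxxxx.onion', 'paste_parent',
                  'crawled/2019/11/08/examplexxxxxxxxxxxxxxxx.onion-placeholder-item-id')

def get_domain_last_origin(domain, domain_type):
    # Same read as the patched helper: one HGET on '<type>_metadata:<domain>'.
    return r_serv_onion.hget('{}_metadata:{}'.format(domain_type, domain), 'paste_parent')

# The showDomain route exposes this value to the template as
# dict_domain['origin_item'] ('manual', 'auto', or an item id).
print(get_domain_last_origin('examplexxxxxxxxxxxxxxxx.onion', 'onion'))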