mirror of https://github.com/CIRCL/AIL-framework
fix: [dashboard] fix objects links
parent eb6adc4b98
commit 13372f8c85
@@ -22,11 +22,8 @@ REGEX_JOIN_HASH = re.compile(r'[0-9a-zA-z-]+')
 ## ##
 
-def save_item_correlation(username, item_id, item_date):
-    Username.save_item_correlation('telegram', username, item_id, item_date)
-
-def save_telegram_invite_hash(invite_hash, item_id):
-    r_obj.sadd('telegram:invite_code', f'{invite_hash};{item_id}')
+def save_telegram_invite_hash(invite_hash, obj_global_id):
+    r_obj.sadd('telegram:invite_code', f'{invite_hash};{obj_global_id}')
 
 def get_data_from_telegram_url(base_url, url_path):
     dict_url = {}
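The helper now stores the caller's global object id instead of a bare item id, so a recorded invite code can point at any object type. A minimal sketch of the set member it writes, assuming the usual AIL type:subtype:id shape of get_global_id(); all example values below are made up:

    # Hypothetical illustration of the 'telegram:invite_code' member layout.
    invite_hash = 'AAAAAExampleHash'                                      # assumed invite code
    old_member = f'{invite_hash};submitted/2024/01/01/example.gz'         # before: bare item id
    new_member = f'{invite_hash};item::submitted/2024/01/01/example.gz'   # after: global id (type:subtype:id)
    # save_telegram_invite_hash() adds the member via r_obj.sadd('telegram:invite_code', ...)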
@@ -61,7 +61,7 @@ class ApiKey(AbstractModule):
 
         if google_api_key:
             print(f'found google api key: {to_print}')
-            self.redis_logger.warning(f'{to_print}Checked {len(google_api_key)} found Google API Key;{item.get_id()}')
+            self.redis_logger.warning(f'{to_print}Checked {len(google_api_key)} found Google API Key;{self.obj.get_global_id()}')
 
             tag = 'infoleak:automatic-detection="google-api-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
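The same one-line substitution repeats across the detection modules below. The last semicolon-separated field of each warning is what the dashboard later splits out and turns into a link, so it now has to carry a global object id rather than a raw item id. A rough sketch of that payload as dashboard_alert() (further down in this diff) receives it, with made-up values and assuming the 46 stripped characters are the logger's own prefix:

    # Hypothetical warning payload once the logger prefix has been stripped:
    log = 'ApiKey;feeder;2024/01/01;example.gz;Checked 1 found Google API Key;item::submitted/2024/01/01/example.gz'
    fields = log.split(';')   # 6 fields, matching the "if len(log) == 6" check in dashboard_alert()
    obj_ref = fields[5]       # previously an item id, now a global id
    # dashboard_alert() then builds: url_for('investigations_b.get_object_gid', gid=obj_ref)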
@@ -69,10 +69,10 @@ class ApiKey(AbstractModule):
         # # TODO: # FIXME: AWS regex/validate/sanitize KEY + SECRET KEY
         if aws_access_key:
             print(f'found AWS key: {to_print}')
-            self.redis_logger.warning(f'{to_print}Checked {len(aws_access_key)} found AWS Key;{item.get_id()}')
+            self.redis_logger.warning(f'{to_print}Checked {len(aws_access_key)} found AWS Key;{self.obj.get_global_id()}')
         if aws_secret_key:
             print(f'found AWS secret key')
-            self.redis_logger.warning(f'{to_print}Checked {len(aws_secret_key)} found AWS secret Key;{item.get_id()}')
+            self.redis_logger.warning(f'{to_print}Checked {len(aws_secret_key)} found AWS secret Key;{self.obj.get_global_id()}')
 
             tag = 'infoleak:automatic-detection="aws-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
@@ -103,11 +103,11 @@ class Credential(AbstractModule):
 
         print(message)
 
-        to_print = f'Credential;{item.get_source()};{item.get_date()};{item.get_basename()};{message};{item.get_id()}'
+        to_print = f'Credential;{item.get_source()};{item.get_date()};{item.get_basename()};{message};{self.obj.get_global_id()}'
 
         # num of creds above threshold, publish an alert
         if nb_cred > self.criticalNumberToAlert:
-            print(f"========> Found more than 10 credentials in this file : {item.get_id()}")
+            print(f"========> Found more than 10 credentials in this file : {self.obj.get_global_id()}")
             self.redis_logger.warning(to_print)
 
             tag = 'infoleak:automatic-detection="credential"'
@@ -86,7 +86,7 @@ class CreditCards(AbstractModule):
         # print(creditcard_set)
         to_print = f'CreditCard;{item.get_source()};{item.get_date()};{item.get_basename()};'
         if creditcard_set:
-            mess = f'{to_print}Checked {len(creditcard_set)} valid number(s);{item.id}'
+            mess = f'{to_print}Checked {len(creditcard_set)} valid number(s);{self.obj.get_global_id()}'
             print(mess)
             self.redis_logger.warning(mess)
 
@@ -96,7 +96,7 @@ class CreditCards(AbstractModule):
             if r_result:
                 return creditcard_set
         else:
-            self.redis_logger.info(f'{to_print}CreditCard related;{item.id}')
+            self.redis_logger.info(f'{to_print}CreditCard related;{self.obj.get_global_id()}')
 
 
 if __name__ == '__main__':
@@ -149,7 +149,7 @@ class Cryptocurrencies(AbstractModule, ABC):
                                                       item.get_date(),
                                                       item.get_basename())
             self.redis_logger.warning('{}Detected {} {} private key;{}'.format(
-                to_print, len(private_keys), currency['name'], item_id))
+                to_print, len(private_keys), currency['name'], self.obj.get_global_id()))
         else:
             private_keys = []
 
@@ -56,7 +56,7 @@ class CveModule(AbstractModule):
                 cve = Cves.Cve(cve_id)
                 cve.add(date, item)
 
-            warning = f'{item_id} contains CVEs {cves}'
+            warning = f'{self.obj.get_global_id()} contains CVEs {cves}'
             print(warning)
             self.redis_logger.warning(warning)
 
@@ -82,20 +82,20 @@ class DomClassifier(AbstractModule):
                 localizeddomains = self.dom_classifier.include(expression=self.cc_tld)
                 if localizeddomains:
                     print(localizeddomains)
-                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{item.get_id()}")
+                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{self.obj.get_global_id()}")
 
             if self.cc:
                 localizeddomains = self.dom_classifier.localizedomain(cc=self.cc)
                 if localizeddomains:
                     print(localizeddomains)
-                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{item.get_id()}")
+                    self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{self.obj.get_global_id()}")
 
             if r_result:
                 return self.dom_classifier.vdomain
 
         except IOError as err:
             self.redis_logger.error(f"Duplicate;{item_source};{item_date};{item_basename};CRC Checksum Failed")
-            raise Exception(f"CRC Checksum Failed on: {item.get_id()}")
+            raise Exception(f"CRC Checksum Failed on: {self.obj.get_global_id()}")
 
 
 if __name__ == "__main__":
@@ -92,10 +92,10 @@ class Duplicates(AbstractModule):
             Duplicate.save_object_hash(algo, curr_date_ymonth, self.algos[algo]['hash'], item.get_id())
 
         if nb_duplicates:
-            self.redis_logger.info(f'Duplicate;{item.get_source()};{item.get_date()};{item.get_basename()};Detected {nb_duplicates};{item.get_id()}')
+            self.redis_logger.info(f'Duplicate;{item.get_source()};{item.get_date()};{item.get_basename()};Detected {nb_duplicates};{self.obj.get_global_id()}')
 
         y = time.time()
-        print(f'{item.get_id()} Processed in {y-x} sec')
+        print(f'{self.obj.get_global_id()} Processed in {y-x} sec')
         # self.redis_logger.debug('{}Processed in {} sec'.format(to_print, y-x))
 
 
@@ -82,8 +82,8 @@ class IPAddress(AbstractModule):
                     matching_ips.append(address)
 
         if len(matching_ips) > 0:
-            self.logger.info(f'{item.get_id()} contains {len(matching_ips)} IPs')
-            self.redis_logger.warning(f'{item.get_id()} contains {item.get_id()} IPs')
+            self.logger.info(f'{self.obj.get_global_id()} contains {len(matching_ips)} IPs')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} contains IPs')
 
             # Tag message with IP
             tag = 'infoleak:automatic-detection="ip"'
@@ -95,7 +95,7 @@ class Iban(AbstractModule):
            # Statistics.add_module_tld_stats_by_date('iban', date, iban[0:2], 1)
 
            to_print = f'Iban;{item.get_source()};{item.get_date()};{item.get_basename()};'
-           self.redis_logger.warning(f'{to_print}Checked found {len(valid_ibans)} IBAN;{item_id}')
+           self.redis_logger.warning(f'{to_print}Checked found {len(valid_ibans)} IBAN;{self.obj.get_global_id()}')
            # Tags
            tag = 'infoleak:automatic-detection="iban"'
            self.add_message_to_queue(message=tag, queue='Tags')
@@ -63,7 +63,7 @@ class Keys(AbstractModule):
             get_pgp_content = False
 
         if KeyEnum.PGP_MESSAGE.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a PGP enc message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a PGP enc message')
 
             tag = 'infoleak:automatic-detection="pgp-message"'
             self.add_message_to_queue(message=tag, queue='Tags')
@@ -81,21 +81,21 @@ class Keys(AbstractModule):
             get_pgp_content = True
 
         if KeyEnum.PGP_PRIVATE_KEY_BLOCK.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a pgp private key block message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a pgp private key block message')
 
             tag = 'infoleak:automatic-detection="pgp-private-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
             get_pgp_content = True
 
         if KeyEnum.CERTIFICATE.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a certificate message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a certificate message')
 
             tag = 'infoleak:automatic-detection="certificate"'
             self.add_message_to_queue(message=tag, queue='Tags')
             # find = True
 
         if KeyEnum.RSA_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a RSA private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a RSA private key message')
             print('rsa private key message found')
 
             tag = 'infoleak:automatic-detection="rsa-private-key"'
@@ -103,7 +103,7 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a private key message')
             print('private key message found')
 
             tag = 'infoleak:automatic-detection="private-key"'
@@ -111,7 +111,7 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.ENCRYPTED_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has an encrypted private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has an encrypted private key message')
             print('encrypted private key message found')
 
             tag = 'infoleak:automatic-detection="encrypted-private-key"'
@@ -119,7 +119,7 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.OPENSSH_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has an openssh private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has an openssh private key message')
             print('openssh private key message found')
 
             tag = 'infoleak:automatic-detection="private-ssh-key"'
@@ -127,7 +127,7 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.SSH2_ENCRYPTED_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has an ssh2 private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has an ssh2 private key message')
             print('SSH2 private key message found')
 
             tag = 'infoleak:automatic-detection="private-ssh-key"'
@@ -135,7 +135,7 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.OPENVPN_STATIC_KEY_V1.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has an openssh private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has an openssh private key message')
             print('OpenVPN Static key message found')
 
             tag = 'infoleak:automatic-detection="vpn-static-key"'
@@ -143,21 +143,21 @@ class Keys(AbstractModule):
             # find = True
 
         if KeyEnum.DSA_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a dsa private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a dsa private key message')
 
             tag = 'infoleak:automatic-detection="dsa-private-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
             # find = True
 
         if KeyEnum.EC_PRIVATE_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has an ec private key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has an ec private key message')
 
             tag = 'infoleak:automatic-detection="ec-private-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
             # find = True
 
         if KeyEnum.PUBLIC_KEY.value in content:
-            self.redis_logger.warning(f'{item.get_basename()} has a public key message')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} has a public key message')
 
             tag = 'infoleak:automatic-detection="public-key"'
             self.add_message_to_queue(message=tag, queue='Tags')
@@ -70,7 +70,7 @@ class LibInjection(AbstractModule):
             print(f"Detected (libinjection) SQL in URL: {item_id}")
             print(unquote(url))
 
-            to_print = f'LibInjection;{item.get_source()};{item.get_date()};{item.get_basename()};Detected SQL in URL;{item_id}'
+            to_print = f'LibInjection;{item.get_source()};{item.get_date()};{item.get_basename()};Detected SQL in URL;{self.obj.get_global_id()}'
            self.redis_logger.warning(to_print)
 
            # Add tag
@@ -172,7 +172,7 @@ class Mail(AbstractModule):
        # for tld in mx_tlds:
        #     Statistics.add_module_tld_stats_by_date('mail', item_date, tld, mx_tlds[tld])
 
-       msg = f'Mails;{item.get_source()};{item_date};{item.get_basename()};Checked {num_valid_email} e-mail(s);{item.id}'
+       msg = f'Mails;{item.get_source()};{item_date};{item.get_basename()};Checked {num_valid_email} e-mail(s);{self.obj.get_global_id()}'
        if num_valid_email > self.mail_threshold:
            print(f'{item.id} Checked {num_valid_email} e-mail(s)')
            self.redis_logger.warning(msg)
@@ -98,8 +98,8 @@ class Onion(AbstractModule):
                     print(f'{domain} added to crawler queue: {task_uuid}')
         else:
             to_print = f'Onion;{item.get_source()};{item.get_date()};{item.get_basename()};'
-            print(f'{to_print}Detected {len(domains)} .onion(s);{item.get_id()}')
-            self.redis_logger.warning(f'{to_print}Detected {len(domains)} .onion(s);{item.get_id()}')
+            print(f'{to_print}Detected {len(domains)} .onion(s);{self.obj.get_global_id()}')
+            self.redis_logger.warning(f'{to_print}Detected {len(domains)} .onion(s);{self.obj.get_global_id()}')
 
             # TAG Item
             tag = 'infoleak:automatic-detection="onion"'
@@ -62,7 +62,7 @@ class Phone(AbstractModule):
             tag = 'infoleak:automatic-detection="phone-number"'
             self.add_message_to_queue(message=tag, queue='Tags')
 
-            self.redis_logger.warning(f'{item.get_id()} contains {len(phone)} Phone numbers')
+            self.redis_logger.warning(f'{self.obj.get_global_id()} contains {len(phone)} Phone numbers')
 
         # # List of the regex results in the Item, may be null
         # results = self.REG_PHONE.findall(content)
@@ -53,7 +53,7 @@ class SQLInjectionDetection(AbstractModule):
 
             print(f"Detected SQL in URL: {item.id}")
             print(urllib.request.unquote(url))
-            to_print = f'SQLInjection;{item.get_source()};{item.get_date()};{item.get_basename()};Detected SQL in URL;{item.id}'
+            to_print = f'SQLInjection;{item.get_source()};{item.get_date()};{item.get_basename()};Detected SQL in URL;{self.obj.get_global_id()}'
             self.redis_logger.warning(to_print)
 
             # Tag
@@ -41,7 +41,7 @@ class Tags(AbstractModule):
 
         # Create a new tag
         item.add_tag(tag)
-        print(f'{item.get_id()}: Tagged {tag}')
+        print(f'{self.obj.get_global_id()}: Tagged {tag}')
 
         # Forward message to channel
         self.add_message_to_queue(message=tag, queue='Tag_feed')
@@ -62,7 +62,7 @@ class Telegram(AbstractModule):
                 print(f'username: {user_id}')
         invite_hash = dict_url.get('invite_hash')
         if invite_hash:
-            telegram.save_telegram_invite_hash(invite_hash, item.id)
+            telegram.save_telegram_invite_hash(invite_hash, self.obj.get_global_id())
             print(f'invite code: {invite_hash}')
             invite_code_found = True
 
@@ -78,7 +78,7 @@ class Urls(AbstractModule):
             except AttributeError:
                 url = url_decoded['url']
 
-            print(url, item.get_id())
+            print(url, self.obj.get_global_id())
             self.add_message_to_queue(message=str(url), queue='Url')
             self.logger.debug(f"url_parsed: {url}")
 
@@ -116,8 +116,8 @@ class Tracker_Regex(AbstractModule):
             if ail_objects.is_filtered(obj, filters):
                 continue
 
-            print(f'new tracked regex found: {tracker_name} in {obj_id}')
-            self.redis_logger.warning(f'new tracked regex found: {tracker_name} in {obj_id}')
+            print(f'new tracked regex found: {tracker_name} in {self.obj.get_global_id()}')
+            self.redis_logger.warning(f'new tracked regex found: {tracker_name} in {self.obj.get_global_id()}')
 
             tracker.add(obj.get_type(), obj.get_subtype(r_str=True), obj_id)
 
@@ -93,7 +93,7 @@ class Tracker_Term(AbstractModule):
         try:
             dict_words_freq = Tracker.get_text_word_frequency(content)
         except TimeoutException:
-            self.redis_logger.warning(f"{obj.get_id()} processing timeout")
+            self.redis_logger.warning(f"{self.obj.get_global_id()} processing timeout")
         else:
             signal.alarm(0)
 
@@ -124,8 +124,8 @@ class Tracker_Term(AbstractModule):
             if ail_objects.is_filtered(obj, filters):
                 continue
 
-            print(f'new tracked term {tracker_uuid} found: {tracker_name} in {obj_id}')
-            self.redis_logger.warning(f'new tracked term found: {tracker_name} in {obj_id}')
+            print(f'new tracked term {tracker_uuid} found: {tracker_name} in {self.obj.get_global_id()}')
+            self.redis_logger.warning(f'new tracked term found: {tracker_name} in {self.obj.get_global_id()}')
 
             tracker.add(obj.get_type(), obj.get_subtype(), obj_id)
 
@@ -75,8 +75,8 @@ class Tracker_Typo_Squatting(AbstractModule):
             if ail_objects.is_filtered(obj, filters):
                 continue
 
-            print(f'new tracked typosquatting found: {tracked} in {obj_id}')
-            self.redis_logger.warning(f'tracker typosquatting: {tracked} in {obj_id}')
+            print(f'new tracked typosquatting found: {tracked} in {self.obj.get_global_id()}')
+            self.redis_logger.warning(f'tracker typosquatting: {tracked} in {self.obj.get_global_id()}')
 
             tracker.add(obj.get_type(), obj.get_subtype(r_str=True), obj_id)
 
@@ -69,8 +69,8 @@ class Tracker_Yara(AbstractModule):
             yara_match = self.rules[obj_type].match(data=content, callback=self.yara_rules_match,
                                                     which_callbacks=yara.CALLBACK_MATCHES, timeout=60)
             if yara_match:
-                self.redis_logger.warning(f'tracker yara: new match {self.obj.get_id()}: {yara_match}')
-                print(f'{self.obj.get_id()}: {yara_match}')
+                self.redis_logger.warning(f'tracker yara: new match {self.obj.get_global_id()}: {yara_match}')
+                print(f'{self.obj.get_global_id()}: {yara_match}')
         except yara.TimeoutError:
             print(f'{self.obj.get_id()}: yara scanning timed out')
             self.redis_logger.info(f'{self.obj.get_id()}: yara scanning timed out')
@@ -209,6 +209,14 @@ def unregister_investigation():
 def get_investigations_selector_json():
     return jsonify(Investigations.get_investigations_selector())
 
+@investigations_b.route("/object/gid")
+@login_required
+@login_read_only
+def get_object_gid():
+    obj_global_id = request.args.get('gid')
+    ail_obj = ail_objects.get_obj_from_global_id(obj_global_id)
+    url = ail_obj.get_link(flask_context=True)
+    return redirect(url)
 
 #
 # @investigations_b.route("/object/item") #completely shows the paste in a new tab
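This new endpoint is the target of the reworked dashboard links: given a global object id it resolves the object and redirects to its own page. A minimal sketch of how a link is built and followed, using a made-up global id (the blueprint's URL prefix is not shown in this diff):

    from flask import url_for  # usable inside an application/request context

    gid = 'item::crawled/2024/01/01/example.gz'                 # hypothetical global id from a log line
    link = url_for('investigations_b.get_object_gid', gid=gid)
    # Following the link calls ail_objects.get_obj_from_global_id(gid),
    # then redirects to ail_obj.get_link(flask_context=True), i.e. the object's show page.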
@@ -44,6 +44,7 @@ def event_stream():
     pubsub.psubscribe("Script" + '.*')
     for msg in pubsub.listen():
 
+        # print(msg)
         type = msg['type']
         pattern = msg['pattern']
         channel = msg['channel']
@@ -77,7 +78,7 @@ def dashboard_alert(log):
     log = log[46:].split(';')
     if len(log) == 6:
         date_time = datetime_from_utc_to_local(utc_str)
-        path = url_for('objects_item.showItem', id=log[5])
+        path = url_for('investigations_b.get_object_gid', gid=log[5])
 
         res = {'date': date, 'time': date_time, 'script': log[0], 'domain': log[1], 'date_paste': log[2],
                'paste': log[3], 'message': log[4], 'path': path}
@@ -162,7 +162,7 @@
 
 </body>
 
-<script> var url_showSavedPath = "{{ url_for('objects_item.showItem') }}"; </script>
+<script> var url_showSavedPath = "{{ url_for('investigations_b.get_object_gid') }}"; </script>
 
 <script>
   $("#page-Dashboard").addClass("active");
@@ -181,6 +181,8 @@
   var msage = document.createElement('TD')
   var inspect = document.createElement('TD')
 
+  // console.log(d)
+
   tr.className = "table-log-warning";
   time.appendChild(document.createTextNode(d.time))
   chan.appendChild(document.createTextNode('Script'))
@@ -212,18 +212,18 @@ function create_log_table(obj_json) {
      tr.className = "table-danger"
    }
 
-   source_link = document.createElement("A");
-   if (parsedmess[1] == "slexy.org"){
-     source_url = "http://"+parsedmess[1]+"/view/"+parsedmess[3].split(".")[0];
-   }
-   else{
-     source_url = "http://"+parsedmess[1]+"/"+parsedmess[3].split(".")[0];
-   }
-   source_link.setAttribute("HREF",source_url);
-   source_link.setAttribute("TARGET", "_blank");
-   source_link.appendChild(document.createTextNode(parsedmess[1]));
+   // source_link = document.createElement("A");
+   // if (parsedmess[1] == "slexy.org"){
+   //   source_url = "http://"+parsedmess[1]+"/view/"+parsedmess[3].split(".")[0];
+   // }
+   // else{
+   //   source_url = "http://"+parsedmess[1]+"/"+parsedmess[3].split(".")[0];
+   // }
+   // source_link.setAttribute("HREF",source_url);
+   // src.appendChild(source_link);
+
+   src.appendChild(document.createTextNode(parsedmess[1]));
 
-   src.appendChild(source_link);
 
    var now = new Date();
    var timepaste = pad_2(now.getHours()) + ":" + pad_2(now.getMinutes()) + ":" + pad_2(now.getSeconds());
@@ -250,8 +250,10 @@ function create_log_table(obj_json) {
 
    msage.appendChild(document.createTextNode(message.join(" ")));
 
+   // console.log(parsedmess)
+
    var paste_path = parsedmess[5];
-   var url_to_saved_paste = url_showSavedPath+"?id="+paste_path;
+   var url_to_saved_paste = url_showSavedPath+"?gid="+paste_path;
 
    var action_icon_a = document.createElement("A");
    action_icon_a.setAttribute("TARGET", "_blank");