#!/usr/bin/env python3
# -*-coding:UTF-8 -*

import json
import os
import logging
import logging.config
import re
import sys
import time
import uuid
import yara
import datetime
import base64

import math

from collections import defaultdict
from markupsafe import escape
from textblob import TextBlob
from nltk.tokenize import RegexpTokenizer

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from packages import Date
from lib.ail_core import get_objects_tracked, get_object_all_subtypes, get_objects_retro_hunted
from lib import ail_logger
from lib import ail_orgs
from lib import ConfigLoader
from lib import item_basic
from lib import Tag

# LOGS
logging.config.dictConfig(ail_logger.get_config(name='modules'))
logger = logging.getLogger()

config_loader = ConfigLoader.ConfigLoader()
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_tracker = config_loader.get_db_conn("Kvrocks_Trackers")
config_loader = None

# NLTK tokenizer
TOKENIZER = None

def init_tokenizer():
    global TOKENIZER
    # Raw string avoids invalid-escape warnings; the character class is unchanged.
    TOKENIZER = RegexpTokenizer(r'[\&\~\:\;\,\.\(\)\{\}\|\[\]\\/\-/\=\'\"\%\$\?\@\+\#\_\^\<\>\!\*\n\r\t\s]+',
                                gaps=True, discard_empty=True)

def get_special_characters():
    special_characters = set('[<>~!?@#$%^&*|()_-+={}":;,.\'\n\r\t]/\\')
    special_characters.add('\\s')
    return special_characters

###############
#### UTILS ####
def is_valid_uuid_v4(curr_uuid):
    if not curr_uuid:
        return False
    curr_uuid = curr_uuid.replace('-', '')
    try:
        uuid_test = uuid.UUID(hex=curr_uuid, version=4)
        return uuid_test.hex == curr_uuid
    except:
        return False

def is_valid_regex(tracker_regex):
    try:
        re.compile(tracker_regex)
        return True
    except:
        return False

def is_valid_mail(email):
    email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
    email_regex = re.compile(email_regex)
    result = email_regex.match(email)
    if result:
        return True
    else:
        return False

def verify_mail_list(mail_list):
    for mail in mail_list:
        if not is_valid_mail(mail):
            return {'status': 'error', 'reason': 'Invalid email', 'value': mail}, 400
    return None
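
# Illustrative usage (hypothetical addresses):
#   >>> verify_mail_list(['alice@example.com'])
#   None
#   >>> verify_mail_list(['not-an-email'])
#   ({'status': 'error', 'reason': 'Invalid email', 'value': 'not-an-email'}, 400)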

## -- UTILS -- ##
#################

###################
#### TRACKER ####

class Tracker:
    def __init__(self, tracker_uuid):
        self.uuid = tracker_uuid

    def get_uuid(self):
        return self.uuid

    def exists(self):
        return r_tracker.exists(f'tracker:{self.uuid}')

    def _get_field(self, field):
        return r_tracker.hget(f'tracker:{self.uuid}', field)

    def _set_field(self, field, value):
        r_tracker.hset(f'tracker:{self.uuid}', field, value)

    def get_date(self):
        return self._get_field('date')

    def get_last_change(self, r_str=False):
        last_change = self._get_field('last_change')
        if r_str and last_change:
            last_change = datetime.datetime.fromtimestamp(float(last_change)).strftime('%Y-%m-%d %H:%M:%S')
        return last_change

    def get_first_seen(self):
        return self._get_field('first_seen')

    def get_last_seen(self):
        return self._get_field('last_seen')

    def _set_first_seen(self, date):
        self._set_field('first_seen', date)

    def _set_last_seen(self, date):
        self._set_field('last_seen', date)

    def _exist_date(self, date):
        return r_tracker.exists(f'tracker:objs:{self.uuid}:{date}')

    # TODO: ADD CACHE ???
    def update_daterange(self, date=None):
        first_seen = self.get_first_seen()
        # Added Object
        if date:
            date = int(date)
            # if op == 'add':
            if not first_seen:
                self._set_first_seen(date)
                self._set_last_seen(date)
            else:
                first_seen = int(first_seen)
                last_seen = int(self.get_last_seen())
                if date < first_seen:
                    self._set_first_seen(date)
                if date > last_seen:
                    self._set_last_seen(date)
        # Removed Object: re-validate the range against the per-day object sets
        else:
            last_seen = self.get_last_seen()
            if first_seen and last_seen:
                valid_first_seen = self._exist_date(first_seen)
                valid_last_seen = self._exist_date(last_seen)
                # update first seen
                if not valid_first_seen:
                    for date in Date.get_daterange(first_seen, last_seen):
                        if self._exist_date(date):
                            self._set_first_seen(date)
                            valid_first_seen = True
                            break
                # update last seen
                if not valid_last_seen:
                    for date in reversed(Date.get_daterange(first_seen, last_seen)):
                        if self._exist_date(date):
                            # move last_seen back to the most recent date that still has objects
                            self._set_last_seen(date)
                            valid_last_seen = True
                            break
                if not valid_first_seen or not valid_last_seen:
                    r_tracker.hdel(f'tracker:{self.uuid}', 'first_seen')
                    r_tracker.hdel(f'tracker:{self.uuid}', 'last_seen')

    def get_description(self):
        return self._get_field('description')

    ## LEVEL ##

    def get_level(self):
        level = self._get_field('level')
        # default to user-only when the level field is missing
        if not level:
            level = 0
        return int(level)

    def set_level(self, level, org_uuid):
        tracker_type = self.get_type()
        if level == 0:  # user only
            user_id = self.get_user()
            r_tracker.sadd(f'user:tracker:{user_id}', self.uuid)
            r_tracker.sadd(f'user:tracker:{user_id}:{tracker_type}', self.uuid)
        elif level == 1:  # global
            r_tracker.sadd('global:tracker', self.uuid)
            r_tracker.sadd(f'global:tracker:{tracker_type}', self.uuid)
        elif level == 2:  # org only
            r_tracker.sadd(f'org:tracker:{org_uuid}', self.uuid)
            r_tracker.sadd(f'org:tracker:{org_uuid}:{tracker_type}', self.uuid)
            self.add_to_org(org_uuid)
        self._set_field('level', level)

    def reset_level(self, old_level, new_level, new_org_uuid):
        if old_level == 0:
            user_id = self.get_user()
            r_tracker.srem(f'user:tracker:{user_id}', self.uuid)
            r_tracker.srem(f'user:tracker:{user_id}:{self.get_type()}', self.uuid)
        elif old_level == 1:
            r_tracker.srem('global:tracker', self.uuid)
            r_tracker.srem(f'global:tracker:{self.get_type()}', self.uuid)
        # Org
        elif old_level == 2:
            old_org = self.get_org()
            r_tracker.srem(f'org:tracker:{old_org}', self.uuid)
            r_tracker.srem(f'org:tracker:{old_org}:{self.get_type()}', self.uuid)
            ail_orgs.remove_obj_to_org(old_org, 'tracker', self.uuid)
        self.set_level(new_level, new_org_uuid)
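
    # Level semantics used throughout this class: 0 = user-only, 1 = global,
    # 2 = org-restricted (see set_level()/reset_level() above and check_level() below).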

    def check_level(self, user_org, user_id):
        level = self.get_level()
        if level == 1:
            return True
        elif level == 0:
            return self.get_user() == user_id
        elif level == 2:
            return self.get_org() == user_org

    def is_level_user(self):
        return self.get_level() == 0

    def is_level_org(self):
        return self.get_level() == 2

    def is_level_global(self):
        return self.get_level() == 1

    ## ORG ##

    def get_creator_org(self):
        return self._get_field('creator_org')

    def get_org(self):
        return self._get_field('org')

    def add_to_org(self, org_uuid):
        self._set_field('org', org_uuid)
        ail_orgs.add_obj_to_org(org_uuid, 'tracker', self.uuid)

    ## -ORG- ##

    def get_filters(self):
        filters = self._get_field('filters')
        if not filters:
            return {}
        else:
            return json.loads(filters)

    def set_filters(self, filters):
        if filters:
            self._set_field('filters', json.dumps(filters))

    def del_filters(self, tracker_type, to_track):
        filters = self.get_filters()
        if not filters:
            filters = get_objects_tracked()
        for obj_type in filters:
            r_tracker.srem(f'trackers:objs:{tracker_type}:{obj_type}', to_track)
            r_tracker.srem(f'trackers:uuid:{tracker_type}:{to_track}', f'{self.uuid}:{obj_type}')
        r_tracker.hdel(f'tracker:{self.uuid}', 'filters')

    def get_tracked(self):
        return self._get_field('tracked')

    def get_type(self):
        return self._get_field('type')

    def get_tags(self):
        return r_tracker.smembers(f'tracker:tags:{self.uuid}')

    def _set_tags(self, tags):
        for tag in tags:
            r_tracker.sadd(f'tracker:tags:{self.uuid}', tag)
            Tag.create_custom_tag(tag)  # TODO CUSTOM TAGS

    def _del_tags(self):
        return r_tracker.delete(f'tracker:tags:{self.uuid}')

    def mail_export(self):
        return r_tracker.exists(f'tracker:mail:{self.uuid}')

    def get_mails(self):
        return r_tracker.smembers(f'tracker:mail:{self.uuid}')

    def _set_mails(self, mails):
        for mail in mails:
            r_tracker.sadd(f'tracker:mail:{self.uuid}', escape(mail))

    def _del_mails(self):
        r_tracker.delete(f'tracker:mail:{self.uuid}')

    def get_user(self):
        return self._get_field('user_id')

    def webhook_export(self):
        # True only if a non-empty webhook URL is set
        webhook = self.get_webhook()
        return bool(webhook)

    def get_webhook(self):
        return r_tracker.hget(f'tracker:{self.uuid}', 'webhook')

    def get_sparkline(self, nb_day=6):
        date_range_sparkline = Date.get_date_range(nb_day)
        sparkline = []
        for date in date_range_sparkline:
            nb_seen_this_day = self.get_nb_objs_by_date(date)
            if nb_seen_this_day is None:
                nb_seen_this_day = 0
            sparkline.append(int(nb_seen_this_day))
        return sparkline

    def get_rule(self):
        yar_path = self.get_tracked()
        return yara.compile(filepath=os.path.join(get_yara_rules_dir(), yar_path))

    def get_meta(self, options):
        if not options:
            options = set()
        meta = {'uuid': self.uuid,
                'tracked': self.get_tracked(),  # TODO TO CHECK
                'type': self.get_type(),
                'date': self.get_date(),
                'first_seen': self.get_first_seen(),
                'last_seen': self.get_last_seen()}
        if 'org' in options:
            meta['org'] = self.get_org()
        if 'org_name' in options:
            meta['org_name'] = ail_orgs.Organisation(self.get_org()).get_name()
        if 'user' in options:
            meta['user'] = self.get_user()
        if 'level' in options:
            meta['level'] = self.get_level()
        if 'description' in options:
            meta['description'] = self.get_description()
        if 'nb_objs' in options:
            meta['nb_objs'] = self.get_nb_objs()
        if 'tags' in options:
            meta['tags'] = self.get_tags()
        if 'filters' in options:
            meta['filters'] = self.get_filters()
        if 'mails' in options:
            meta['mails'] = self.get_mails()
        if 'webhooks' in options:
            meta['webhook'] = self.get_webhook()
        if 'sparkline' in options:
            meta['sparkline'] = self.get_sparkline(6)
        return meta
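
    # Illustrative result (field values are hypothetical):
    #   >>> Tracker('8f6d...').get_meta({'level', 'tags'})
    #   {'uuid': '8f6d...', 'tracked': 'ail', 'type': 'word', 'date': '20240101',
    #    'first_seen': '20240102', 'last_seen': '20240115', 'level': 1, 'tags': {'infoleak'}}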

    def _add_to_dashboard(self, obj_type, subtype, obj_id):
        mess = f'{self.uuid}:{int(time.time())}:{obj_type}:{subtype}:{obj_id}'
        if self.is_level_user():
            user = self.get_user()
            r_tracker.lpush(f'trackers:user:{user}', mess)
            r_tracker.ltrim(f'trackers:user:{user}', 0, 9)
        else:
            r_tracker.lpush('trackers:dashboard', mess)
            r_tracker.ltrim('trackers:dashboard', 0, 9)

    def get_nb_objs_by_type(self, obj_type):
        return r_tracker.scard(f'tracker:objs:{self.uuid}:{obj_type}')

    def get_objs_by_type(self, obj_type):
        return r_tracker.smembers(f'tracker:objs:{self.uuid}:{obj_type}')

    def get_nb_objs(self):
        objs = {}
        for obj_type in get_objects_tracked():
            nb = self.get_nb_objs_by_type(obj_type)
            if nb:
                objs[obj_type] = nb
        return objs

    def get_objs(self):
        objs = []
        for obj_type in get_objects_tracked():
            for obj in self.get_objs_by_type(obj_type):
                subtype, obj_id = obj.split(':', 1)
                objs.append((obj_type, subtype, obj_id))
        return objs

    def get_nb_objs_by_date(self, date):
        return r_tracker.scard(f'tracker:objs:{self.uuid}:{date}')

    def get_objs_by_date(self, date):
        return r_tracker.smembers(f'tracker:objs:{self.uuid}:{date}')

    def get_objs_by_daterange(self, date_from, date_to):
        objs = set()
        for date in Date.get_daterange(date_from, date_to):
            objs |= self.get_objs_by_date(date)
        return objs

    def get_obj_dates(self, obj_type, subtype, obj_id):
        return r_tracker.smembers(f'obj:tracker:{obj_type}:{subtype}:{obj_id}:{self.uuid}')

    # - TODO Data Retention TO Implement - #
    # Or Daily/Monthly Global DB Cleanup:
    #     Iterate on each tracker:
    #         Iterate on each Obj:
    #             Iterate on each date:
    #                 Delete from tracker range if date limit exceeded
    # - TODO

    def add(self, obj_type, subtype, obj_id, date=None):
        if not subtype:
            subtype = ''
        if not date:
            date = Date.get_today_date_str()

        new_obj_date = r_tracker.sadd(f'tracker:objs:{self.uuid}:{date}', f'{obj_type}:{subtype}:{obj_id}')
        r_tracker.sadd(f'obj:trackers:{obj_type}:{subtype}:{obj_id}', self.uuid)

        # Only save object match date - Needed for the DB Cleaner
        r_tracker.sadd(f'obj:tracker:{obj_type}:{subtype}:{obj_id}:{self.uuid}', date)
        r_tracker.sadd(f'tracker:objs:{self.uuid}:{obj_type}', f'{subtype}:{obj_id}')

        if new_obj_date:
            self.update_daterange(date)

        self._add_to_dashboard(obj_type, subtype, obj_id)
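
    # Key layout written by add()/remove() (derived from the calls above):
    #   tracker:objs:<tracker_uuid>:<date>          set of 'obj_type:subtype:obj_id' per day
    #   tracker:objs:<tracker_uuid>:<obj_type>      set of 'subtype:obj_id' per object type
    #   obj:trackers:<obj_type>:<subtype>:<obj_id>  set of tracker uuids matching an object
    #   obj:tracker:<obj_type>:<subtype>:<obj_id>:<tracker_uuid>  set of match dates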

    def remove(self, obj_type, subtype, obj_id):
        if not subtype:
            subtype = ''

        for date in self.get_obj_dates(obj_type, subtype, obj_id):
            r_tracker.srem(f'tracker:objs:{self.uuid}:{date}', f'{obj_type}:{subtype}:{obj_id}')
            r_tracker.srem(f'obj:tracker:{obj_type}:{subtype}:{obj_id}:{self.uuid}', date)

        r_tracker.srem(f'obj:trackers:{obj_type}:{subtype}:{obj_id}', self.uuid)
        r_tracker.srem(f'tracker:objs:{self.uuid}:{obj_type}', f'{subtype}:{obj_id}')
        self.update_daterange()

    # TODO escape custom tags
    # TODO escape mails ????
    def create(self, tracker_type, to_track, org, user_id, level, description=None, filters={}, tags=[], mails=[], webhook=None):
        if self.exists():
            raise Exception('Error: Tracker already exists')

        # YARA
        if tracker_type == 'yara_custom' or tracker_type == 'yara_default':
            to_track = save_yara_rule(tracker_type, to_track, tracker_uuid=self.uuid)
            tracker_type = 'yara'

        elif tracker_type == 'typosquatting':

            from ail_typo_squatting import runAll

            domain = to_track.split(" ")[0]
            typo_generation = runAll(domain=domain, limit=math.inf, formatoutput="text", pathOutput="-", verbose=False)  # TODO REPLACE LIMIT BY -1
            for typo in typo_generation:
                r_tracker.sadd(f'tracker:typosquatting:{to_track}', typo)

        # create metadata
        self._set_field('tracked', to_track)
        self._set_field('type', tracker_type)
        self._set_field('date', datetime.date.today().strftime("%Y%m%d"))
        self._set_field('creator_org', org)
        self._set_field('user_id', user_id)
        if description:
            self._set_field('description', escape(description))
        if webhook:
            self._set_field('webhook', webhook)

        # create all tracker set
        r_tracker.sadd(f'all:tracker:{tracker_type}', to_track)
        # create tracker - uuid map
        r_tracker.sadd(f'all:tracker_uuid:{tracker_type}:{to_track}', self.uuid)
        r_tracker.sadd('trackers:all', self.uuid)
        r_tracker.sadd(f'trackers:all:{tracker_type}', self.uuid)

        # TRACKER LEVEL
        self.set_level(level, org)

        # create tracker tags list
        if tags:
            self._set_tags(tags)

        # create tracker mail notification list
        if mails:
            self._set_mails(mails)

        # Filters
        if not filters:
            filters = {}
            for obj_type in get_objects_tracked():
                filters[obj_type] = {}
        else:
            self.set_filters(filters)
        for obj_type in filters:
            r_tracker.sadd(f'trackers:objs:{tracker_type}:{obj_type}', to_track)
            r_tracker.sadd(f'trackers:uuid:{tracker_type}:{to_track}', f'{self.uuid}:{obj_type}')

        self._set_field('last_change', time.time())

        # toggle refresh module tracker list/set
        trigger_trackers_refresh(tracker_type)
        return self.uuid

    def edit(self, tracker_type, to_track, level, org, description=None, filters={}, tags=[], mails=[], webhook=None):

        # edit tracker
        old_type = self.get_type()
        old_to_track = self.get_tracked()
        old_level = self.get_level()
        user_id = self.get_user()

        # YARA
        if tracker_type == 'yara_custom' or tracker_type == 'yara_default':
            # create yara rule
            if tracker_type == 'yara_default' and old_type == 'yara':
                if not is_default_yara_rule(old_to_track):
                    filepath = get_yara_rule_file_by_tracker_name(old_to_track)
                    if filepath:
                        os.remove(filepath)
            to_track = save_yara_rule(tracker_type, to_track, tracker_uuid=self.uuid)
            tracker_type = 'yara'

        # TODO TYPO EDIT
        elif tracker_type == 'typosquatting':
            pass

        if tracker_type != old_type:
            # LEVEL
            self.reset_level(old_level, level, org)
            # Delete OLD YARA Rule File
            if old_type == 'yara':
                if not is_default_yara_rule(old_to_track):
                    filepath = get_yara_rule_file_by_tracker_name(old_to_track)
                    if filepath:
                        os.remove(filepath)
            if old_type == 'typosquatting':
                r_tracker.delete(f'tracker:typosquatting:{old_to_track}')
            self._set_field('type', tracker_type)

            # create all tracker set
            r_tracker.srem(f'all:tracker:{old_type}', old_to_track)
            r_tracker.sadd(f'all:tracker:{tracker_type}', to_track)
            # create tracker - uuid map
            r_tracker.srem(f'all:tracker_uuid:{old_type}:{old_to_track}', self.uuid)
            r_tracker.sadd(f'all:tracker_uuid:{tracker_type}:{to_track}', self.uuid)
            # create all tracker set by type
            r_tracker.srem(f'trackers:all:{old_type}', self.uuid)
            r_tracker.sadd(f'trackers:all:{tracker_type}', self.uuid)

        # Same Type
        else:
            # LEVEL
            self.reset_level(old_level, level, org)

        # To Track Edited
        if to_track != old_to_track:
            self._set_field('tracked', to_track)

        self._set_field('description', description)
        self._set_field('webhook', webhook)

        # Tags
        nb_old_tags = r_tracker.scard(f'tracker:tags:{self.uuid}')
        if nb_old_tags > 0 or tags:
            self._del_tags()
            self._set_tags(tags)

        # Mails
        nb_old_mails = r_tracker.scard(f'tracker:mail:{self.uuid}')
        if nb_old_mails > 0 or mails:
            self._del_mails()
            self._set_mails(mails)

        # Filters
        self.del_filters(old_type, old_to_track)
        if not filters:
            filters = {}
            for obj_type in get_objects_tracked():
                filters[obj_type] = {}
        else:
            self.set_filters(filters)
        for obj_type in filters:
            r_tracker.sadd(f'trackers:objs:{tracker_type}:{obj_type}', to_track)
            r_tracker.sadd(f'trackers:uuid:{tracker_type}:{to_track}', f'{self.uuid}:{obj_type}')

        self._set_field('last_change', time.time())

        # Refresh Trackers
        trigger_trackers_refresh(tracker_type)
        if tracker_type != old_type:
            trigger_trackers_refresh(old_type)
        return self.uuid

    def delete(self):
        for obj in self.get_objs():
            self.remove(obj[0], obj[1], obj[2])

        tracker_type = self.get_type()
        tracked = self.get_tracked()

        if tracker_type == 'typosquatting':
            r_tracker.delete(f'tracker:typosquatting:{tracked}')
        elif tracker_type == 'yara':
            if not is_default_yara_rule(tracked):
                filepath = get_yara_rule_file_by_tracker_name(tracked)
                if filepath:
                    os.remove(filepath)

        # Filters
        filters = get_objects_tracked()
        for obj_type in filters:
            r_tracker.srem(f'trackers:objs:{tracker_type}:{obj_type}', tracked)
            r_tracker.srem(f'trackers:uuid:{tracker_type}:{tracked}', f'{self.uuid}:{obj_type}')

        self._del_mails()
        self._del_tags()

        level = self.get_level()

        if level == 0:  # user only
            user = self.get_user()
            r_tracker.srem(f'user:tracker:{user}', self.uuid)
            r_tracker.srem(f'user:tracker:{user}:{tracker_type}', self.uuid)
        elif level == 1:  # global
            r_tracker.srem('global:tracker', self.uuid)
            r_tracker.srem(f'global:tracker:{tracker_type}', self.uuid)
        elif level == 2:  # org only
            org = self.get_org()
            r_tracker.srem(f'org:tracker:{org}', self.uuid)
            r_tracker.srem(f'org:tracker:{org}:{tracker_type}', self.uuid)

        r_tracker.srem(f'all:tracker:{tracker_type}', tracked)
        # tracker - uuid map
        r_tracker.srem(f'all:tracker_uuid:{tracker_type}:{tracked}', self.uuid)
        r_tracker.srem('trackers:all', self.uuid)
        r_tracker.srem(f'trackers:all:{tracker_type}', self.uuid)
        ail_orgs.remove_obj_to_org(self.get_org(), 'tracker', self.uuid)
        # meta
        r_tracker.delete(f'tracker:{self.uuid}')
        trigger_trackers_refresh(tracker_type)


def create_tracker(tracker_type, to_track, org, user_id, level, description=None, filters={}, tags=[], mails=[], webhook=None, tracker_uuid=None):
    if not tracker_uuid:
        tracker_uuid = str(uuid.uuid4())
    tracker = Tracker(tracker_uuid)
    return tracker.create(tracker_type, to_track, org, user_id, level, description=description, filters=filters, tags=tags,
                          mails=mails, webhook=webhook)

def _re_create_tracker(tracker_type, tracker_uuid, to_track, org, user_id, level, description=None, filters={}, tags=[], mails=[], webhook=None, first_seen=None, last_seen=None):
    create_tracker(tracker_type, to_track, org, user_id, level, description=description, filters=filters,
                   tags=tags, mails=mails, webhook=webhook, tracker_uuid=tracker_uuid)
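
# Illustrative call (org uuid, user id and returned uuid are hypothetical):
#   >>> create_tracker('word', 'ail', org='c9b2...', user_id='admin@admin.test', level=1,
#   ...                mails=['admin@admin.test'], tags=['infoleak:automatic-detection'])
#   '8f6d2f52-...'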

def get_trackers_types():
    return ['word', 'set', 'regex', 'typosquatting', 'yara']

def get_trackers():
    return r_tracker.smembers('trackers:all')

def get_trackers_by_type(tracker_type):
    return r_tracker.smembers(f'trackers:all:{tracker_type}')

def _get_tracked_by_obj_type(tracker_type, obj_type):
    return r_tracker.smembers(f'trackers:objs:{tracker_type}:{obj_type}')

def get_trackers_by_tracked_obj_type(tracker_type, obj_type, tracked):
    trackers_uuid = set()
    for res in r_tracker.smembers(f'trackers:uuid:{tracker_type}:{tracked}'):
        tracker_uuid, tracker_obj_type = res.split(':', 1)
        if tracker_obj_type == obj_type:
            trackers_uuid.add(tracker_uuid)
    return trackers_uuid

def get_trackers_by_tracked(tracker_type, tracked):
    return r_tracker.smembers(f'all:tracker_uuid:{tracker_type}:{tracked}')

def get_user_trackers_by_tracked(tracker_type, tracked, user_id):
    user_trackers = get_user_trackers(user_id, tracker_type=tracker_type)
    trackers_uuid = get_trackers_by_tracked(tracker_type, tracked)
    return trackers_uuid.intersection(user_trackers)

def get_trackers_tracked_by_type(tracker_type):
    return r_tracker.smembers(f'all:tracker:{tracker_type}')

def get_global_trackers(tracker_type=None):
    if tracker_type:
        return r_tracker.smembers(f'global:tracker:{tracker_type}')
    else:
        return r_tracker.smembers('global:tracker')

def get_user_trackers(user_id, tracker_type=None):
    if tracker_type:
        return r_tracker.smembers(f'user:tracker:{user_id}:{tracker_type}')
    else:
        return r_tracker.smembers(f'user:tracker:{user_id}')

def get_org_trackers(org, tracker_type=None):
    if tracker_type:
        return r_tracker.smembers(f'org:tracker:{org}:{tracker_type}')
    else:
        return r_tracker.smembers(f'org:tracker:{org}')

def get_nb_global_trackers(tracker_type=None):
    if tracker_type:
        return r_tracker.scard(f'global:tracker:{tracker_type}')
    else:
        return r_tracker.scard('global:tracker')

def get_nb_user_trackers(user_id, tracker_type=None):
    if tracker_type:
        return r_tracker.scard(f'user:tracker:{user_id}:{tracker_type}')
    else:
        return r_tracker.scard(f'user:tracker:{user_id}')

def get_nb_org_trackers(org, tracker_type=None):
    if tracker_type:
        return r_tracker.scard(f'org:tracker:{org}:{tracker_type}')
    else:
        return r_tracker.scard(f'org:tracker:{org}')

def get_user_trackers_meta(user_id, tracker_type=None):
    metas = []
    for tracker_uuid in get_user_trackers(user_id, tracker_type=tracker_type):
        tracker = Tracker(tracker_uuid)
        metas.append(tracker.get_meta(options={'description', 'mails', 'org', 'org_name', 'sparkline', 'tags'}))
    return metas

def get_global_trackers_meta(tracker_type=None):
    metas = []
    for tracker_uuid in get_global_trackers(tracker_type=tracker_type):
        tracker = Tracker(tracker_uuid)
        metas.append(tracker.get_meta(options={'description', 'mails', 'org', 'org_name', 'sparkline', 'tags'}))
    return metas

def get_org_trackers_meta(user_org, tracker_type=None):
    metas = []
    for tracker_uuid in get_org_trackers(user_org, tracker_type=tracker_type):
        tracker = Tracker(tracker_uuid)
        metas.append(tracker.get_meta(options={'description', 'mails', 'org', 'org_name', 'sparkline', 'tags'}))
    return metas

def get_users_trackers_meta():
    trackers = []
    for tracker_uuid in get_trackers():
        tracker = Tracker(tracker_uuid)
        if tracker.is_level_user():
            trackers.append(tracker.get_meta(options={'mails', 'sparkline', 'tags'}))
    return trackers

def get_orgs_trackers_meta():
    trackers = []
    for tracker_uuid in get_trackers():
        tracker = Tracker(tracker_uuid)
        if tracker.is_level_org():
            trackers.append(tracker.get_meta(options={'mails', 'sparkline', 'tags'}))
    return trackers

def get_trackers_graph_by_day(l_trackers, num_day=31, date_from=None, date_to=None):
    if date_from and date_to:
        date_range = Date.substract_date(date_from, date_to)
    else:
        date_range = Date.get_date_range(num_day)
    list_tracker_stats = []
    for tracker_uuid in l_trackers:
        dict_tracker_data = []
        tracker = Tracker(tracker_uuid)
        for date_day in date_range:
            nb_seen_this_day = tracker.get_nb_objs_by_date(date_day)
            if nb_seen_this_day is None:
                nb_seen_this_day = 0
            dict_tracker_data.append({"date": date_day, "value": int(nb_seen_this_day)})
        list_tracker_stats.append({"name": tracker.get_tracked(), "Data": dict_tracker_data})
    return list_tracker_stats
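
# Illustrative output shape (values are hypothetical):
#   [{'name': 'ail', 'Data': [{'date': '20240101', 'value': 3},
#                             {'date': '20240102', 'value': 0}, ...]}]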

def get_trackers_dashboard(user_org, user_id):
    trackers = []
    for raw in r_tracker.lrange('trackers:dashboard', 0, -1):
        tracker_uuid, timestamp, obj_type, subtype, obj_id = raw.split(':', 4)
        tracker = Tracker(tracker_uuid)
        if not tracker.check_level(user_org, user_id):
            continue
        meta = tracker.get_meta(options={'description', 'tags'})
        if not meta.get('type'):
            meta['type'] = 'Tracker DELETED'
        timestamp = datetime.datetime.fromtimestamp(float(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
        meta['timestamp'] = timestamp
        trackers.append(meta)
    return trackers

def get_user_dashboard(user_id):  # TODO SORT + REMOVE OLDER ROWS (trim)
    trackers = []
    for raw in r_tracker.lrange(f'trackers:user:{user_id}', 0, -1):
        tracker_uuid, timestamp, obj_type, subtype, obj_id = raw.split(':', 4)
        tracker = Tracker(tracker_uuid)
        meta = tracker.get_meta(options={'tags'})
        timestamp = datetime.datetime.fromtimestamp(float(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
        meta['timestamp'] = timestamp
        trackers.append(meta)

    return trackers
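
# Dashboard rows are the raw messages pushed by Tracker._add_to_dashboard(),
# i.e. '<tracker_uuid>:<epoch>:<obj_type>:<subtype>:<obj_id>' (hence split(':', 4)).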

def get_trackers_stats(user_org, user_id):
    stats = {'all': 0}
    for tracker_type in get_trackers_types():
        nb_global = get_nb_global_trackers(tracker_type=tracker_type)
        nb_user = get_nb_user_trackers(user_id, tracker_type=tracker_type)
        nb_org = get_nb_org_trackers(user_org, tracker_type=tracker_type)
        stats[tracker_type] = nb_global + nb_user + nb_org
        stats['all'] += nb_global + nb_user + nb_org
    return stats


## Cache ##
# TODO API: Check Tracker type
def trigger_trackers_refresh(tracker_type):
    r_cache.set(f'tracker:refresh:{tracker_type}', time.time())

def get_tracker_last_updated_by_type(tracker_type):
    epoch_update = r_cache.get(f'tracker:refresh:{tracker_type}')
    if not epoch_update:
        epoch_update = 0
    return float(epoch_update)
# - Cache - #
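
# Refresh contract (as used above): writers bump 'tracker:refresh:<type>' with the
# current epoch; consumer modules are expected to reload their tracker list whenever
# get_tracker_last_updated_by_type() returns a timestamp newer than their last reload.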

## Objects ##

def is_obj_tracked(obj_type, subtype, obj_id):
    return r_tracker.exists(f'obj:trackers:{obj_type}:{subtype}:{obj_id}')

def get_obj_trackers(obj_type, subtype, obj_id):
    return r_tracker.smembers(f'obj:trackers:{obj_type}:{subtype}:{obj_id}')

def delete_obj_trackers(obj_type, subtype, obj_id):
    for tracker_uuid in get_obj_trackers(obj_type, subtype, obj_id):
        tracker = Tracker(tracker_uuid)
        tracker.remove(obj_type, subtype, obj_id)

######################
#### TRACKERS ACL ####

## LEVEL ##
def is_tracker_global_level(tracker_uuid):
    # guard against a missing 'level' field
    level = r_tracker.hget(f'tracker:{tracker_uuid}', 'level')
    return level is not None and int(level) == 1

def is_tracked_in_global_level(tracked, tracker_type):
    for tracker_uuid in get_trackers_by_tracked(tracker_type, tracked):
        tracker = Tracker(tracker_uuid)
        if tracker.is_level_global():
            return True
    return False

def is_tracked_in_user_level(tracked, tracker_type, user_id):
    trackers_uuid = get_user_trackers_by_tracked(tracker_type, tracked, user_id)
    if trackers_uuid:
        return True
    else:
        return False

## API ##
def api_check_tracker_uuid(tracker_uuid):
    if not is_valid_uuid_v4(tracker_uuid):
        return {"status": "error", "reason": "Invalid uuid"}, 400
    if not r_tracker.exists(f'tracker:{tracker_uuid}'):
        return {"status": "error", "reason": "Unknown uuid"}, 404
    return None

def api_check_tracker_acl(tracker_uuid, user_org, user_id, user_role, action):
    res = api_check_tracker_uuid(tracker_uuid)
    if res:
        return res
    tracker = Tracker(tracker_uuid)
    if not ail_orgs.check_obj_access_acl(tracker, user_org, user_id, user_role, action):
        return {"status": "error", "reason": "Access Denied"}, 403

def api_is_allowed_to_edit_tracker_level(tracker_uuid, user_org, user_id, user_role, new_level):
    tracker = Tracker(tracker_uuid)
    if not ail_orgs.check_acl_edit_level(tracker, user_org, user_id, user_role, new_level):
        return {"status": "error", "reason": "Access Denied - Tracker level"}, 403

## --ACL-- ##

#### FIX DB #### TODO ###################################################################
def fix_tracker_stats_per_day(tracker_uuid):
    tracker = Tracker(tracker_uuid)
    date_from = tracker.get_date()
    date_to = Date.get_today_date_str()
    # delete stats
    r_tracker.delete(f'tracker:stat:{tracker_uuid}')
    r_tracker.hdel(f'tracker:{tracker_uuid}', 'first_seen')
    r_tracker.hdel(f'tracker:{tracker_uuid}', 'last_seen')
    # create new stats
    for date_day in Date.substract_date(date_from, date_to):
        date_day = int(date_day)

        nb_items = r_tracker.scard(f'tracker:item:{tracker_uuid}:{date_day}')
        if nb_items:
            r_tracker.zincrby(f'tracker:stat:{tracker_uuid}', nb_items, int(date_day))

            # update first_seen/last_seen
            tracker.update_daterange(date_day)

def fix_tracker_item_link(tracker_uuid):
    tracker = Tracker(tracker_uuid)
    date_from = tracker.get_first_seen()
    date_to = tracker.get_last_seen()

    if date_from and date_to:
        for date_day in Date.substract_date(date_from, date_to):
            l_items = r_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}')
            for item_id in l_items:
                r_tracker.sadd(f'obj:trackers:item:{item_id}', tracker_uuid)

def fix_all_tracker_uuid_list():
    r_tracker.delete('trackers:all')
    for tracker_type in get_trackers_types():
        r_tracker.delete(f'trackers:all:{tracker_type}')
        for tracked in get_trackers_tracked_by_type(tracker_type):
            l_tracker_uuid = get_trackers_by_tracked(tracker_type, tracked)
            for tracker_uuid in l_tracker_uuid:
                r_tracker.sadd('trackers:all', tracker_uuid)
                r_tracker.sadd(f'trackers:all:{tracker_type}', tracker_uuid)

## --FIX DB-- ##

#### CREATE TRACKER ####
def api_validate_tracker_to_add(to_track, tracker_type, nb_words=1):
    if tracker_type == 'regex':
        if not is_valid_regex(to_track):
            return {"status": "error", "reason": "Invalid regex"}, 400
    elif tracker_type == 'word' or tracker_type == 'set':
        # force lowercase
        to_track = to_track.lower()
        word_set = set(to_track)
        set_inter = word_set.intersection(get_special_characters())
        if set_inter:
            return {"status": "error",
                    "reason": f'special character(s) not allowed: {set_inter}',
                    "message": "Please use a python regex or remove all special characters"}, 400
        words = to_track.split()
        # more than one word: track it as a set
        if tracker_type == 'word' and len(words) > 1:
            tracker_type = 'set'

        # output format: tracker1,tracker2,tracker3;2
        if tracker_type == 'set':
            try:
                nb_words = int(nb_words)
            except (TypeError, ValueError):  # missing or non-numeric value
                nb_words = 1
            if nb_words == 0:
                nb_words = 1

            words_set = set(words)
            words_set = sorted(words_set)
            if nb_words > len(words_set):
                nb_words = len(words_set)

            to_track = ",".join(words_set)
            to_track = f"{to_track};{nb_words}"
    elif tracker_type == 'typosquatting':
        to_track = to_track.lower()
        # Take only the first term
        domain = to_track.split(" ")
        if len(domain) > 1:
            return {"status": "error", "reason": "Only one domain is accepted at a time"}, 400
        if "." not in to_track:
            return {"status": "error", "reason": "Invalid domain name"}, 400

    elif tracker_type == 'yara_custom':
        if not is_valid_yara_rule(to_track):
            return {"status": "error", "reason": "Invalid custom Yara Rule"}, 400
    elif tracker_type == 'yara_default':
        if not is_valid_default_yara_rule(to_track):
            return {"status": "error", "reason": "The Yara Rule doesn't exist"}, 400
    else:
        return {"status": "error", "reason": "Incorrect type"}, 400
    return {"status": "success", "tracked": to_track, "type": tracker_type}, 200

def api_add_tracker(dict_input, org, user_id):
    to_track = dict_input.get('tracked', None)
    if not to_track:
        return {"status": "error", "reason": "Tracker not provided"}, 400
    tracker_type = dict_input.get('type', None)
    if not tracker_type:
        return {"status": "error", "reason": "Tracker type not provided"}, 400
    nb_words = dict_input.get('nb_words', 1)
    description = dict_input.get('description', '')
    description = escape(description)
    webhook = dict_input.get('webhook', '')
    webhook = escape(webhook)
    res = api_validate_tracker_to_add(to_track, tracker_type, nb_words=nb_words)
    if res[1] != 200:
        return res
    to_track = res[0]['tracked']
    tracker_type = res[0]['type']

    tags = dict_input.get('tags', [])
    mails = dict_input.get('mails', [])
    res = verify_mail_list(mails)
    if res:
        return res

    # Filters # TODO MOVE ME
    filters = dict_input.get('filters', {})
    if filters:
        if filters.keys() == set(get_objects_tracked()) and set(filters['pgp'].get('subtypes', [])) == {'mail', 'name'}:
            filters = {}
        for obj_type in filters:
            if obj_type not in get_objects_tracked():
                return {"status": "error", "reason": "Invalid Tracker Object type"}, 400

            if obj_type == 'pgp':
                if set(filters['pgp'].get('subtypes', [])) == {'mail', 'name'}:
                    filters['pgp'].pop('subtypes')

            for filter_name in filters[obj_type]:
                if filter_name not in {'mimetypes', 'sources', 'subtypes'}:
                    return {"status": "error", "reason": "Invalid Filter"}, 400
                elif filter_name == 'mimetypes':  # TODO
                    pass
                elif filter_name == 'sources':
                    if obj_type == 'item':
                        res = item_basic.verify_sources_list(filters['item']['sources'])
                        if res:
                            return res
                    else:
                        return {"status": "error", "reason": "Invalid Filter sources"}, 400
                elif filter_name == 'subtypes':
                    obj_subtypes = set(get_object_all_subtypes(obj_type))
                    for subtype in filters[obj_type]['subtypes']:
                        if subtype not in obj_subtypes:
                            return {"status": "error", "reason": "Invalid Tracker Object subtype"}, 400

    level = dict_input.get('level', 1)
    try:
        level = int(level)
    except (TypeError, ValueError):  # missing or non-numeric value
        level = 1
    if level not in range(0, 3):
        level = 1

    tracker_uuid = create_tracker(tracker_type, to_track, org, user_id, level, description=description, filters=filters,
                                  tags=tags, mails=mails, webhook=webhook)

    return {'tracked': to_track, 'type': tracker_type, 'uuid': tracker_uuid}, 200

def api_edit_tracker(dict_input, user_org, user_id, user_role):
    tracker_uuid = dict_input.get('uuid')
    res = api_check_tracker_acl(tracker_uuid, user_org, user_id, user_role, 'edit')
    if res:
        return res

    tracker = Tracker(tracker_uuid)

    to_track = dict_input.get('tracked', None)
    if not to_track:
        return {"status": "error", "reason": "Tracker not provided"}, 400
    tracker_type = dict_input.get('type', None)
    if not tracker_type:
        return {"status": "error", "reason": "Tracker type not provided"}, 400

    level = dict_input.get('level', 1)
    try:
        level = int(level)
    except (TypeError, ValueError):  # missing or non-numeric value
        level = 1
    if level not in range(0, 3):
        level = 1
    res = api_is_allowed_to_edit_tracker_level(tracker_uuid, user_org, user_id, user_role, level)
    if res:
        return res

    nb_words = dict_input.get('nb_words', 1)
    description = dict_input.get('description', '')
    description = escape(description)
    webhook = dict_input.get('webhook', '')
    webhook = escape(webhook)
    res = api_validate_tracker_to_add(to_track, tracker_type, nb_words=nb_words)
    if res[1] != 200:
        return res
    to_track = res[0]['tracked']
    tracker_type = res[0]['type']

    tags = dict_input.get('tags', [])
    mails = dict_input.get('mails', [])
    res = verify_mail_list(mails)
    if res:
        return res

    # Filters # TODO MOVE ME
    filters = dict_input.get('filters', {})
    if filters:
        if filters.keys() == set(get_objects_tracked()) and set(filters['pgp'].get('subtypes', [])) == {'mail', 'name'}:
            if not filters['decoded'] and not filters['item']:
                filters = {}
        for obj_type in filters:
            if obj_type not in get_objects_tracked():
                return {"status": "error", "reason": "Invalid Tracker Object type"}, 400

            if obj_type == 'pgp':
                if set(filters['pgp'].get('subtypes', [])) == {'mail', 'name'}:
                    filters['pgp'].pop('subtypes')

            for filter_name in filters[obj_type]:
                if filter_name not in {'mimetypes', 'sources', 'subtypes'}:
                    return {"status": "error", "reason": "Invalid Filter"}, 400
                elif filter_name == 'mimetypes':  # TODO
                    pass
                elif filter_name == 'sources':
                    if obj_type == 'item':
                        res = item_basic.verify_sources_list(filters['item']['sources'])
                        if res:
                            return res
                    else:
                        return {"status": "error", "reason": "Invalid Filter sources"}, 400
                elif filter_name == 'subtypes':
                    obj_subtypes = set(get_object_all_subtypes(obj_type))
                    for subtype in filters[obj_type]['subtypes']:
                        if subtype not in obj_subtypes:
                            return {"status": "error", "reason": "Invalid Tracker Object subtype"}, 400

    tracker.edit(tracker_type, to_track, level, user_org, description=description, filters=filters,
                 tags=tags, mails=mails, webhook=webhook)
    return {'tracked': to_track, 'type': tracker_type, 'uuid': tracker_uuid}, 200

def api_delete_tracker(data, user_org, user_id, user_role):
    tracker_uuid = data.get('uuid')
    res = api_check_tracker_acl(tracker_uuid, user_org, user_id, user_role, 'delete')
    if res:
        return res

    tracker = Tracker(tracker_uuid)
    return tracker.delete(), 200

def api_tracker_add_object(data, user_org, user_id, user_role):
    tracker_uuid = data.get('uuid')
    res = api_check_tracker_acl(tracker_uuid, user_org, user_id, user_role, 'edit')
    if res:
        return res
    tracker = Tracker(tracker_uuid)
    object_gid = data.get('gid')
    date = data.get('date')
    if date:
        if not Date.validate_str_date(date):
            date = None
    try:
        obj_type, subtype, obj_id = object_gid.split(':', 2)
    except (AttributeError, ValueError):  # missing gid or malformed 'type:subtype:id'
        return {"status": "error", "reason": "Invalid Object"}, 400
    return tracker.add(obj_type, subtype, obj_id, date=date), 200

def api_tracker_remove_object(data, user_org, user_id, user_role):
    tracker_uuid = data.get('uuid')
    res = api_check_tracker_acl(tracker_uuid, user_org, user_id, user_role, 'edit')
    if res:
        return res

    tracker = Tracker(tracker_uuid)
    object_gid = data.get('gid')
    try:
        obj_type, subtype, obj_id = object_gid.split(':', 2)
    except (AttributeError, ValueError):  # missing gid or malformed 'type:subtype:id'
        return {"status": "error", "reason": "Invalid Object"}, 400
    return tracker.remove(obj_type, subtype, obj_id), 200

## -- CREATE TRACKER -- ##
|
2020-12-08 16:47:55 +01:00
|
|
|
|
2023-05-04 16:35:56 +02:00
|
|
|
####################
|
|
|
|
#### WORD - SET ####
|
|
|
|
|
|
|
|
def get_tracked_words():
    to_track = {}
    for obj_type in get_objects_tracked():
        to_track[obj_type] = _get_tracked_by_obj_type('word', obj_type)
    return to_track

def get_tracked_sets():
    to_track = {}
    for obj_type in get_objects_tracked():
        to_track[obj_type] = []
        for tracked in _get_tracked_by_obj_type('set', obj_type):
            res = tracked.split(';')
            nb_words = int(res[1])
            words_set = res[0].split(',')
            to_track[obj_type].append({'words': words_set, 'nb': nb_words, 'tracked': tracked})
    return to_track

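# Format sketch: a tracked set is stored as '<word1>,<word2>,...;<nb>', with
# <nb> interpreted here as the number of listed words required for a match.
# For example (hypothetical): 'ransomware,bitcoin,wallet;2' parses into
# {'words': ['ransomware', 'bitcoin', 'wallet'], 'nb': 2, 'tracked': ...}.
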
def get_text_word_frequency(content, filtering=True):
    content = content.lower()
    words_dict = defaultdict(int)

    if filtering:
        if TOKENIZER is None:
            init_tokenizer()
        blob = TextBlob(content, tokenizer=TOKENIZER)
    else:
        blob = TextBlob(content)
    for word in blob.tokens:
        words_dict[word] += 1
    return words_dict

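# Usage sketch (requires textblob + nltk): frequencies are case-insensitive and,
# with filtering enabled, tokens are split on the punctuation/whitespace classes
# of the module-level RegexpTokenizer.
# freqs = get_text_word_frequency('Bitcoin wallet... bitcoin!')
# freqs['bitcoin'] -> 2, freqs['wallet'] -> 1
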
###############
#### REGEX ####

def get_tracked_regexs():
    to_track = {}
    for obj_type in get_objects_tracked():
        to_track[obj_type] = []
        for tracked in _get_tracked_by_obj_type('regex', obj_type):
            to_track[obj_type].append({'regex': re.compile(tracked), 'tracked': tracked})
    return to_track

########################
#### TYPO SQUATTING ####

def get_tracked_typosquatting_domains(tracked):
    return r_tracker.smembers(f'tracker:typosquatting:{tracked}')

def get_tracked_typosquatting():
    to_track = {}
    for obj_type in get_objects_tracked():
        to_track[obj_type] = []
        for tracked in _get_tracked_by_obj_type('typosquatting', obj_type):
            to_track[obj_type].append({'domains': get_tracked_typosquatting_domains(tracked), 'tracked': tracked})
    return to_track


##############
#### YARA ####

def get_yara_rules_dir():
    return os.path.join(os.environ['AIL_BIN'], 'trackers', 'yara')

def get_yara_rules_default_dir():
    return os.path.join(os.environ['AIL_BIN'], 'trackers', 'yara', 'ail-yara-rules', 'rules')

# # TODO: cache + update
def get_all_default_yara_rules_types():
    yara_dir = get_yara_rules_default_dir()
    all_yara_types = next(os.walk(yara_dir))[1]
    # save in cache ?
    return all_yara_types

# # TODO: cache + update
def get_all_default_yara_files():
    yara_dir = get_yara_rules_default_dir()
    all_default_yara_files = {}
    for rules_type in get_all_default_yara_rules_types():
        all_default_yara_files[rules_type] = os.listdir(os.path.join(yara_dir, rules_type))
    return all_default_yara_files

def get_all_default_yara_rules_by_type(yara_types):
    all_default_yara_files = get_all_default_yara_files()
    if yara_types in all_default_yara_files:
        return all_default_yara_files[yara_types]
    else:
        return []

def get_all_tracked_yara_files(filter_disabled=False):
    yara_files = r_tracker.smembers('all:tracker:yara')
    if not yara_files:
        yara_files = []
    if filter_disabled:
        pass  # TODO filter disabled trackers
    return yara_files

def get_tracked_yara_rules():
    to_track = {}
    for obj_type in get_objects_tracked():
        rules = {}
        for tracked in _get_tracked_by_obj_type('yara', obj_type):
            rule = os.path.join(get_yara_rules_dir(), tracked)
            if not os.path.exists(rule):
                logger.critical(f"Yara rule doesn't exist {tracked} : {obj_type}")
            else:
                rules[tracked] = rule
        to_track[obj_type] = yara.compile(filepaths=rules)
    return to_track

def reload_yara_rules():
    yara_files = get_all_tracked_yara_files()
    # {uuid: filename}
    rule_dict = {}
    for yar_path in yara_files:
        for tracker_uuid in get_trackers_by_tracked('yara', yar_path):
            rule_dict[tracker_uuid] = os.path.join(get_yara_rules_dir(), yar_path)
    for tracker_uuid in rule_dict:
        if not os.path.isfile(rule_dict[tracker_uuid]):
            # TODO IGNORE + LOGS
            raise Exception(f"Error: {rule_dict[tracker_uuid]} doesn't exist")
    rules = yara.compile(filepaths=rule_dict)
    return rules

def is_valid_yara_rule(yara_rule):
    try:
        yara.compile(source=yara_rule)
        return True
    except:
        return False

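# Usage sketch (requires yara-python): compilation is the validity test, so any
# syntactically valid rule passes, e.g.:
# is_valid_yara_rule('rule dummy { condition: true }')  # -> True
# is_valid_yara_rule('rule broken {')                   # -> False
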
def is_default_yara_rule(tracked_yara_name):
    yara_dir = get_yara_rules_dir()
    filename = os.path.join(yara_dir, tracked_yara_name)
    filename = os.path.realpath(filename)
    try:
        if tracked_yara_name.split('/')[0] == 'custom-rules':
            return False
    except:
        return False
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        return False
    else:
        if os.path.isfile(filename):
            return True
    return False

def is_valid_default_yara_rule(yara_rule, verbose=True):
    yara_dir = get_yara_rules_default_dir()
    filename = os.path.join(yara_dir, yara_rule)
    filename = os.path.realpath(filename)
    # incorrect filename
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        if verbose:
            print('error: file traversal')
            print(yara_dir)
            print(filename)
        return False
    else:
        if os.path.isfile(filename):
            return True
        else:
            return False

def save_yara_rule(yara_rule_type, yara_rule, tracker_uuid=None):
    filename = None  # avoid an unbound name if the rule type is unknown
    if yara_rule_type == 'yara_custom':
        if not tracker_uuid:
            tracker_uuid = str(uuid.uuid4())
        filename = os.path.join('custom-rules', tracker_uuid + '.yar')
        with open(os.path.join(get_yara_rules_dir(), filename), 'w') as f:
            f.write(str(yara_rule))
    elif yara_rule_type == 'yara_default':
        filename = os.path.join('ail-yara-rules', 'rules', yara_rule)
    return filename

def get_yara_rule_file_by_tracker_name(tracked_yara_name):
    yara_dir = get_yara_rules_dir()
    filename = os.path.join(yara_dir, tracked_yara_name)
    filename = os.path.realpath(filename)
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        print('error: file traversal')
        print(yara_dir)
        print(filename)
        return None
    return filename

def get_yara_rule_content(yara_rule):
    yara_dir = get_yara_rules_dir()
    filename = os.path.join(yara_dir, yara_rule)
    filename = os.path.realpath(filename)

    # incorrect filename
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        return ''  # # TODO: throw exception

    with open(filename, 'r') as f:
        rule_content = f.read()
    return rule_content

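# Guard sketch: realpath() resolves '..' and symlinks before the prefix check,
# so a hypothetical input like '../../etc/passwd' resolves outside yara_dir and
# fails the commonprefix test. (os.path.commonpath would be a stricter check,
# since commonprefix compares raw strings, but the behavior above is kept as-is.)
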
def api_get_default_rule_content(default_yara_rule):
    yara_dir = get_yara_rules_default_dir()
    filename = os.path.join(yara_dir, default_yara_rule)
    filename = os.path.realpath(filename)
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        return {'status': 'error', 'reason': 'file traversal detected'}, 400

    if not os.path.isfile(filename):
        return {'status': 'error', 'reason': 'yara rule not found'}, 400

    with open(filename, 'r') as f:
        rule_content = f.read()
    return {'rule_name': default_yara_rule, 'content': rule_content}, 200

def get_yara_rule_content_restapi(request_dict):
    if not request_dict:
        return {'status': 'error', 'reason': 'Malformed JSON'}, 400
    rule_name = request_dict.get('rule_name', None)
    if not rule_name:
        return {'status': 'error', 'reason': 'Mandatory parameter(s) not provided'}, 400
    yara_dir = get_yara_rules_dir()
    filename = os.path.join(yara_dir, rule_name)
    filename = os.path.realpath(filename)
    if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
        return {'status': 'error', 'reason': 'File Path Traversal'}, 400
    if not os.path.isfile(filename):
        return {'status': 'error', 'reason': 'yara rule not found'}, 400
    with open(filename, 'r') as f:
        rule_content = f.read()
    rule_content = base64.b64encode(rule_content.encode('utf-8')).decode('UTF-8')
    return {'status': 'success', 'content': rule_content}, 200

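# Usage sketch (hypothetical rule name): the rule content is returned
# base64-encoded, so callers must decode it.
# res, code = get_yara_rule_content_restapi({'rule_name': 'custom-rules/xxx.yar'})
# if code == 200:
#     rule_text = base64.b64decode(res['content']).decode('utf-8')
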
## -- YARA -- ##


######################
#### RETRO - HUNT ####

# state: pending/running/completed/paused

class RetroHunt:

    def __init__(self, task_uuid):
        self.uuid = task_uuid

    def exists(self):
        return r_tracker.exists(f'retro_hunt:{self.uuid}')

    def _get_field(self, field):
        return r_tracker.hget(f'retro_hunt:{self.uuid}', field)

    def _set_field(self, field, value):
        return r_tracker.hset(f'retro_hunt:{self.uuid}', field, value)

    ## LEVEL ##

    def get_level(self):
        # fetch before casting: the field may be unset (None)
        level = self._get_field('level')
        if not level:
            level = 1
        return int(level)

    def set_level(self, level, org_uuid):
        if level == 1:  # global
            r_tracker.sadd('retro_hunts', self.uuid)
        elif level == 2:  # org only
            self.add_to_org(org_uuid)
        self._set_field('level', level)

    def delete_level(self, level=None):
        if not level:
            level = self.get_level()
        if level == 1:
            r_tracker.srem('retro_hunts', self.uuid)
        # Org
        elif level == 2:
            ail_orgs.remove_obj_to_org(self.get_org(), 'retro_hunt', self.uuid)

    def reset_level(self, old_level, new_level, new_org_uuid):
        self.delete_level(old_level)
        self.set_level(new_level, new_org_uuid)

    def check_level(self, user_org):
        level = self.get_level()
        if level == 1:
            return True
        elif level == 2:
            return self.get_org() == user_org

    ## ORG ##

    def get_creator_org(self):
        return self._get_field('creator_org')

    def get_org(self):
        return self._get_field('org')

    def add_to_org(self, org_uuid):
        self._set_field('org', org_uuid)
        ail_orgs.add_obj_to_org(org_uuid, 'retro_hunt', self.uuid)

    ## -ORG- ##

    def get_creator(self):
        return self._get_field('creator')

    def get_date(self):
        return self._get_field('date')

    def get_last_analyzed(self):
        return self._get_field('last')

    def set_last_analyzed(self, obj_type, subtype, obj_id):
        return self._set_field('last', f'{obj_type}:{subtype}:{obj_id}')

    def get_last_analyzed_cache(self):
        return r_cache.hget(f'retro_hunt:task:{self.uuid}', 'obj')

    def set_last_analyzed_cache(self, obj_type, subtype, obj_id):
        r_cache.hset(f'retro_hunt:task:{self.uuid}', 'obj', f'{obj_type}:{subtype}:{obj_id}')

    def get_name(self):
        return self._get_field('name')

    def get_description(self):
        return self._get_field('description')

    def get_timeout(self):
        res = self._get_field('timeout')
        if res:
            return int(res)
        else:
            return 30  # # TODO: FIXME use instance limit

    def get_filters(self):
        filters = self._get_field('filters')
        if not filters:
            return {}
        else:
            return json.loads(filters)

    def set_filters(self, filters):
        if filters:
            self._set_field('filters', json.dumps(filters))

    def get_tags(self):
        return r_tracker.smembers(f'retro_hunt:tags:{self.uuid}')

    def get_mails(self):
        return r_tracker.smembers(f'retro_hunt:mails:{self.uuid}')

    def get_state(self):
        return self._get_field('state')

    def _set_state(self, new_state):
        curr_state = self.get_state()
        if curr_state:
            # remove from the current state set (same key scheme as the sadd below)
            r_tracker.srem(f'retro_hunts:{curr_state}', self.uuid)
        r_tracker.sadd(f'retro_hunts:{new_state}', self.uuid)
        self._set_field('state', new_state)

    def get_rule(self, r_compile=False):
        rule = self._get_field('rule')
        if r_compile:
            rule_dict = {self.uuid: os.path.join(get_yara_rules_dir(), rule)}
            rule = yara.compile(filepaths=rule_dict)
        return rule

    # add timeout ?
    def get_meta(self, options=set()):
        meta = {'uuid': self.uuid,
                'name': self.get_name(),
                'state': self.get_state(),
                'rule': self.get_rule(),
                }
        if 'creator' in options:
            meta['creator'] = self.get_creator()
        if 'date' in options:
            meta['date'] = self.get_date()
        if 'description' in options:
            meta['description'] = self.get_description()
        if 'level' in options:
            meta['level'] = self.get_level()
        if 'mails' in options:
            meta['mails'] = self.get_mails()
        if 'nb_match' in options:
            meta['nb_match'] = self.get_nb_match()
        if 'nb_objs' in options:
            meta['nb_objs'] = self.get_nb_objs()
        if 'org' in options:
            meta['org'] = self.get_org()
        if 'org_name' in options:
            meta['org_name'] = ail_orgs.Organisation(self.get_org()).get_name()
        if 'progress' in options:
            meta['progress'] = self.get_progress()
        if 'filters' in options:
            meta['filters'] = self.get_filters()
        if 'tags' in options:
            meta['tags'] = self.get_tags()
        return meta

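    # Usage sketch: callers opt in to the extra fields they need, e.g.:
    # meta = RetroHunt(task_uuid).get_meta(options={'date', 'progress', 'tags'})
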
    def is_paused(self):
        return r_tracker.sismember('retro_hunts:paused', self.uuid)

    def to_pause(self):
        to_pause = r_cache.hget(f'retro_hunt:{self.uuid}', 'pause')
        if to_pause:
            return True
        else:
            return False

    def pause(self):
        self._set_state('paused')
        r_cache.hset(f'retro_hunt:{self.uuid}', 'pause', time.time())
        self.clear_cache()

    def resume(self):
        r_cache.hdel(f'retro_hunt:{self.uuid}', 'pause')
        self._set_state('pending')

    def is_running(self):
        return r_tracker.sismember('retro_hunts:running', self.uuid)

    def run(self):  # TODO ADD MORE CHECK
        self._set_state('running')

    def complete(self):
        self._set_state('completed')
        self.clear_cache()
        r_tracker.hdel(f'retro_hunt:{self.uuid}', 'last')

    def get_progress(self):
        if self.get_state() == 'completed':
            progress = 100
        else:
            progress = r_cache.hget(f'retro_hunt:{self.uuid}', 'progress')
            if not progress:
                progress = self._get_field('progress')
        return progress

    def set_progress(self, progress):
        res = r_cache.hset(f'retro_hunt:{self.uuid}', 'progress', progress)
        if res:
            self._set_field('progress', progress)

    def get_nb_match(self):
        return self._get_field('nb_match')

    def _incr_nb_match(self):
        r_tracker.hincrby(f'retro_hunt:{self.uuid}', 'nb_match', 1)

    def _decr_nb_match(self):
        r_tracker.hincrby(f'retro_hunt:{self.uuid}', 'nb_match', -1)

    def _set_nb_match(self, nb_match):
        self._set_field('nb_match', nb_match)

    def clear_cache(self):
        r_cache.delete(f'retro_hunt:{self.uuid}')

    def get_nb_objs_by_type(self, obj_type):
        return r_tracker.scard(f'retro_hunt:objs:{self.uuid}:{obj_type}')

    def get_objs_by_type(self, obj_type):
        return r_tracker.smembers(f'retro_hunt:objs:{self.uuid}:{obj_type}')

    def get_nb_objs(self):
        objs = {}
        for obj_type in get_objects_retro_hunted():
            nb = self.get_nb_objs_by_type(obj_type)
            if nb:
                objs[obj_type] = nb
        return objs

    def get_objs(self):
        objs = []
        for obj_type in get_objects_retro_hunted():
            for obj in self.get_objs_by_type(obj_type):
                subtype, obj_id = obj.split(':', 1)
                objs.append((obj_type, subtype, obj_id))
        return objs

    def add(self, obj_type, subtype, obj_id):
        # match by object type:
        r_tracker.sadd(f'retro_hunt:objs:{self.uuid}:{obj_type}', f'{subtype}:{obj_id}')
        # MAP object -> retro hunt
        r_tracker.sadd(f'obj:retro_hunts:{obj_type}:{subtype}:{obj_id}', self.uuid)
        self._incr_nb_match()

    def remove(self, obj_type, subtype, obj_id):
        # match by object type:
        r_tracker.srem(f'retro_hunt:objs:{self.uuid}:{obj_type}', f'{subtype}:{obj_id}')
        # MAP object -> retro hunt
        r_tracker.srem(f'obj:retro_hunts:{obj_type}:{subtype}:{obj_id}', self.uuid)
        self._decr_nb_match()

    def create(self, org_uuid, user_id, level, name, rule, description=None, filters=[], mails=[], tags=[], timeout=30, state='pending'):
        if self.exists():
            raise Exception('Error: Retro Hunt Task already exists')

        self._set_field('name', escape(name))
        self._set_field('rule', rule)
        self._set_field('date', datetime.date.today().strftime("%Y%m%d"))
        self._set_field('creator_org', org_uuid)
        self._set_field('creator', user_id)
        if description:
            self._set_field('description', description)
        if timeout:
            self._set_field('timeout', int(timeout))
        for tag in tags:
            # tag = escape(tag)
            r_tracker.sadd(f'retro_hunt:tags:{self.uuid}', tag)
            Tag.create_custom_tag(tag)
        for mail in mails:
            r_tracker.sadd(f'retro_hunt:mails:{self.uuid}', escape(mail))

        if filters:
            self.set_filters(filters)

        self.set_level(level, org_uuid)
        r_tracker.sadd('retro_hunts:all', self.uuid)

        # add to pending tasks
        if state not in ('pending', 'completed', 'paused'):
            state = 'pending'
        self._set_state(state)

    def delete(self):
        if self.is_running() and self.get_state() not in ['completed', 'paused']:
            return None

        # Delete custom rule
        rule = self.get_rule()
        if not is_default_yara_rule(rule):
            filepath = get_yara_rule_file_by_tracker_name(rule)
            if filepath:
                os.remove(filepath)

        self.delete_level()

        r_tracker.delete(f'retro_hunt:{self.uuid}')
        r_tracker.delete(f'retro_hunt:tags:{self.uuid}')
        r_tracker.delete(f'retro_hunt:mails:{self.uuid}')

        for obj in self.get_objs():
            self.remove(obj[0], obj[1], obj[2])

        r_tracker.srem('retro_hunts:all', self.uuid)
        r_tracker.srem('retro_hunts:pending', self.uuid)
        r_tracker.srem('retro_hunts:paused', self.uuid)
        r_tracker.srem('retro_hunts:completed', self.uuid)

        self.clear_cache()
        return self.uuid

def create_retro_hunt(user_org, user_id, level, name, rule_type, rule, description=None, filters=[], mails=[], tags=[], timeout=30, state='pending', task_uuid=None):
    if not task_uuid:
        task_uuid = str(uuid.uuid4())
    retro_hunt = RetroHunt(task_uuid)
    # rule_type: yara_default - yara custom
    rule = save_yara_rule(rule_type, rule, tracker_uuid=retro_hunt.uuid)
    retro_hunt.create(user_org, user_id, level, name, rule, description=description, mails=mails, tags=tags,
                      timeout=timeout, filters=filters, state=state)
    return retro_hunt.uuid

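# Usage sketch (hypothetical values), in the spirit of the commented examples at
# the bottom of this module:
# task_uuid = create_retro_hunt(user_org, 'admin@admin.test', 1, 'retro_test',
#                               'yara_custom', 'rule dummy { condition: true }',
#                               tags=['infoleak'], mails=['admin@admin.test'])
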
# TODO
# def _re_create_retro_hunt_task(name, rule, date, date_from, date_to, creator, sources, tags, mails, timeout, description, task_uuid, state='pending', nb_match=0, last_id=None):
#     retro_hunt = RetroHunt(task_uuid)
#     retro_hunt.create(name, rule, date_from, date_to, creator, description=description, mails=mails, tags=tags,
#                       timeout=timeout, sources=sources, state=state)
#     if last_id:
#         set_retro_hunt_last_analyzed(task_uuid, last_id)
#     retro_hunt._set_nb_match(nb_match)
#     retro_hunt._set_field('date', date)

## ? ? ?
# set tags
# set mails
# limit mail

# SET Retro Hunts

def get_all_retro_hunt_tasks():
    return r_tracker.smembers('retro_hunts:all')

def get_retro_hunts_global():
    return r_tracker.smembers('retro_hunts')

def get_retro_hunts_org(org_uuid):
    return ail_orgs.get_org_objs_by_type(org_uuid, 'retro_hunt')

def get_retro_hunts_orgs():
    retros = []
    for retro_uuid in get_all_retro_hunt_tasks():
        retro = RetroHunt(retro_uuid)
        if retro.get_level() == 2:
            retros.append(retro_uuid)
    return retros

def get_retro_hunt_pending_tasks():
    return r_tracker.smembers('retro_hunts:pending')

def get_retro_hunt_running_tasks():
    return r_tracker.smembers('retro_hunts:running')

def get_retro_hunt_paused_tasks():
    return r_tracker.smembers('retro_hunts:paused')

def get_retro_hunt_completed_tasks():
    return r_tracker.smembers('retro_hunts:completed')

## Change STATES ##

def get_retro_hunt_task_to_start():
    task_uuid = r_tracker.spop('retro_hunts:pending')
    if task_uuid:
        retro_hunt = RetroHunt(task_uuid)
        retro_hunt.run()
    return task_uuid

## Metadata ##

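# Worker sketch (illustrative only): a retro hunt worker can poll the pending
# set and process one task at a time.
# task_uuid = get_retro_hunt_task_to_start()
# if task_uuid:
#     rules = RetroHunt(task_uuid).get_rule(r_compile=True)
#     # ... scan objects, then RetroHunt(task_uuid).complete()
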
def get_retro_hunt_metas(trackers_uuid):
    tasks = []
    for task_uuid in trackers_uuid:
        retro_hunt = RetroHunt(task_uuid)
        tasks.append(retro_hunt.get_meta(options={'date', 'progress', 'org', 'org_name', 'nb_match', 'tags'}))
    return tasks

## Objects ##

def is_obj_retro_hunted(obj_type, subtype, obj_id):
    return r_tracker.exists(f'obj:retro_hunts:{obj_type}:{subtype}:{obj_id}')

def get_obj_retro_hunts(obj_type, subtype, obj_id):
    return r_tracker.smembers(f'obj:retro_hunts:{obj_type}:{subtype}:{obj_id}')

def delete_obj_retro_hunts(obj_type, subtype, obj_id):
    for retro_uuid in get_obj_retro_hunts(obj_type, subtype, obj_id):
        retro_hunt = RetroHunt(retro_uuid)
        retro_hunt.remove(obj_type, subtype, obj_id)

#### ACL ####

def api_check_retro_hunt_acl(retro_hunt, user_org, user_id, user_role, action):
    if not ail_orgs.check_obj_access_acl(retro_hunt, user_org, user_id, user_role, action):
        return {"status": "error", "reason": "Access Denied"}, 403

# TODO
def api_is_allowed_to_edit_retro_hunt_level(retro_hunt, user_org, user_id, user_role, new_level):
    if not ail_orgs.check_acl_edit_level(retro_hunt, user_org, user_id, user_role, new_level):
        return {"status": "error", "reason": "Access Denied - Tracker level"}, 403

#### API ####

def api_check_retro_hunt_task_uuid(task_uuid):
    if not is_valid_uuid_v4(task_uuid):
        return {"status": "error", "reason": "Invalid uuid"}, 400
    retro_hunt = RetroHunt(task_uuid)
    if not retro_hunt.exists():
        return {"status": "error", "reason": "Unknown uuid"}, 404
    return None

def api_pause_retro_hunt_task(user_org, user_id, user_role, task_uuid):
    res = api_check_retro_hunt_task_uuid(task_uuid)
    if res:
        return res
    retro_hunt = RetroHunt(task_uuid)
    res = api_check_retro_hunt_acl(retro_hunt, user_org, user_id, user_role, 'edit')
    if res:
        return res
    task_state = retro_hunt.get_state()
    if task_state not in ['pending', 'running']:
        return {"status": "error", "reason": f"Task {task_uuid} can't be paused, current state: {task_state}"}, 400
    retro_hunt.pause()
    return task_uuid, 200

def api_resume_retro_hunt_task(user_org, user_id, user_role, task_uuid):
    res = api_check_retro_hunt_task_uuid(task_uuid)
    if res:
        return res
    retro_hunt = RetroHunt(task_uuid)
    res = api_check_retro_hunt_acl(retro_hunt, user_org, user_id, user_role, 'edit')
    if res:
        return res
    if not retro_hunt.is_paused():
        return {"status": "error",
                "reason": f"Task {task_uuid} not paused, current state: {retro_hunt.get_state()}"}, 400
    retro_hunt.resume()
    return task_uuid, 200

def api_validate_rule_to_add(rule, rule_type):
    if rule_type == 'yara_custom':
        if not is_valid_yara_rule(rule):
            return {"status": "error", "reason": "Invalid custom Yara Rule"}, 400
    elif rule_type == 'yara_default':
        if not is_valid_default_yara_rule(rule):
            return {"status": "error", "reason": "The Yara Rule doesn't exist"}, 400
    else:
        return {"status": "error", "reason": "Incorrect type"}, 400
    return {"status": "success", "rule": rule, "type": rule_type}, 200

def api_create_retro_hunt_task(dict_input, user_org, user_id):
    # # TODO: API: check mandatory arg
    # # TODO: TIMEOUT

    # timeout=30
    rule = dict_input.get('rule', None)
    if not rule:
        return {"status": "error", "reason": "Retro Hunt Rule not provided"}, 400
    task_type = dict_input.get('type', None)
    if not task_type:
        return {"status": "error", "reason": "type not provided"}, 400

    # Level
    level = dict_input.get('level', 1)
    try:
        level = int(level)
    except (TypeError, ValueError):
        level = 1
    if level not in range(1, 3):
        level = 1

    # # TODO: limit
    name = dict_input.get('name', '')
    name = escape(name)
    name = name[:60]
    # # TODO: limit
    description = dict_input.get('description', '')
    description = escape(description)
    description = description[:1000]

    res = api_validate_rule_to_add(rule, task_type)
    if res[1] != 200:
        return res

    tags = dict_input.get('tags', [])  # TODO escape custom tags
    mails = dict_input.get('mails', [])  # TODO escape mails
    res = verify_mail_list(mails)
    if res:
        return res

    # Filters # TODO MOVE ME
    filters = dict_input.get('filters', {})
    if filters:
        if set(filters.keys()) == set(get_objects_retro_hunted()):
            filters = {}
        for obj_type in filters:
            if obj_type not in get_objects_retro_hunted():
                return {"status": "error", "reason": "Invalid Tracker Object type"}, 400

            for filter_name in filters[obj_type]:
                if filter_name not in {'date_from', 'date_to', 'mimetypes', 'sources', 'subtypes'}:
                    return {"status": "error", "reason": "Invalid Filter"}, 400
                elif filter_name == 'date_from':
                    if not Date.validate_str_date(filters[obj_type]['date_from']):
                        return {"status": "error", "reason": "Invalid date_from"}, 400
                elif filter_name == 'date_to':
                    if not Date.validate_str_date(filters[obj_type]['date_to']):
                        return {"status": "error", "reason": "Invalid date_to"}, 400
                elif filter_name == 'mimetypes':  # TODO sanitize mimetypes
                    pass
                elif filter_name == 'sources':
                    if obj_type == 'item':
                        res = item_basic.verify_sources_list(filters['item']['sources'])
                        if res:
                            return res
                    else:
                        return {"status": "error", "reason": "Invalid Filter sources"}, 400
                elif filter_name == 'subtypes':
                    obj_subtypes = set(get_object_all_subtypes(obj_type))
                    for subtype in filters[obj_type]['subtypes']:
                        if subtype not in obj_subtypes:
                            return {"status": "error", "reason": "Invalid Tracker Object subtype"}, 400

            if 'date_from' in filters[obj_type] and 'date_to' in filters[obj_type]:
                res = Date.api_validate_str_date_range(filters[obj_type]['date_from'], filters[obj_type]['date_to'])
                if res:
                    return res

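    # Illustrative sketch (hypothetical values): a retro hunt filters payload
    # accepted by the validation above.
    # filters = {'item': {'date_from': '20240101', 'date_to': '20240131',
    #                     'sources': ['crawled']}}
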
    task_uuid = create_retro_hunt(user_org, user_id, level, name, task_type, rule, description=description,
                                  mails=mails, tags=tags, timeout=30, filters=filters)
    return {'name': name, 'rule': rule, 'type': task_type, 'uuid': task_uuid}, 200

def api_delete_retro_hunt_task(user_org, user_id, user_role, task_uuid):
    res = api_check_retro_hunt_task_uuid(task_uuid)
    if res:
        return res
    retro_hunt = RetroHunt(task_uuid)
    res = api_check_retro_hunt_acl(retro_hunt, user_org, user_id, user_role, 'delete')
    if res:
        return res
    if retro_hunt.is_running() and retro_hunt.get_state() not in ['completed', 'paused']:
        return {"status": "error", "reason": "You can't delete a running task"}, 400
    else:
        return retro_hunt.delete(), 200

################################################################################
################################################################################
################################################################################
################################################################################

#### DB FIX ####

# def _fix_db_custom_tags():
#     for tag in get_trackers_tags():
#         if not Tag.is_taxonomie_tag(tag) and not Tag.is_galaxy_tag(tag):
#             Tag.create_custom_tag(tag)

#### -- ####

# if __name__ == '__main__':

#     _fix_db_custom_tags()

# fix_all_tracker_uuid_list()
# res = get_all_tracker_uuid()
# print(len(res))

# import Term
# Term.delete_term('5262ab6c-8784-4a55-b0ff-a471018414b4')

# fix_tracker_stats_per_day('5262ab6c-8784-4a55-b0ff-a471018414b4')

# tracker_uuid = '5262ab6c-8784-4a55-b0ff-a471018414b4'
# fix_tracker_item_link(tracker_uuid)
# res = get_item_all_trackers_uuid('archive/')
# print(res)

# res = is_valid_yara_rule('rule dummy { }')

# res = create_tracker('test', 'word', 'admin@admin.test', 1, [], [], None, sources=['crawled', 'pastebin.com', 'rt/pastebin.com'])
# res = create_tracker('circl\.lu', 'regex', 'admin@admin.test', 1, [], [], None, sources=['crawled','pastebin.com'])
# print(res)

# t_uuid = '1c2d35b0-9330-4feb-b454-da13007aa9f7'
# res = get_tracker_sources('ail-yara-rules/rules/crypto/certificate.yar', 'yara')

# sys.path.append(os.environ['AIL_BIN'])
# from packages import Term
# Term.delete_term('074ab4be-6049-45b5-a20e-8125a4e4f500')

# res = get_items_to_analyze('archive/pastebin.com_pro/2020/05/15', last='archive/pastebin.com_pro/2020/05/15/zkHEgqjQ.gz')
# get_retro_hunt_task_progress('0', nb_src_done=2)

# res = set_cache_retro_hunt_task_progress('0', 100)
# res = get_retro_hunt_task_nb_src_done('0', sources=['pastebin.com_pro', 'alerts/pastebin.com_pro', 'crawled'])
# print(res)

# sources = ['pastebin.com_pro', 'alerts/pastebin.com_pro', 'crawled']
# rule = 'custom-rules/4a8a3d04-f0b6-43ce-8e00-bdf47a8df241.yar'
# name = 'retro_hunt_test_1'
# description = 'circl retro hunt first test'
# tags = ['retro_circl', 'circl']
# creator = 'admin@admin.test'
# date_from = '20200610'
# date_to = '20210630'

# res = create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=sources, tags=tags, description=description)

# get_retro_hunt_nb_item_by_day(['80b402ef-a8a9-4e97-adb6-e090edcfd571'], date_from=None, date_to=None, num_day=31)

# res = get_retro_hunt_nb_item_by_day(['c625f971-16e6-4331-82a7-b1e1b9efdec1'], date_from='20200610', date_to='20210630')

# res = delete_retro_hunt_task('598687b6-f765-4f8b-861a-09ad76d0ab34')

# print(res)