#!/usr/bin/env python3
# -*-coding:UTF-8 -*
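"""
Tracker helpers for the AIL framework (https://github.com/CIRCL/AIL-framework).

This module manages trackers (word, set, regex, yara, typosquatting) stored in
Kvrocks: creation and edition, metadata lookups, per-day match statistics, the
tracker <-> item mapping, YARA rule handling and retro-hunt tasks.
"""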
import os
import re
import sys
import time
import uuid
import yara
import datetime
import base64

from ail_typo_squatting import runAll
import math

from flask import escape

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from packages import Date
from lib import ConfigLoader
from lib import item_basic
from lib import Tag
from lib.Users import User

config_loader = ConfigLoader.ConfigLoader()
r_cache = config_loader.get_redis_conn("Redis_Cache")

r_tracker = config_loader.get_db_conn("Kvrocks_Trackers")

r_serv_tracker = config_loader.get_db_conn("Kvrocks_Trackers")  # TODO REMOVE ME

items_dir = config_loader.get_config_str("Directories", "pastes")
if items_dir[-1] == '/':
    items_dir = items_dir[:-1]
config_loader = None

email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
email_regex = re.compile(email_regex)

special_characters = set('[<>~!?@#$%^&*|()_-+={}":;,.\'\n\r\t]/\\')
special_characters.add('\\s')

###############
#### UTILS ####
def is_valid_uuid_v4(UUID):
    if not UUID:
        return False
    UUID = UUID.replace('-', '')
    try:
        uuid_test = uuid.UUID(hex=UUID, version=4)
        return uuid_test.hex == UUID
    except:
        return False

def is_valid_regex(tracker_regex):
    try:
        re.compile(tracker_regex)
        return True
    except:
        return False

def is_valid_mail(email):
    result = email_regex.match(email)
    if result:
        return True
    else:
        return False

def verify_mail_list(mail_list):
    for mail in mail_list:
        if not is_valid_mail(mail):
            return {'status': 'error', 'reason': 'Invalid email', 'value': mail}, 400
    return None

##-- UTILS --##
###############
|
|
|
|
################################################################################################
|
|
################################################################################################
|
|
################################################################################################
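
# Kvrocks key layout used by the tracker code below (illustrative summary,
# as written by create_tracker() and add_tracked_item()):
#   tracker:{uuid}                      -> hash: tracked, type, date, level, dashboard,
#                                          user_id, description, webhook, first_seen, last_seen
#   tracker:tags:{uuid}                 -> set of tags
#   tracker:mail:{uuid}                 -> set of e-mail addresses to notify
#   tracker:sources:{uuid}              -> set of source filters
#   tracker:typosquatting:{uuid}        -> set of generated typosquatted domains
#   tracker:item:{uuid}:{date}          -> set of matching item ids for that day
#   tracker:stat:{uuid}                 -> zset: nb matches per day
#   obj:trackers:item:{item_id}         -> set of tracker uuids matching an item
#   trackers:all / trackers:all:{type}  -> sets of tracker uuids
#   all:tracker:{type}                  -> set of tracked values
#   all:tracker_uuid:{type}:{tracked}   -> set of tracker uuids for a tracked value
#   user:tracker:{user_id}[:{type}]     -> per-user tracker uuids (level 0)
#   global:tracker[:{type}]             -> global tracker uuids (level 1)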
class Tracker:
|
|
def __init__(self, tracker_uuid):
|
|
self.uuid = tracker_uuid
|
|
|
|
def get_uuid(self):
|
|
return self.uuid
|
|
|
|
def exists(self):
|
|
return r_tracker.exists(f'tracker:{self.uuid}')
|
|
|
|
def get_date(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'date')
|
|
|
|
def get_first_seen(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'first_seen')
|
|
|
|
def get_last_seen(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'last_seen')
|
|
|
|
def get_description(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'description')
|
|
|
|
def get_level(self):
|
|
level = r_tracker.hget(f'tracker:{self.uuid}', 'level')
|
|
if not level:
|
|
level = 0
|
|
return int(level)
|
|
|
|
def get_sources(self):
|
|
return r_tracker.smembers(f'tracker:sources:{self.uuid}')
|
|
|
|
def get_tracker(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'tracked')
|
|
|
|
def get_type(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'type')
|
|
|
|
def get_tags(self):
|
|
return r_tracker.smembers(f'tracker:tags:{self.uuid}')
|
|
|
|
def mail_export(self):
|
|
return r_tracker.exists(f'tracker:mail:{self.uuid}')
|
|
|
|
def get_mails(self):
|
|
return r_tracker.smembers(f'tracker:mail:{self.uuid}')
|
|
|
|
def get_user(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'user_id')
|
|
|
|
    def webhook_export(self):
        # the webhook is stored in the tracker:{uuid} hash, see get_webhook()
        return r_tracker.hexists(f'tracker:{self.uuid}', 'webhook')
|
|
|
|
def get_webhook(self):
|
|
return r_tracker.hget(f'tracker:{self.uuid}', 'webhook')
|
|
|
|
# TODO get objects/ tracked items
|
|
|
|
|
|
# TODO sparkline
|
|
    def get_meta(self, options):
        if not options:
            options = set()
        meta = {'uuid': self.uuid,
                'tracker': self.get_tracker(),
                'type': self.get_type(),
                'date': self.get_date(),
                'first_seen': self.get_first_seen(),
                'last_seen': self.get_last_seen()}
        if 'user' in options:
            meta['user'] = self.get_user()
        if 'level' in options:
            meta['level'] = self.get_level()
        if 'description' in options:
            meta['description'] = self.get_description()
        if 'tags' in options:
            meta['tags'] = self.get_tags()
        if 'sources' in options:
            meta['sources'] = self.get_sources()
        if 'mails' in options:
            meta['mails'] = self.get_mails()
        if 'webhooks' in options:
            meta['webhook'] = self.get_webhook()
        # if 'sparkline' in options:
        #     meta['sparkline'] = get_tracker_sparkline(tracker_uuid)
        return meta
|
|
|
|
|
|
|
|
|
|
# TODO
|
|
def add(self, obj_id):
|
|
pass
|
|
|
|
################################################################################################
|
|
################################################################################################
|
|
################################################################################################
|
|
|
|
def get_all_tracker_type():
|
|
return ['word', 'set', 'regex', 'yara']
|
|
|
|
def get_all_tracker_uuid():
|
|
return r_serv_tracker.smembers(f'trackers:all')
|
|
|
|
def get_all_tracker_uuid_by_type(tracker_type):
|
|
return r_serv_tracker.smembers(f'trackers:all:{tracker_type}')
|
|
|
|
# def get_all_tracker():
|
|
# l_keys_name = []
|
|
# for tracker_type in get_all_tracker_type():
|
|
# l_keys_name.append(f'all:tracker:{tracker_type}')
|
|
# return r_serv_tracker.sunion(l_keys_name[0], *l_keys_name[1:])
|
|
|
|
def get_all_tracker_by_type(tracker_type):
|
|
return r_serv_tracker.smembers(f'all:tracker:{tracker_type}')
|
|
|
|
def get_user_trackers_uuid(user_id, tracker_type=None):
|
|
if tracker_type:
|
|
return r_serv_tracker.smembers(f'user:tracker:{user_id}:{tracker_type}')
|
|
else:
|
|
return r_serv_tracker.smembers(f'user:tracker:{user_id}')
|
|
|
|
def get_global_trackers_uuid(tracker_type=None):
|
|
if tracker_type:
|
|
return r_serv_tracker.smembers(f'global:tracker:{tracker_type}')
|
|
else:
|
|
return r_serv_tracker.smembers('global:tracker')
|
|
|
|
def get_tracker_by_uuid(tracker_uuid):
|
|
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'tracked')
|
|
|
|
def get_tracker_type(tracker_uuid):
|
|
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'type')
|
|
|
|
def get_tracker_level(tracker_uuid):
|
|
level = r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'level')
|
|
if not level:
|
|
level = 0
|
|
return int(level)
|
|
|
|
def get_tracker_user_id(tracker_uuid):
|
|
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'user_id')
|
|
|
|
def get_tracker_uuid_list(tracker, tracker_type): ######################################################### USE ME
|
|
return list(r_serv_tracker.smembers('all:tracker_uuid:{}:{}'.format(tracker_type, tracker)))
|
|
|
|
def get_tracker_tags(tracker_uuid):
|
|
return list(r_serv_tracker.smembers('tracker:tags:{}'.format(tracker_uuid)))
|
|
|
|
def get_tracker_mails(tracker_uuid):
|
|
return list(r_serv_tracker.smembers('tracker:mail:{}'.format(tracker_uuid)))
|
|
|
|
def get_tracker_webhook(tracker_uuid):
|
|
return r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'webhook')
|
|
|
|
def get_tracker_uuid_sources(tracker_uuid):
|
|
return list(r_serv_tracker.smembers(f'tracker:sources:{tracker_uuid}'))
|
|
|
|
def get_tracker_description(tracker_uuid):
|
|
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'description')
|
|
|
|
def get_tracker_date(tracker_uuid):
|
|
return r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'date')
|
|
|
|
def get_tracker_first_seen(tracker_uuid):
|
|
res = r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'first_seen')
|
|
if res:
|
|
return res
|
|
else:
|
|
return None
|
|
|
|
def get_tracker_last_seen(tracker_uuid):
|
|
res = r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'last_seen')
|
|
if res:
|
|
return res
|
|
else:
|
|
return None
|
|
|
|
def get_tracker_metadata(tracker_uuid, user_id=False, description=False, level=False, tags=False, mails=False, sources=True, sparkline=False, webhook=False):
|
|
dict_uuid = {}
|
|
dict_uuid['uuid'] = tracker_uuid
|
|
dict_uuid['tracker'] = get_tracker_by_uuid(tracker_uuid)
|
|
dict_uuid['type'] = get_tracker_type(tracker_uuid)
|
|
dict_uuid['date'] = get_tracker_date(tracker_uuid)
|
|
dict_uuid['first_seen'] = get_tracker_first_seen(tracker_uuid)
|
|
dict_uuid['last_seen'] = get_tracker_last_seen(tracker_uuid)
|
|
if user_id:
|
|
dict_uuid['user_id'] = get_tracker_user_id(tracker_uuid)
|
|
if level:
|
|
dict_uuid['level'] = get_tracker_level(tracker_uuid)
|
|
if mails:
|
|
dict_uuid['mails'] = get_tracker_mails(tracker_uuid)
|
|
if sources:
|
|
dict_uuid['sources'] = get_tracker_uuid_sources(tracker_uuid)
|
|
if tags:
|
|
dict_uuid['tags'] = get_tracker_tags(tracker_uuid)
|
|
if sparkline:
|
|
dict_uuid['sparkline'] = get_tracker_sparkline(tracker_uuid)
|
|
if description:
|
|
dict_uuid['description'] = get_tracker_description(tracker_uuid)
|
|
if webhook:
|
|
dict_uuid['webhook'] = get_tracker_webhook(tracker_uuid)
|
|
|
|
return dict_uuid
|
|
|
|
def get_user_trackers_metadata(user_id, tracker_type=None):
|
|
meta_trackers = []
|
|
for tracker_uuid in get_user_trackers_uuid(user_id, tracker_type=None):
|
|
meta_trackers.append(get_tracker_metadata(tracker_uuid, tags=True, mails=True, sparkline=True))
|
|
return meta_trackers
|
|
|
|
def get_global_trackers_metadata(tracker_type=None):
|
|
meta_trackers = []
|
|
for tracker_uuid in get_global_trackers_uuid(tracker_type=None):
|
|
meta_trackers.append(get_tracker_metadata(tracker_uuid, tags=True, mails=True, sparkline=True))
|
|
return meta_trackers
|
|
|
|
def get_tracker_metadata_api(request_dict):
|
|
tracker_uuid = request_dict.get('tracker_uuid', None)
|
|
if not request_dict:
|
|
return {'status': 'error', 'reason': 'Malformed JSON'}, 400
|
|
if not tracker_uuid:
|
|
return {'status': 'error', 'reason': 'Mandatory parameter(s) not provided'}, 400
|
|
if not is_valid_uuid_v4(tracker_uuid):
|
|
return {"status": "error", "reason": "Invalid Tracker UUID"}, 400
|
|
if not r_serv_tracker.exists(f'tracker:{tracker_uuid}'):
|
|
return {'status': 'error', 'reason': 'Tracker not found'}, 404
|
|
|
|
dict_tracker = {'status': 'success',
|
|
'uuid': tracker_uuid,
|
|
'user_id': get_tracker_user_id(tracker_uuid),
|
|
'tracker': get_tracker_by_uuid(tracker_uuid),
|
|
'type': get_tracker_type(tracker_uuid),
|
|
'date': get_tracker_date(tracker_uuid),
|
|
'first_seen': get_tracker_first_seen(tracker_uuid),
|
|
'last_seen': get_tracker_last_seen(tracker_uuid),
|
|
'level': get_tracker_level(tracker_uuid),
|
|
'mails': get_tracker_mails(tracker_uuid),
|
|
'tags': get_tracker_tags(tracker_uuid),
|
|
'description': get_tracker_description(tracker_uuid),
|
|
'webhook': get_tracker_webhook(tracker_uuid)
|
|
}
|
|
|
|
return dict_tracker, 200
|
|
|
|
# tracker sparkline
|
|
def get_tracker_sparkline(tracker_uuid, num_day=6):
|
|
date_range_sparkline = Date.get_date_range(num_day)
|
|
sparklines_value = []
|
|
for date_day in date_range_sparkline:
|
|
nb_seen_this_day = r_serv_tracker.zscore(f'tracker:stat:{tracker_uuid}', int(date_day))
|
|
if nb_seen_this_day is None:
|
|
nb_seen_this_day = 0
|
|
sparklines_value.append(int(nb_seen_this_day))
|
|
return sparklines_value
|
|
|
|
def get_tracker_items_by_daterange(tracker_uuid, date_from, date_to):
|
|
all_item_id = set()
|
|
if date_from and date_to:
|
|
l_date_match = r_serv_tracker.zrange(f'tracker:stat:{tracker_uuid}', 0, -1, withscores=True)
|
|
if l_date_match:
|
|
dict_date_match = dict(l_date_match)
|
|
for date_day in Date.substract_date(date_from, date_to):
|
|
if date_day in dict_date_match:
|
|
all_item_id |= r_serv_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}')
|
|
return all_item_id
|
|
|
|
def get_tracker_typosquatting_domains(tracker_uuid):
|
|
return r_serv_tracker.smembers(f'tracker:typosquatting:{tracker_uuid}')
|
|
|
|
def get_typosquatting_tracked_words_list():
|
|
typosquattings = {}
|
|
typos_uuid = get_all_tracker_uuid_by_type("typosquatting")
|
|
|
|
for typo_uuid in typos_uuid:
|
|
tracker = get_tracker_by_uuid(typo_uuid)
|
|
typosquattings[tracker] = get_tracker_typosquatting_domains(typo_uuid)
|
|
|
|
return typosquattings
|
|
|
|
|
|
def add_tracked_item(tracker_uuid, item_id):
|
|
item_date = item_basic.get_item_date(item_id)
|
|
# track item
|
|
r_serv_tracker.sadd(f'obj:trackers:item:{item_id}', tracker_uuid)
|
|
res = r_serv_tracker.sadd(f'tracker:item:{tracker_uuid}:{item_date}', item_id)
|
|
# track nb item by date
|
|
if res == 1:
|
|
nb_items = r_serv_tracker.zincrby(f'tracker:stat:{tracker_uuid}', 1, int(item_date))
|
|
if nb_items == 1:
|
|
update_tracker_daterange(tracker_uuid, item_date)
|
|
|
|
def set_tracker_first_seen(tracker_uuid, date):
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'first_seen', int(date))
|
|
|
|
def set_tracker_last_seen(tracker_uuid, date):
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'last_seen', int(date))
|
|
|
|
def set_tracker_user_id(tracker_uuid, user_id):
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'user_id', user_id)
|
|
|
|
# # TODO: ADD CACHE ???
|
|
def update_tracker_daterange(tracker_uuid, date, op='add'):
|
|
date = int(date)
|
|
first_seen = get_tracker_first_seen(tracker_uuid)
|
|
|
|
if op == 'add':
|
|
if not first_seen:
|
|
set_tracker_first_seen(tracker_uuid, date)
|
|
set_tracker_last_seen(tracker_uuid, date)
|
|
else:
|
|
first_seen = int(first_seen)
|
|
last_seen = int(get_tracker_last_seen(tracker_uuid))
|
|
if date < first_seen:
|
|
set_tracker_first_seen(tracker_uuid, date)
|
|
if date > last_seen:
|
|
set_tracker_last_seen(tracker_uuid, date)
|
|
|
|
if op == 'del':
|
|
pass
|
|
|
|
def remove_tracked_item(item_id):
|
|
item_date = item_basic.get_item_date(item_id)
|
|
for tracker_uuid in get_item_all_trackers_uuid(item_id):
|
|
r_serv_tracker.srem(f'obj:trackers:item:{item_id}', tracker_uuid)
|
|
res = r_serv_tracker.srem(f'tracker:item:{tracker_uuid}:{item_date}', item_id)
|
|
if res:
|
|
r_serv_tracker.zincrby(f'tracker:stat:{tracker_uuid}', -1, int(item_date))
|
|
|
|
def get_item_all_trackers_uuid(obj_id):
|
|
#obj_type = 'item'
|
|
return r_serv_tracker.smembers(f'obj:trackers:item:{obj_id}')
|
|
|
|
def is_obj_tracked(obj_type, subtype, obj_id):
|
|
return r_serv_tracker.exists(f'obj:trackers:{obj_type}:{obj_id}')
|
|
|
|
def get_obj_all_trackers(obj_type, subtype, obj_id):
|
|
return r_serv_tracker.smembers(f'obj:trackers:{obj_type}:{obj_id}')
|
|
|
|
# # TODO: ADD all Objects + Subtypes
|
|
def delete_obj_trackers(obj_type, subtype, id):
|
|
if obj_type == 'item':
|
|
remove_tracked_item(id)
|
|
|
|
def get_email_subject(tracker_uuid):
|
|
tracker_description = get_tracker_description(tracker_uuid)
|
|
if not tracker_description:
|
|
return "AIL framework: Tracker Alert"
|
|
else:
|
|
return 'AIL framework: {}'.format(tracker_description)
|
|
|
|
def get_tracker_last_updated_by_type(tracker_type):
|
|
epoch_update = r_cache.get(f'tracker:refresh:{tracker_type}')
|
|
if not epoch_update:
|
|
epoch_update = 0
|
|
return float(epoch_update)
|
|
|
|
# # TODO: check type API
|
|
def trigger_trackers_refresh(tracker_type):
|
|
r_cache.set(f'tracker:refresh:{tracker_type}', time.time())
|
|
|
|
######################
|
|
#### TRACKERS ACL ####
|
|
|
|
def is_tracker_in_global_level(tracker, tracker_type):
|
|
res = r_serv_tracker.smembers('all:tracker_uuid:{}:{}'.format(tracker_type, tracker))
|
|
if res:
|
|
for elem_uuid in res:
|
|
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'level')=='1':
|
|
return True
|
|
return False
|
|
|
|
def is_tracker_in_user_level(tracker, tracker_type, user_id):
|
|
res = r_serv_tracker.smembers('user:tracker:{}'.format(user_id))
|
|
if res:
|
|
for elem_uuid in res:
|
|
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'tracked')== tracker:
|
|
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'type')== tracker_type:
|
|
return True
|
|
return False
|
|
|
|
## API ##
|
|
def api_check_tracker_uuid(tracker_uuid):
|
|
if not is_valid_uuid_v4(tracker_uuid):
|
|
return {"status": "error", "reason": "Invalid uuid"}, 400
|
|
if not r_serv_tracker.exists(f'tracker:{tracker_uuid}'):
|
|
return {"status": "error", "reason": "Unknown uuid"}, 404
|
|
return None
|
|
|
|
def api_is_allowed_to_edit_tracker(tracker_uuid, user_id):
|
|
if not is_valid_uuid_v4(tracker_uuid):
|
|
return {"status": "error", "reason": "Invalid uuid"}, 400
|
|
tracker_creator = r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'user_id')
|
|
if not tracker_creator:
|
|
return {"status": "error", "reason": "Unknown uuid"}, 404
|
|
user = User(user_id)
|
|
if not user.is_in_role('admin') and user_id != tracker_creator:
|
|
return {"status": "error", "reason": "Access Denied"}, 403
|
|
return {"uuid": tracker_uuid}, 200
|
|
|
|
##-- ACL --##
|
|
|
|
#### FIX DB ####
|
|
def fix_tracker_stats_per_day(tracker_uuid):
|
|
date_from = get_tracker_date(tracker_uuid)
|
|
date_to = Date.get_today_date_str()
|
|
# delete stats
|
|
r_serv_tracker.delete(f'tracker:stat:{tracker_uuid}')
|
|
r_serv_tracker.hdel(f'tracker:{tracker_uuid}', 'first_seen')
|
|
r_serv_tracker.hdel(f'tracker:{tracker_uuid}', 'last_seen')
|
|
# create new stats
|
|
for date_day in Date.substract_date(date_from, date_to):
|
|
date_day = int(date_day)
|
|
|
|
nb_items = r_serv_tracker.scard(f'tracker:item:{tracker_uuid}:{date_day}')
|
|
if nb_items:
|
|
r_serv_tracker.zincrby(f'tracker:stat:{tracker_uuid}', nb_items, int(date_day))
|
|
|
|
# update first_seen/last_seen
|
|
update_tracker_daterange(tracker_uuid, date_day)
|
|
|
|
def fix_tracker_item_link(tracker_uuid):
|
|
date_from = get_tracker_first_seen(tracker_uuid)
|
|
date_to = get_tracker_last_seen(tracker_uuid)
|
|
|
|
if date_from and date_to:
|
|
for date_day in Date.substract_date(date_from, date_to):
|
|
l_items = r_serv_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}')
|
|
for item_id in l_items:
|
|
r_serv_tracker.sadd(f'obj:trackers:item:{item_id}', tracker_uuid)
|
|
|
|
def fix_all_tracker_uuid_list():
|
|
r_serv_tracker.delete(f'trackers:all')
|
|
for tracker_type in get_all_tracker_type():
|
|
r_serv_tracker.delete(f'trackers:all:{tracker_type}')
|
|
l_tracker = get_all_tracker_by_type(tracker_type)
|
|
for tracker in l_tracker:
|
|
l_tracker_uuid = get_tracker_uuid_list(tracker, tracker_type)
|
|
for tracker_uuid in l_tracker_uuid:
|
|
r_serv_tracker.sadd(f'trackers:all', tracker_uuid)
|
|
r_serv_tracker.sadd(f'trackers:all:{tracker_type}', tracker_uuid)
|
|
|
|
##-- FIX DB --##
|
|
|
|
#### CREATE TRACKER ####
|
|
def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1):
|
|
if tracker_type=='regex':
|
|
if not is_valid_regex(tracker):
|
|
return {"status": "error", "reason": "Invalid regex"}, 400
|
|
elif tracker_type=='word' or tracker_type=='set':
|
|
# force lowercase
|
|
tracker = tracker.lower()
|
|
word_set = set(tracker)
|
|
set_inter = word_set.intersection(special_characters)
|
|
if set_inter:
|
|
return {"status": "error", "reason": f'special character(s) not allowed: {set_inter}', "message": "Please use a python regex or remove all special characters"}, 400
|
|
words = tracker.split()
|
|
# not a word
|
|
if tracker_type=='word' and len(words)>1:
|
|
tracker_type = 'set'
|
|
|
|
        # output format: tracker1,tracker2,tracker3;2
|
|
if tracker_type=='set':
|
|
try:
|
|
nb_words = int(nb_words)
|
|
except:
|
|
nb_words = 1
|
|
if nb_words==0:
|
|
nb_words = 1
|
|
|
|
words_set = set(words)
|
|
words_set = sorted(words_set)
|
|
|
|
if nb_words > len(words_set):
|
|
nb_words = len(words_set)
|
|
|
|
tracker = ",".join(words_set)
|
|
tracker = "{};{}".format(tracker, nb_words)
|
|
elif tracker_type == 'typosquatting':
|
|
tracker = tracker.lower()
|
|
# Take only the first term
|
|
domain = tracker.split(" ")
|
|
if len(domain) > 1:
|
|
return {"status": "error", "reason": "Only one domain is accepted at a time"}, 400
|
|
        if "." not in tracker:
|
|
return {"status": "error", "reason": "Invalid domain name"}, 400
|
|
|
|
|
|
elif tracker_type=='yara_custom':
|
|
if not is_valid_yara_rule(tracker):
|
|
return {"status": "error", "reason": "Invalid custom Yara Rule"}, 400
|
|
elif tracker_type=='yara_default':
|
|
if not is_valid_default_yara_rule(tracker):
|
|
return {"status": "error", "reason": "The Yara Rule doesn't exist"}, 400
|
|
else:
|
|
return {"status": "error", "reason": "Incorrect type"}, 400
|
|
return {"status": "success", "tracker": tracker, "type": tracker_type}, 200
|
|
|
|
def _re_create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard, tracker_uuid, sources, first_seen, last_seen):
|
|
create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard=dashboard, tracker_uuid=tracker_uuid, sources=sources)
|
|
set_tracker_user_id(tracker_uuid, user_id)
|
|
if first_seen:
|
|
set_tracker_first_seen(tracker_uuid, first_seen)
|
|
if last_seen:
|
|
set_tracker_last_seen(tracker_uuid, last_seen)
|
|
|
|
|
|
def create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard=0, tracker_uuid=None, sources=[]):
|
|
# edit tracker
|
|
if tracker_uuid:
|
|
# check if type changed
|
|
old_type = get_tracker_type(tracker_uuid)
|
|
if not old_type:
|
|
edit_tracker = False
|
|
else:
|
|
edit_tracker = True
|
|
old_tracker = get_tracker_by_uuid(tracker_uuid)
|
|
old_level = get_tracker_level(tracker_uuid)
|
|
tracker_user_id = get_tracker_user_id(tracker_uuid)
|
|
|
|
# Create new tracker
|
|
else:
|
|
edit_tracker = False
|
|
# generate tracker uuid
|
|
tracker_uuid = str(uuid.uuid4())
|
|
old_type = None
|
|
old_tracker = None
|
|
|
|
# YARA
|
|
if tracker_type == 'yara_custom' or tracker_type == 'yara_default':
|
|
# create yara rule
|
|
if tracker_type == 'yara_default' and old_type == 'yara':
|
|
if not is_default_yara_rule(old_tracker):
|
|
filepath = get_yara_rule_file_by_tracker_name(old_tracker)
|
|
if filepath:
|
|
os.remove(filepath)
|
|
tracker = save_yara_rule(tracker_type, tracker, tracker_uuid=tracker_uuid)
|
|
tracker_type = 'yara'
|
|
|
|
elif tracker_type == 'typosquatting':
|
|
domain = tracker.split(" ")[0]
|
|
typo_generation = runAll(domain=domain, limit=math.inf, formatoutput="text", pathOutput="-", verbose=False)
|
|
for typo in typo_generation:
|
|
r_serv_tracker.sadd(f'tracker:typosquatting:{tracker_uuid}', typo)
|
|
|
|
# create metadata
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'tracked', tracker)
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'type', tracker_type)
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'date', datetime.date.today().strftime("%Y%m%d"))
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'level', level)
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'dashboard', dashboard)
|
|
if not edit_tracker:
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'user_id', user_id)
|
|
|
|
if description:
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'description', description)
|
|
|
|
if webhook:
|
|
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'webhook', webhook)
|
|
|
|
# type change
|
|
if edit_tracker:
|
|
r_serv_tracker.srem('all:tracker:{}'.format(old_type), old_tracker)
|
|
r_serv_tracker.srem('all:tracker_uuid:{}:{}'.format(old_type, old_tracker), tracker_uuid)
|
|
if level != old_level:
|
|
if level == 0:
|
|
r_serv_tracker.srem('global:tracker', tracker_uuid)
|
|
elif level == 1:
|
|
r_serv_tracker.srem('user:tracker:{}'.format(tracker_user_id), tracker_uuid)
|
|
if tracker_type != old_type:
|
|
if old_level == 0:
|
|
r_serv_tracker.srem('user:tracker:{}:{}'.format(tracker_user_id, old_type), tracker_uuid)
|
|
elif old_level == 1:
|
|
r_serv_tracker.srem('global:tracker:{}'.format(old_type), tracker_uuid)
|
|
if old_type=='yara':
|
|
if not is_default_yara_rule(old_tracker):
|
|
filepath = get_yara_rule_file_by_tracker_name(old_tracker)
|
|
if filepath:
|
|
os.remove(filepath)
|
|
|
|
# create all tracker set
|
|
r_serv_tracker.sadd('all:tracker:{}'.format(tracker_type), tracker)
|
|
|
|
# create tracker - uuid map
|
|
r_serv_tracker.sadd('all:tracker_uuid:{}:{}'.format(tracker_type, tracker), tracker_uuid)
|
|
|
|
r_serv_tracker.sadd(f'trackers:all', tracker_uuid)
|
|
r_serv_tracker.sadd(f'trackers:all:{tracker_type}', tracker_uuid)
|
|
|
|
# add display level set
|
|
if level == 0: # user only
|
|
r_serv_tracker.sadd('user:tracker:{}'.format(user_id), tracker_uuid)
|
|
r_serv_tracker.sadd('user:tracker:{}:{}'.format(user_id, tracker_type), tracker_uuid)
|
|
elif level == 1: # global
|
|
r_serv_tracker.sadd('global:tracker', tracker_uuid)
|
|
r_serv_tracker.sadd('global:tracker:{}'.format(tracker_type), tracker_uuid)
|
|
|
|
if edit_tracker:
|
|
r_serv_tracker.delete(f'tracker:tags:{tracker_uuid}')
|
|
r_serv_tracker.delete(f'tracker:mail:{tracker_uuid}')
|
|
r_serv_tracker.delete(f'tracker:sources:{tracker_uuid}')
|
|
|
|
# create tracker tags list
|
|
for tag in tags:
|
|
tag = escape(tag)
|
|
r_serv_tracker.sadd(f'tracker:tags:{tracker_uuid}', tag)
|
|
Tag.create_custom_tag(tag)
|
|
|
|
# create tracker mail notification list
|
|
for mail in mails:
|
|
r_serv_tracker.sadd(f'tracker:mail:{tracker_uuid}', escape(mail))
|
|
|
|
# create tracker sources filter
|
|
for source in sources:
|
|
# escape source ?
|
|
r_serv_tracker.sadd(f'tracker:sources:{tracker_uuid}', escape(source))
|
|
# toggle refresh module tracker list/set
|
|
trigger_trackers_refresh(tracker_type)
|
|
if tracker_type != old_type: # toggle old type refresh
|
|
trigger_trackers_refresh(old_type)
|
|
return tracker_uuid
|
|
|
|
def api_add_tracker(dict_input, user_id):
|
|
tracker = dict_input.get('tracker', None)
|
|
if not tracker:
|
|
return {"status": "error", "reason": "Tracker not provided"}, 400
|
|
tracker_type = dict_input.get('type', None)
|
|
if not tracker_type:
|
|
return {"status": "error", "reason": "Tracker type not provided"}, 400
|
|
nb_words = dict_input.get('nb_words', 1)
|
|
description = dict_input.get('description', '')
|
|
description = escape(description)
|
|
webhook = dict_input.get('webhook', '')
|
|
webhook = escape(webhook)
|
|
res = api_validate_tracker_to_add(tracker , tracker_type, nb_words=nb_words)
|
|
if res[1]!=200:
|
|
return res
|
|
tracker = res[0]['tracker']
|
|
tracker_type = res[0]['type']
|
|
|
|
tags = dict_input.get('tags', [])
|
|
mails = dict_input.get('mails', [])
|
|
res = verify_mail_list(mails)
|
|
if res:
|
|
return res
|
|
|
|
sources = dict_input.get('sources', [])
|
|
res = item_basic.verify_sources_list(sources)
|
|
if res:
|
|
return res
|
|
|
|
## TODO: add dashboard key
|
|
level = dict_input.get('level', 1)
|
|
    try:
        level = int(level)
        if level not in (0, 1):  # level must be 0 (user only) or 1 (global)
            level = 1
    except:
        level = 1
|
|
|
|
tracker_uuid = dict_input.get('uuid', None)
|
|
# check edit ACL
|
|
if tracker_uuid:
|
|
res = api_is_allowed_to_edit_tracker(tracker_uuid, user_id)
|
|
if res[1] != 200:
|
|
return res
|
|
else:
|
|
# check if tracker already tracked in global
|
|
if level==1:
|
|
if is_tracker_in_global_level(tracker, tracker_type) and not tracker_uuid:
|
|
return {"status": "error", "reason": "Tracker already exist"}, 409
|
|
else:
|
|
if is_tracker_in_user_level(tracker, tracker_type, user_id) and not tracker_uuid:
|
|
return {"status": "error", "reason": "Tracker already exist"}, 409
|
|
|
|
tracker_uuid = create_tracker(tracker , tracker_type, user_id, level, tags, mails, description, webhook, tracker_uuid=tracker_uuid, sources=sources)
|
|
|
|
return {'tracker': tracker, 'type': tracker_type, 'uuid': tracker_uuid}, 200
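
# Illustrative api_add_tracker() input (example values only, based on the
# fields read above and the commented create_tracker() calls at the end of
# this file):
#   {'tracker': 'circl\.lu', 'type': 'regex', 'description': 'circl regex tracker',
#    'tags': [], 'mails': ['admin@admin.test'], 'sources': ['crawled', 'pastebin.com'],
#    'level': 1}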
##-- CREATE TRACKER --##
|
|
|
|
##############
|
|
#### YARA ####
|
|
def get_yara_rules_dir():
|
|
return os.path.join(os.environ['AIL_BIN'], 'trackers', 'yara')
|
|
|
|
def get_yara_rules_default_dir():
|
|
return os.path.join(os.environ['AIL_BIN'], 'trackers', 'yara', 'ail-yara-rules', 'rules')
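
# Expected on-disk layout under AIL_BIN/trackers/yara/ (as implied by the
# helpers in this section, shown for orientation):
#   ail-yara-rules/rules/<type>/<rule file>   default rules (yara_default)
#   custom-rules/<tracker_uuid>.yar           custom rules written by save_yara_rule()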
# # TODO: cache + update
|
|
def get_all_default_yara_rules_types():
|
|
yara_dir = get_yara_rules_default_dir()
|
|
all_yara_types = next(os.walk(yara_dir))[1]
|
|
# save in cache ?
|
|
return all_yara_types
|
|
|
|
# # TODO: cache + update
|
|
def get_all_default_yara_files():
|
|
yara_dir = get_yara_rules_default_dir()
|
|
all_default_yara_files = {}
|
|
for rules_type in get_all_default_yara_rules_types():
|
|
all_default_yara_files[rules_type] = os.listdir(os.path.join(yara_dir, rules_type))
|
|
return all_default_yara_files
|
|
|
|
def get_all_default_yara_rules_by_type(yara_types):
|
|
all_default_yara_files = get_all_default_yara_files()
|
|
if yara_types in all_default_yara_files:
|
|
return all_default_yara_files[yara_types]
|
|
else:
|
|
return []
|
|
|
|
def get_all_tracked_yara_files(filter_disabled=False):
|
|
yara_files = r_serv_tracker.smembers('all:tracker:yara')
|
|
if not yara_files:
|
|
yara_files = []
|
|
if filter_disabled:
|
|
pass
|
|
return yara_files
|
|
|
|
def get_yara_rule_by_uuid(tracker_uuid):
|
|
yar_path = get_tracker_by_uuid(tracker_uuid)
|
|
return yara.compile(filepath=os.path.join(get_yara_rules_dir(), yar_path))
|
|
|
|
def reload_yara_rules():
|
|
yara_files = get_all_tracked_yara_files()
|
|
# {uuid: filename}
|
|
rule_dict = {}
|
|
for yar_path in yara_files:
|
|
l_tracker_uuid = get_tracker_uuid_list(yar_path, 'yara')
|
|
for tracker_uuid in l_tracker_uuid:
|
|
rule_dict[tracker_uuid] = os.path.join(get_yara_rules_dir(), yar_path)
|
|
for tracker_uuid in rule_dict:
|
|
if not os.path.isfile(rule_dict[tracker_uuid]):
|
|
# TODO IGNORE + LOGS
|
|
raise Exception(f"Error: {rule_dict[tracker_uuid]} doesn't exists")
|
|
rules = yara.compile(filepaths=rule_dict)
|
|
return rules
|
|
|
|
# # TODO:
|
|
# Avoid useless CHECK
|
|
# Empty list == ALL SOURCES
|
|
# FIXME MOOVE ME
|
|
def get_tracker_sources(tracker, tracker_type):
|
|
l_sources = set()
|
|
for tracker_uuid in get_tracker_uuid_list(tracker, tracker_type):
|
|
sources = get_tracker_uuid_sources(tracker_uuid)
|
|
if sources:
|
|
for source in get_tracker_uuid_sources(tracker_uuid):
|
|
l_sources.add(source)
|
|
else:
|
|
l_sources = []
|
|
break
|
|
return l_sources
|
|
|
|
def is_valid_yara_rule(yara_rule):
|
|
try:
|
|
yara.compile(source=yara_rule)
|
|
return True
|
|
except:
|
|
return False
|
|
|
|
def is_default_yara_rule(tracked_yara_name):
|
|
yara_dir = get_yara_rules_dir()
|
|
filename = os.path.join(yara_dir, tracked_yara_name)
|
|
filename = os.path.realpath(filename)
|
|
try:
|
|
if tracked_yara_name.split('/')[0] == 'custom-rules':
|
|
return False
|
|
except:
|
|
return False
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
return False
|
|
else:
|
|
if os.path.isfile(filename):
|
|
return True
|
|
return False
|
|
|
|
def is_valid_default_yara_rule(yara_rule, verbose=True):
|
|
yara_dir = get_yara_rules_default_dir()
|
|
filename = os.path.join(yara_dir, yara_rule)
|
|
filename = os.path.realpath(filename)
|
|
# incorrect filename
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
if verbose:
|
|
            print('error: file traversal')
|
|
print(yara_dir)
|
|
print(filename)
|
|
return False
|
|
else:
|
|
if os.path.isfile(filename):
|
|
return True
|
|
else:
|
|
return False
|
|
|
|
def save_yara_rule(yara_rule_type, yara_rule, tracker_uuid=None):
|
|
if yara_rule_type == 'yara_custom':
|
|
if not tracker_uuid:
|
|
tracker_uuid = str(uuid.uuid4())
|
|
filename = os.path.join('custom-rules', tracker_uuid + '.yar')
|
|
with open(os.path.join(get_yara_rules_dir(), filename), 'w') as f:
|
|
f.write(str(yara_rule))
|
|
if yara_rule_type == 'yara_default':
|
|
filename = os.path.join('ail-yara-rules', 'rules', yara_rule)
|
|
return filename
|
|
|
|
def get_yara_rule_file_by_tracker_name(tracked_yara_name):
|
|
yara_dir = get_yara_rules_dir()
|
|
filename = os.path.join(yara_dir, tracked_yara_name)
|
|
filename = os.path.realpath(filename)
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
        print('error: file traversal')
|
|
print(yara_dir)
|
|
print(filename)
|
|
return None
|
|
return filename
|
|
|
|
def get_yara_rule_content(yara_rule):
|
|
yara_dir = get_yara_rules_dir()
|
|
filename = os.path.join(yara_dir, yara_rule)
|
|
filename = os.path.realpath(filename)
|
|
|
|
# incorrect filename
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
return '' # # TODO: throw exception
|
|
|
|
with open(filename, 'r') as f:
|
|
rule_content = f.read()
|
|
return rule_content
|
|
|
|
def api_get_default_rule_content(default_yara_rule):
|
|
yara_dir = get_yara_rules_default_dir()
|
|
filename = os.path.join(yara_dir, default_yara_rule)
|
|
filename = os.path.realpath(filename)
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
return {'status': 'error', 'reason': 'file traversal detected'}, 400
|
|
|
|
if not os.path.isfile(filename):
|
|
return {'status': 'error', 'reason': 'yara rule not found'}, 400
|
|
|
|
with open(filename, 'r') as f:
|
|
rule_content = f.read()
|
|
return {'rule_name': default_yara_rule, 'content': rule_content}, 200
|
|
|
|
|
|
def get_yara_rule_content_restapi(request_dict):
|
|
rule_name = request_dict.get('rule_name', None)
|
|
if not request_dict:
|
|
return {'status': 'error', 'reason': 'Malformed JSON'}, 400
|
|
if not rule_name:
|
|
return {'status': 'error', 'reason': 'Mandatory parameter(s) not provided'}, 400
|
|
yara_dir = get_yara_rules_dir()
|
|
filename = os.path.join(yara_dir, rule_name)
|
|
filename = os.path.realpath(filename)
|
|
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
|
|
return {'status': 'error', 'reason': 'File Path Traversal'}, 400
|
|
if not os.path.isfile(filename):
|
|
return {'status': 'error', 'reason': 'yara rule not found'}, 400
|
|
with open(filename, 'r') as f:
|
|
rule_content = f.read()
|
|
rule_content = base64.b64encode((rule_content.encode('utf-8'))).decode('UTF-8')
|
|
return {'status': 'success', 'content': rule_content}, 200
|
|
|
|
|
|
|
|
##-- YARA --##
|
|
|
|
######################
|
|
#### RETRO - HUNT ####
|
|
|
|
# state: pending/running/completed/paused
|
|
|
|
# task keys:
|
|
## tracker:retro_hunt:task:{task_uuid} state
|
|
# start_time
|
|
# end_time
|
|
# date_from
|
|
# date_to
|
|
# creator
|
|
# timeout
|
|
# date
|
|
# type
|
|
|
|
## ? ? ?
|
|
# set tags
|
|
# set mails
|
|
# limit mail
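
# Additional keys used by the retro-hunt code below (illustrative summary,
# as written by create_retro_hunt_task() and save_retro_hunt_match()):
#   tracker:retro_hunt:task:{task_uuid}              -> hash (fields above + state, last, nb_match)
#   tracker:retro_hunt:task:sources:{task_uuid}      -> set of source filters
#   tracker:retro_hunt:task:tags:{task_uuid}         -> set of tags
#   tracker:retro_hunt:task:mails:{task_uuid}        -> set of mails
#   tracker:retro_hunt:task:item:{task_uuid}:{date}  -> set of matching item ids
#   tracker:retro_hunt:task:stat:{task_uuid}         -> zset: nb matches per day
#   tracker:retro_hunt:task:all / tracker:retro_hunt:task:{state} -> sets of task uuids
#   obj:retro_hunt:item:{item_id}                    -> set of matching task uuids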
# SET Retro Hunts
|
|
|
|
def get_all_retro_hunt_tasks():
|
|
return r_serv_tracker.smembers('tracker:retro_hunt:task:all')
|
|
|
|
def get_all_pending_retro_hunt_tasks():
|
|
return r_serv_tracker.smembers('tracker:retro_hunt:task:pending')
|
|
|
|
def get_all_running_retro_hunt_tasks():
|
|
return r_serv_tracker.smembers('tracker:retro_hunt:task:running')
|
|
|
|
def get_all_paused_retro_hunt_tasks():
|
|
return r_serv_tracker.smembers('tracker:retro_hunt:task:paused')
|
|
|
|
## Change STATES ##
|
|
|
|
def get_all_completed_retro_hunt_tasks():
|
|
return r_serv_tracker.smembers('tracker:retro_hunt:task:completed')
|
|
|
|
def get_retro_hunt_task_to_start():
|
|
task_uuid = r_serv_tracker.spop('tracker:retro_hunt:task:pending')
|
|
if task_uuid:
|
|
set_retro_hunt_task_state(task_uuid, 'running')
|
|
return task_uuid
|
|
|
|
def pause_retro_hunt_task(task_uuid):
|
|
set_retro_hunt_task_state(task_uuid, 'paused')
|
|
r_cache.hset(f'tracker:retro_hunt:task:{task_uuid}', 'pause', time.time())
|
|
|
|
def check_retro_hunt_pause(task_uuid):
|
|
is_paused = r_cache.hget(f'tracker:retro_hunt:task:{task_uuid}', 'pause')
|
|
if is_paused:
|
|
return True
|
|
else:
|
|
return False
|
|
|
|
def resume_retro_hunt_task(task_uuid):
|
|
r_cache.hdel(f'tracker:retro_hunt:task:{task_uuid}', 'pause')
|
|
set_retro_hunt_task_state(task_uuid, 'pending')
|
|
|
|
## Metadata ##
|
|
|
|
def get_retro_hunt_task_name(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'name')
|
|
|
|
def get_retro_hunt_task_state(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'state')
|
|
|
|
def set_retro_hunt_task_state(task_uuid, new_state):
|
|
curr_state = get_retro_hunt_task_state(task_uuid)
|
|
if curr_state:
|
|
r_serv_tracker.srem(f'tracker:retro_hunt:task:{curr_state}', task_uuid)
|
|
r_serv_tracker.sadd(f'tracker:retro_hunt:task:{new_state}', task_uuid)
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'state', new_state)
|
|
|
|
# def get_retro_hunt_task_type(task_uuid):
|
|
# return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'type')
|
|
#
|
|
# def set_retro_hunt_task_type(task_uuid, task_type):
|
|
# r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'type', task_type)
|
|
|
|
# # TODO: yararule
|
|
def get_retro_hunt_task_rule(task_uuid, r_compile=False):
|
|
#rule_type = 'yara'
|
|
rule = r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'rule')
|
|
if r_compile:
|
|
#if rule_type == 'yara'
|
|
rule = os.path.join(get_yara_rules_dir(), rule)
|
|
rule_dict = {task_uuid : os.path.join(get_yara_rules_dir(), rule)}
|
|
rule = yara.compile(filepaths=rule_dict)
|
|
return rule
|
|
|
|
def get_retro_hunt_task_timeout(task_uuid):
|
|
res = r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'timeout')
|
|
if res:
|
|
return int(res)
|
|
else:
|
|
return 30 # # TODO: FIXME use instance limit
|
|
|
|
def get_retro_hunt_task_date(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date')
|
|
|
|
def set_retro_hunt_task_date(task_uuid, date):
|
|
return r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'date', date)
|
|
|
|
def get_retro_hunt_task_date_from(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date_from')
|
|
|
|
def get_retro_hunt_task_date_to(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date_to')
|
|
|
|
def get_retro_hunt_task_creator(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'creator')
|
|
|
|
def get_retro_hunt_last_analyzed(task_uuid):
|
|
return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'last')
|
|
|
|
# Keep history to relaunch on error/pause
|
|
def set_retro_hunt_last_analyzed(task_uuid, last_id):
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'last', last_id)
|
|
|
|
def get_retro_hunt_task_sources(task_uuid, r_sort=False):
|
|
sources = r_serv_tracker.smembers(f'tracker:retro_hunt:task:sources:{task_uuid}')
|
|
if not sources:
|
|
sources = set(item_basic.get_all_items_sources(filter_dir=False))
|
|
if r_sort:
|
|
sources = sorted(sources)
|
|
return sources
|
|
|
|
def get_retro_hunt_task_tags(task_uuid):
|
|
return r_serv_tracker.smembers(f'tracker:retro_hunt:task:tags:{task_uuid}')
|
|
|
|
def get_retro_hunt_task_mails(task_uuid):
|
|
return r_serv_tracker.smembers(f'tracker:retro_hunt:task:mails:{task_uuid}')
|
|
|
|
# # TODO: ADD TYPE + TIMEOUT
|
|
def get_retro_hunt_task_metadata(task_uuid, date=False, progress=False, creator=False, sources=None, tags=None, description=False, nb_match=False):
|
|
task_metadata = {'uuid': task_uuid}
|
|
task_metadata['state'] = get_retro_hunt_task_state(task_uuid)
|
|
task_metadata['name'] = get_retro_hunt_task_name(task_uuid)
|
|
task_metadata['rule'] = get_retro_hunt_task_rule(task_uuid)
|
|
if creator:
|
|
task_metadata['creator'] = get_retro_hunt_task_creator(task_uuid)
|
|
if date:
|
|
task_metadata['date'] = r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date')
|
|
task_metadata['date_from'] = get_retro_hunt_task_date_from(task_uuid)
|
|
task_metadata['date_to'] = get_retro_hunt_task_date_to(task_uuid)
|
|
if description:
|
|
task_metadata['description'] = r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'description')
|
|
if nb_match:
|
|
task_metadata['nb_match'] = get_retro_hunt_nb_match(task_uuid)
|
|
if progress:
|
|
task_metadata['progress'] = get_retro_hunt_task_progress(task_uuid)
|
|
if sources:
|
|
task_metadata['sources'] = get_retro_hunt_task_sources(task_uuid, r_sort=True)
|
|
if tags:
|
|
task_metadata['tags'] = get_retro_hunt_task_tags(task_uuid)
|
|
return task_metadata
|
|
|
|
def get_all_retro_hunt_tasks_with_metadata():
|
|
l_retro_hunt = []
|
|
for task_uuid in get_all_retro_hunt_tasks():
|
|
l_retro_hunt.append(get_retro_hunt_task_metadata(task_uuid, date=True, progress=True, tags=True, nb_match=True))
|
|
return l_retro_hunt
|
|
|
|
def get_retro_hunt_task_progress(task_uuid):
|
|
if get_retro_hunt_task_state(task_uuid) == 'completed':
|
|
progress = 100
|
|
else:
|
|
progress = r_cache.hget(f'tracker:retro_hunt:task:{task_uuid}', 'progress')
|
|
if not progress:
|
|
progress = compute_retro_hunt_task_progress(task_uuid)
|
|
return progress
|
|
|
|
def set_cache_retro_hunt_task_progress(task_uuid, progress):
|
|
r_cache.hset(f'tracker:retro_hunt:task:{task_uuid}', 'progress', progress)
|
|
|
|
def set_cache_retro_hunt_task_id(task_uuid, id):
|
|
r_cache.hset(f'tracker:retro_hunt:task:{task_uuid}', 'id', id)
|
|
|
|
def clear_retro_hunt_task_cache(task_uuid):
|
|
r_cache.delete(f'tracker:retro_hunt:task:{task_uuid}')
|
|
|
|
# Others
|
|
|
|
# date
|
|
# type
|
|
# tags
|
|
# mails
|
|
# name
|
|
# description
|
|
|
|
# state error
|
|
def _re_create_retro_hunt_task(name, rule, date, date_from, date_to, creator, sources, tags, mails, timeout, description, task_uuid, state='pending', nb_match=0, last_id=None):
|
|
create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=sources, tags=tags, mails=mails, timeout=timeout, description=description, task_uuid=task_uuid, state=state)
|
|
if last_id:
|
|
set_retro_hunt_last_analyzed(task_uuid, last_id)
|
|
_set_retro_hunt_nb_match(task_uuid, nb_match)
|
|
set_retro_hunt_task_date(task_uuid, date)
|
|
|
|
|
|
# # # TODO: TYPE
|
|
def create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=[], tags=[], mails=[], timeout=30, description=None, task_uuid=None, state='pending'):
|
|
if not task_uuid:
|
|
task_uuid = str(uuid.uuid4())
|
|
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'name', escape(name))
|
|
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'rule', rule)
|
|
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'date', datetime.date.today().strftime("%Y%m%d"))
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'date_from', date_from)
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'date_to', date_to)
|
|
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'creator', creator)
|
|
if description:
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'description', description)
|
|
if timeout:
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'timeout', int(timeout))
|
|
for source in sources:
|
|
r_serv_tracker.sadd(f'tracker:retro_hunt:task:sources:{task_uuid}', escape(source))
|
|
for tag in tags:
|
|
tag = escape(tag)
|
|
r_serv_tracker.sadd(f'tracker:retro_hunt:task:tags:{task_uuid}', tag)
|
|
Tag.create_custom_tag(tag)
|
|
for mail in mails:
|
|
r_serv_tracker.sadd(f'tracker:retro_hunt:task:mails:{task_uuid}', escape(mail))
|
|
|
|
r_serv_tracker.sadd('tracker:retro_hunt:task:all', task_uuid)
|
|
|
|
# add to pending tasks
|
|
if state not in ('pending', 'completed', 'paused'):
|
|
state = 'pending'
|
|
set_retro_hunt_task_state(task_uuid, state)
|
|
return task_uuid
|
|
|
|
# # TODO: delete rule
|
|
def delete_retro_hunt_task(task_uuid):
|
|
if r_serv_tracker.sismember('tracker:retro_hunt:task:running', task_uuid):
|
|
return None
|
|
|
|
r_serv_tracker.srem('tracker:retro_hunt:task:pending', task_uuid)
|
|
r_serv_tracker.delete(f'tracker:retro_hunt:task:{task_uuid}')
|
|
r_serv_tracker.delete(f'tracker:retro_hunt:task:sources:{task_uuid}')
|
|
r_serv_tracker.delete(f'tracker:retro_hunt:task:tags:{task_uuid}')
|
|
r_serv_tracker.delete(f'tracker:retro_hunt:task:mails:{task_uuid}')
|
|
|
|
for item_date in get_retro_hunt_all_item_dates(task_uuid):
|
|
r_serv_tracker.delete(f'tracker:retro_hunt:task:item:{task_uuid}:{item_date}')
|
|
|
|
r_serv_tracker.srem('tracker:retro_hunt:task:all', task_uuid)
|
|
r_serv_tracker.srem('tracker:retro_hunt:task:pending', task_uuid)
|
|
r_serv_tracker.srem('tracker:retro_hunt:task:paused', task_uuid)
|
|
r_serv_tracker.srem('tracker:retro_hunt:task:completed', task_uuid)
|
|
|
|
clear_retro_hunt_task_cache(task_uuid)
|
|
return task_uuid
|
|
|
|
def get_retro_hunt_task_current_date(task_uuid):
|
|
last = get_retro_hunt_last_analyzed(task_uuid)
|
|
if last:
|
|
curr_date = item_basic.get_item_date(last)
|
|
else:
|
|
curr_date = get_retro_hunt_task_date_from(task_uuid)
|
|
return curr_date
|
|
|
|
def get_retro_hunt_task_nb_src_done(task_uuid, sources=[]):
|
|
if not sources:
|
|
sources = list(get_retro_hunt_task_sources(task_uuid, r_sort=True))
|
|
else:
|
|
sources = list(sources)
|
|
last_id = get_retro_hunt_last_analyzed(task_uuid)
|
|
if last_id:
|
|
last_source = item_basic.get_source(last_id)
|
|
try:
|
|
nb_src_done = sources.index(last_source)
|
|
except ValueError:
|
|
nb_src_done = 0
|
|
else:
|
|
nb_src_done = 0
|
|
return nb_src_done
|
|
|
|
def get_retro_hunt_dir_day_to_analyze(task_uuid, date, filter_last=False, sources=[]):
|
|
if not sources:
|
|
sources = get_retro_hunt_task_sources(task_uuid, r_sort=True)
|
|
|
|
# filter last
|
|
if filter_last:
|
|
last = get_retro_hunt_last_analyzed(task_uuid)
|
|
if last:
|
|
curr_source = item_basic.get_source(last)
|
|
# remove processed sources
|
|
set_sources = sources.copy()
|
|
for source in sources:
|
|
if source != curr_source:
|
|
set_sources.remove(source)
|
|
else:
|
|
break
|
|
sources = set_sources
|
|
|
|
# return all dirs by day
|
|
date = f'{date[0:4]}/{date[4:6]}/{date[6:8]}'
|
|
dirs = set()
|
|
for source in sources:
|
|
dirs.add(os.path.join(source, date))
|
|
return dirs
|
|
|
|
# # TODO: move me
|
|
def get_items_to_analyze(dir, last=None):
|
|
if items_dir == 'PASTES':
|
|
full_dir = os.path.join(os.environ['AIL_HOME'], 'PASTES', dir)
|
|
else:
|
|
full_dir = os.path.join(items_dir, dir)
|
|
if os.path.isdir(full_dir):
|
|
all_items = sorted([os.path.join(dir, f) for f in os.listdir(full_dir) if os.path.isfile(os.path.join(full_dir, f))])
|
|
# remove processed items
|
|
if last:
|
|
items_set = all_items.copy()
|
|
for item in all_items:
|
|
if item != last:
|
|
items_set.remove(item)
|
|
else:
|
|
break
|
|
all_items = items_set
|
|
return all_items
|
|
else:
|
|
return []
|
|
|
|
def compute_retro_hunt_task_progress(task_uuid, date_from=None, date_to=None, sources=[], curr_date=None, nb_src_done=0):
|
|
# get nb days
|
|
if not date_from:
|
|
date_from = get_retro_hunt_task_date_from(task_uuid)
|
|
if not date_to:
|
|
date_to = get_retro_hunt_task_date_to(task_uuid)
|
|
nb_days = Date.get_nb_days_by_daterange(date_from, date_to)
|
|
|
|
# nb days completed
|
|
if not curr_date:
|
|
curr_date = get_retro_hunt_task_current_date(task_uuid)
|
|
nb_days_done = Date.get_nb_days_by_daterange(date_from, curr_date) - 1
|
|
|
|
# sources
|
|
if not sources:
|
|
nb_sources = len(get_retro_hunt_task_sources(task_uuid))
|
|
else:
|
|
nb_sources = len(sources)
|
|
|
|
# get progress
|
|
progress = ((nb_days_done * nb_sources) + nb_src_done) * 100 / (nb_days * nb_sources)
|
|
return int(progress)
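
# Worked example (illustrative): a 10-day date range with 4 sources, 3 full days
# processed and 2 sources done on the current day gives
# ((3 * 4) + 2) * 100 / (10 * 4) = 35 (%).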
|
|
|
|
# # TODO: # FIXME: # Cache
|
|
|
|
# # TODO: ADD MAP ID => Retro_Hunt
|
|
def save_retro_hunt_match(task_uuid, id, object_type='item'):
|
|
item_date = item_basic.get_item_date(id)
|
|
res = r_serv_tracker.sadd(f'tracker:retro_hunt:task:item:{task_uuid}:{item_date}', id)
|
|
# track nb item by date
|
|
if res == 1:
|
|
r_serv_tracker.zincrby(f'tracker:retro_hunt:task:stat:{task_uuid}', 1, int(item_date))
|
|
# Add map obj_id -> task_uuid
|
|
r_serv_tracker.sadd(f'obj:retro_hunt:item:{id}', task_uuid)
|
|
|
|
def get_retro_hunt_all_item_dates(task_uuid):
|
|
return r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1)
|
|
|
|
def get_retro_hunt_nb_match(task_uuid):
|
|
nb_match = r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'nb_match')
|
|
if not nb_match:
|
|
l_date_value = r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1, withscores=True)
|
|
nb_match = 0
|
|
for tuple in l_date_value:
|
|
nb_match += int(tuple[1])
|
|
return int(nb_match)
|
|
|
|
def _set_retro_hunt_nb_match(task_uuid, nb_match):
|
|
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'nb_match', nb_match)
|
|
|
|
def set_retro_hunt_nb_match(task_uuid):
|
|
l_date_value = r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1, withscores=True)
|
|
nb_match = 0
|
|
for tuple in l_date_value:
|
|
nb_match += int(tuple[1])
|
|
_set_retro_hunt_nb_match(task_uuid, nb_match)
|
|
|
|
def get_retro_hunt_items_by_daterange(task_uuid, date_from, date_to):
|
|
all_item_id = set()
|
|
if date_from and date_to:
|
|
l_date_match = r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1, withscores=True)
|
|
if l_date_match:
|
|
dict_date_match = dict(l_date_match)
|
|
for date_day in Date.substract_date(date_from, date_to):
|
|
if date_day in dict_date_match:
|
|
all_item_id |= r_serv_tracker.smembers(f'tracker:retro_hunt:task:item:{task_uuid}:{date_day}')
|
|
return all_item_id
|
|
|
|
def get_retro_hunt_nb_item_by_day(l_task_uuid, date_from=None, date_to=None):
|
|
list_stats = []
|
|
for task_uuid in l_task_uuid:
|
|
dict_task_data = []
|
|
retro_name = get_retro_hunt_task_name(task_uuid)
|
|
l_date_match = r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1, withscores=True)
|
|
if l_date_match:
|
|
dict_date_match = dict(l_date_match)
|
|
if not date_from:
|
|
date_from = min(dict_date_match)
|
|
if not date_to:
|
|
date_to = max(dict_date_match)
|
|
|
|
date_range = Date.substract_date(date_from, date_to)
|
|
for date_day in date_range:
|
|
nb_seen_this_day = int(dict_date_match.get(date_day, 0))
|
|
dict_task_data.append({"date": date_day,"value": int(nb_seen_this_day)})
|
|
list_stats.append({"name": retro_name,"Data": dict_task_data})
|
|
return list_stats
|
|
|
|
## API ##
|
|
def api_check_retro_hunt_task_uuid(task_uuid):
|
|
if not is_valid_uuid_v4(task_uuid):
|
|
return {"status": "error", "reason": "Invalid uuid"}, 400
|
|
if not r_serv_tracker.exists(f'tracker:retro_hunt:task:{task_uuid}'):
|
|
return {"status": "error", "reason": "Unknown uuid"}, 404
|
|
return None
|
|
|
|
def api_get_retro_hunt_items(dict_input):
|
|
task_uuid = dict_input.get('uuid', None)
|
|
res = api_check_retro_hunt_task_uuid(task_uuid)
|
|
if res:
|
|
return res
|
|
|
|
date_from = dict_input.get('date_from', None)
|
|
date_to = dict_input.get('date_to', None)
|
|
if date_from is None:
|
|
date_from = get_retro_hunt_task_date_from(task_uuid)
|
|
if date_from:
|
|
date_from = date_from[0]
|
|
if date_to is None:
|
|
date_to = date_from
|
|
if date_from > date_to:
|
|
date_from = date_to
|
|
|
|
all_items_id = get_retro_hunt_items_by_daterange(task_uuid, date_from, date_to)
|
|
all_items_id = item_basic.get_all_items_metadata_dict(all_items_id)
|
|
|
|
res_dict = {}
|
|
res_dict['uuid'] = task_uuid
|
|
res_dict['date_from'] = date_from
|
|
res_dict['date_to'] = date_to
|
|
res_dict['items'] = all_items_id
|
|
return res_dict, 200
|
|
|
|
def api_pause_retro_hunt_task(task_uuid):
|
|
res = api_check_retro_hunt_task_uuid(task_uuid)
|
|
if res:
|
|
return res
|
|
task_state = get_retro_hunt_task_state(task_uuid)
|
|
if task_state not in ['pending', 'running']:
|
|
return {"status": "error", "reason": f"Task {task_uuid} not paused, current state: {task_state}"}, 400
|
|
pause_retro_hunt_task(task_uuid)
|
|
return task_uuid, 200
|
|
|
|
def api_resume_retro_hunt_task(task_uuid):
|
|
res = api_check_retro_hunt_task_uuid(task_uuid)
|
|
if res:
|
|
return res
|
|
task_state = get_retro_hunt_task_state(task_uuid)
|
|
if not r_serv_tracker.sismember('tracker:retro_hunt:task:paused', task_uuid):
|
|
return {"status": "error", "reason": f"Task {task_uuid} not paused, current state: {get_retro_hunt_task_state(task_uuid)}"}, 400
|
|
resume_retro_hunt_task(task_uuid)
|
|
return task_uuid, 200
|
|
|
|
def api_validate_rule_to_add(rule, rule_type):
|
|
if rule_type=='yara_custom':
|
|
if not is_valid_yara_rule(rule):
|
|
return ({"status": "error", "reason": "Invalid custom Yara Rule"}, 400)
|
|
elif rule_type=='yara_default':
|
|
if not is_valid_default_yara_rule(rule):
|
|
return ({"status": "error", "reason": "The Yara Rule doesn't exist"}, 400)
|
|
else:
|
|
return ({"status": "error", "reason": "Incorrect type"}, 400)
|
|
return ({"status": "success", "rule": rule, "type": rule_type}, 200)
|
|
|
|
def api_create_retro_hunt_task(dict_input, creator):
|
|
# # TODO: API: check mandatory arg
|
|
# # TODO: TIMEOUT
|
|
|
|
# timeout=30
|
|
rule = dict_input.get('rule', None)
|
|
if not rule:
|
|
return ({"status": "error", "reason": "Retro Hunt Rule not provided"}, 400)
|
|
task_type = dict_input.get('type', None)
|
|
if not task_type:
|
|
return ({"status": "error", "reason": "type not provided"}, 400)
|
|
|
|
# # TODO: limit
|
|
name = dict_input.get('name', '')
|
|
name = escape(name)
|
|
name = name[:60]
|
|
# # TODO: limit
|
|
description = dict_input.get('description', '')
|
|
description = escape(description)
|
|
description = description[:1000]
|
|
|
|
res = api_validate_rule_to_add(rule , task_type)
|
|
if res[1]!=200:
|
|
return res
|
|
|
|
tags = dict_input.get('tags', [])
|
|
mails = dict_input.get('mails', [])
|
|
res = verify_mail_list(mails)
|
|
if res:
|
|
return res
|
|
|
|
sources = dict_input.get('sources', [])
|
|
res = item_basic.verify_sources_list(sources)
|
|
if res:
|
|
return res
|
|
|
|
date_from = dict_input.get('date_from', '')
|
|
date_to = dict_input.get('date_to', '')
|
|
res = Date.api_validate_str_date_range(date_from, date_to)
|
|
if res:
|
|
return res
|
|
|
|
task_uuid = str(uuid.uuid4())
|
|
|
|
# RULE
|
|
rule = save_yara_rule(task_type, rule, tracker_uuid=task_uuid)
|
|
task_type = 'yara'
|
|
|
|
task_uuid = create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=sources,
|
|
tags=tags, mails=mails, timeout=30, description=description, task_uuid=task_uuid)
|
|
|
|
return ({'name': name, 'rule': rule, 'type': task_type, 'uuid': task_uuid}, 200)
|
|
|
|
def api_delete_retro_hunt_task(task_uuid):
|
|
res = api_check_retro_hunt_task_uuid(task_uuid)
|
|
if res:
|
|
return res
|
|
if r_serv_tracker.sismember('tracker:retro_hunt:task:running', task_uuid):
|
|
return ({"status": "error", "reason": "You can't delete a running task"}, 400)
|
|
else:
|
|
return (delete_retro_hunt_task(task_uuid), 200)
|
|
|
|
#### DB FIX ####
|
|
def get_trackers_tags():
|
|
tags = set()
|
|
for tracker_uuid in get_all_tracker_uuid():
|
|
for tag in get_tracker_tags(tracker_uuid):
|
|
tags.add(tag)
|
|
for task_uuid in get_all_retro_hunt_tasks():
|
|
for tag in get_retro_hunt_task_tags(task_uuid):
|
|
tags.add(tag)
|
|
return tags
|
|
|
|
def _fix_db_custom_tags():
|
|
for tag in get_trackers_tags():
|
|
if not Tag.is_taxonomie_tag(tag) and not Tag.is_galaxy_tag(tag):
|
|
Tag.create_custom_tag(tag)
|
|
|
|
#### -- ####
|
|
|
|
if __name__ == '__main__':
|
|
|
|
_fix_db_custom_tags()
|
|
# fix_all_tracker_uuid_list()
|
|
# res = get_all_tracker_uuid()
|
|
# print(len(res))
|
|
|
|
# import Term
|
|
# Term.delete_term('5262ab6c-8784-4a55-b0ff-a471018414b4')
|
|
|
|
#fix_tracker_stats_per_day('5262ab6c-8784-4a55-b0ff-a471018414b4')
|
|
|
|
# tracker_uuid = '5262ab6c-8784-4a55-b0ff-a471018414b4'
|
|
# fix_tracker_item_link(tracker_uuid)
|
|
# res = get_item_all_trackers_uuid('archive/')
|
|
# print(res)
|
|
|
|
#res = is_valid_yara_rule('rule dummy { }')
|
|
|
|
# res = create_tracker('test', 'word', 'admin@admin.test', 1, [], [], None, sources=['crawled', 'pastebin.com', 'rt/pastebin.com'])
|
|
#res = create_tracker('circl\.lu', 'regex', 'admin@admin.test', 1, [], [], None, sources=['crawled','pastebin.com'])
|
|
#print(res)
|
|
|
|
#t_uuid = '1c2d35b0-9330-4feb-b454-da13007aa9f7'
|
|
#res = get_tracker_sources('ail-yara-rules/rules/crypto/certificate.yar', 'yara')
|
|
|
|
# sys.path.append(os.environ['AIL_BIN'])
|
|
# from packages import Term
|
|
# Term.delete_term('074ab4be-6049-45b5-a20e-8125a4e4f500')
|
|
|
|
|
|
#res = get_items_to_analyze('archive/pastebin.com_pro/2020/05/15', last='archive/pastebin.com_pro/2020/05/15/zkHEgqjQ.gz')
|
|
#get_retro_hunt_task_progress('0', nb_src_done=2)
|
|
|
|
#res = set_cache_retro_hunt_task_progress('0', 100)
|
|
#res = get_retro_hunt_task_nb_src_done('0', sources=['pastebin.com_pro', 'alerts/pastebin.com_pro', 'crawled'])
|
|
#print(res)
|
|
|
|
# sources = ['pastebin.com_pro', 'alerts/pastebin.com_pro', 'crawled']
|
|
# rule = 'custom-rules/4a8a3d04-f0b6-43ce-8e00-bdf47a8df241.yar'
|
|
# name = 'retro_hunt_test_1'
|
|
# description = 'circl retro hunt first test'
|
|
# tags = ['retro_circl', 'circl']
|
|
# creator = 'admin@admin.test'
|
|
# date_from = '20200610'
|
|
# date_to = '20210630'
|
|
|
|
#res = create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=sources, tags=tags, description=description)
|
|
|
|
|
|
#get_retro_hunt_nb_item_by_day(['80b402ef-a8a9-4e97-adb6-e090edcfd571'], date_from=None, date_to=None, num_day=31)
|
|
|
|
#res = get_retro_hunt_nb_item_by_day(['c625f971-16e6-4331-82a7-b1e1b9efdec1'], date_from='20200610', date_to='20210630')
|
|
|
|
#res = delete_retro_hunt_task('598687b6-f765-4f8b-861a-09ad76d0ab34')
|
|
|
|
#print(res)
|