2022-10-25 16:25:19 +02:00
|
|
|
# -*-coding:UTF-8 -*
|
|
|
|
"""
|
|
|
|
Base Class for AIL Objects
|
|
|
|
"""
|
|
|
|
|
|
|
|
##################################
|
|
|
|
# Import External packages
|
|
|
|
##################################
|
|
|
|
import os
|
2023-05-25 14:33:12 +02:00
|
|
|
import re
|
2022-10-25 16:25:19 +02:00
|
|
|
import sys
|
|
|
|
from abc import abstractmethod, ABC
|
|
|
|
|
2022-11-29 16:01:01 +01:00
|
|
|
# from flask import url_for
|
2022-10-25 16:25:19 +02:00
|
|
|
|
|
|
|
sys.path.append(os.environ['AIL_BIN'])
|
|
|
|
##################################
|
|
|
|
# Import Project packages
|
|
|
|
##################################
|
|
|
|
from lib.objects.abstract_object import AbstractObject
|
|
|
|
from lib.ConfigLoader import ConfigLoader
|
|
|
|
from lib.item_basic import is_crawled, get_item_domain
|
2022-12-19 16:38:20 +01:00
|
|
|
from lib.data_retention_engine import update_obj_date
|
2022-10-25 16:25:19 +02:00
|
|
|
|
|
|
|
from packages import Date
|
|
|
|
|
|
|
|
# LOAD CONFIG
config_loader = ConfigLoader()
# Kvrocks DB handle shared by every object/collection class in this module
r_object = config_loader.get_db_conn("Kvrocks_Objects")
# release the loader once the connection is obtained
config_loader = None
|
|
|
|
|
|
|
|
class AbstractDaterangeObject(AbstractObject, ABC):
    """
    Abstract Subtype Object

    Base class for AIL objects that track a first_seen/last_seen date range
    and per-day occurrence counters in the Kvrocks object DB.
    """

    def __init__(self, obj_type, id):
        """ Abstract for all the AIL object

        :param obj_type: object type (item, ...)
        :param id: Object ID
        """
        super().__init__(obj_type, id)

    def exists(self):
        """Return True if this object's meta hash exists in the DB."""
        return r_object.exists(f'meta:{self.type}:{self.id}')

    def _get_field(self, field):  # TODO remove me (NEW in abstract)
        """Get a single field from this object's meta hash."""
        return r_object.hget(f'meta:{self.type}:{self.id}', field)

    def _set_field(self, field, value):  # TODO remove me (NEW in abstract)
        """Set a single field in this object's meta hash."""
        return r_object.hset(f'meta:{self.type}:{self.id}', field, value)

    def get_first_seen(self, r_int=False):
        """Return the first seen date (YYYYMMDD string).

        :param r_int: if True, return an int; a missing value maps to the
                      sentinel 99999999 (greater than any real date, so any
                      real date compares as earlier)
        """
        first_seen = self._get_field('first_seen')
        if r_int:
            return int(first_seen) if first_seen else 99999999
        return first_seen

    def get_last_seen(self, r_int=False):
        """Return the last seen date (YYYYMMDD string).

        :param r_int: if True, return an int; a missing value maps to the
                      sentinel 0 (smaller than any real date, so any real
                      date compares as later)
        """
        last_seen = self._get_field('last_seen')
        if r_int:
            return int(last_seen) if last_seen else 0
        return last_seen

    def get_nb_seen(self):
        """Total number of sightings == number of correlated items."""
        return self.get_nb_correlation('item')

    def get_nb_seen_by_date(self, date):
        """Return how many times this object was seen on ``date`` (0 if never)."""
        nb = r_object.zscore(f'{self.type}:date:{date}', self.id)
        return 0 if nb is None else int(nb)

    def _get_meta(self, options=None):
        """Return the meta dict (first/last seen, nb_seen; 'sparkline' on request).

        :param options: optional set of extra fields to include
        """
        if options is None:  # avoid shared mutable default argument
            options = set()
        meta_dict = self.get_default_meta()
        meta_dict['first_seen'] = self.get_first_seen()
        meta_dict['last_seen'] = self.get_last_seen()
        meta_dict['nb_seen'] = self.get_nb_seen()
        if 'sparkline' in options:
            meta_dict['sparkline'] = self.get_sparkline()
        return meta_dict

    def set_first_seen(self, first_seen):
        self._set_field('first_seen', first_seen)

    def set_last_seen(self, last_seen):
        self._set_field('last_seen', last_seen)

    def update_daterange(self, date):
        """Expand the first_seen/last_seen range so that it includes ``date``."""
        date = int(date)
        # obj doesn't exist yet: initialize both bounds to this date
        if not self.exists():
            self.set_first_seen(date)
            self.set_last_seen(date)
        else:
            if date < self.get_first_seen(r_int=True):
                self.set_first_seen(date)
            if date > self.get_last_seen(r_int=True):
                self.set_last_seen(date)

    def get_sparkline(self):
        """Return daily sighting counts for the previous 6 days (UI sparkline)."""
        return [self.get_nb_seen_by_date(date) for date in Date.get_previous_date_list(6)]

    def get_content(self, r_type='str'):
        """Return the object's content (its ID) as str or bytes."""
        if r_type == 'str':
            return self.id
        elif r_type == 'bytes':
            return self.id.encode()

    def _add_create(self):
        """Register this object ID in the set of all objects of its type."""
        r_object.sadd(f'{self.type}:all', self.id)

    def _add(self, date, obj):
        """Record a sighting of this object on ``date``, correlated with ``obj``.

        :param date: sighting date (YYYYMMDD)
        :param obj: correlated AIL object (may be falsy to skip correlation)
        """
        if not self.exists():
            self._add_create()
            self.set_first_seen(date)
            self.set_last_seen(date)
        else:
            self.update_daterange(date)
        update_obj_date(date, self.type)

        if obj:
            # Correlations
            self.add_correlation(obj.type, obj.get_subtype(r_str=True), obj.get_id())

            # Stats NB by day: # TODO Don't increase on reprocess
            r_object.zincrby(f'{self.type}:date:{date}', 1, self.id)

            if obj.type == 'item':
                item_id = obj.get_id()
                # domain
                if is_crawled(item_id):
                    domain = get_item_domain(item_id)
                    self.add_correlation('domain', '', domain)

    def add(self, date, obj):
        """Public entry point: record a sighting (see :meth:`_add`)."""
        self._add(date, obj)

    # TODO:ADD objects + Stats
    def _create(self, first_seen=None, last_seen=None):
        """Create the object, optionally seeding its first/last seen dates."""
        if first_seen:
            self.set_first_seen(first_seen)
        if last_seen:
            self.set_last_seen(last_seen)
        r_object.sadd(f'{self.type}:all', self.id)

    # TODO
    def _delete(self):
        pass
|
2023-05-25 14:33:12 +02:00
|
|
|
|
|
|
|
|
|
|
|
class AbstractDaterangeObjects(ABC):
    """
    Abstract Daterange Objects

    Collection-level helper over all daterange objects of one type:
    listing, per-date stats, meta retrieval and regex search.
    """

    def __init__(self, obj_type, obj_class):
        """ Abstract for Daterange Objects

        :param obj_type: object type (item, ...)
        :param obj_class: object python class (Item, ...)
        """
        self.type = obj_type
        self.obj_class = obj_class

    def get_ids(self):
        """Return the set of all object IDs of this type."""
        return r_object.smembers(f'{self.type}:all')

    # def get_ids_iterator(self):
    #     return r_object.sscan_iter(r_object, f'{self.type}:all')

    def get_by_date(self, date):
        """Return the IDs of objects seen on ``date``."""
        return r_object.zrange(f'{self.type}:date:{date}', 0, -1)

    def get_nb_by_date(self, date):
        """Return the number of distinct objects seen on ``date``."""
        return r_object.zcard(f'{self.type}:date:{date}')

    def get_by_daterange(self, date_from, date_to):
        """Return the union of IDs seen on each day in [date_from, date_to]."""
        obj_ids = set()
        for date in Date.substract_date(date_from, date_to):
            obj_ids |= set(self.get_by_date(date))
        return obj_ids

    def get_metas(self, obj_ids, options=None):
        """Return ``{obj_id: meta}`` for the given IDs.

        :param options: optional set of extra meta fields (e.g. {'sparkline'})
        """
        if options is None:  # avoid shared mutable default argument
            options = set()
        dict_obj = {}
        for obj_id in obj_ids:
            obj = self.obj_class(obj_id)
            dict_obj[obj_id] = obj.get_meta(options=options)
        return dict_obj

    @abstractmethod
    def sanitize_id_to_search(self, id_to_search):
        """Sanitize a user-supplied ID search string (override per type)."""
        return id_to_search

    def search_by_id(self, name_to_search, r_pos=False, case_sensitive=True):
        """Regex-search object IDs.

        :param name_to_search: regex pattern to match against IDs
        :param r_pos: if True, include match start/end positions
        :param case_sensitive: if False, search with re.IGNORECASE
        :return: dict of matching obj_id -> (optional) highlight positions
        """
        objs = {}
        flags = 0 if case_sensitive else re.IGNORECASE
        # for subtype in subtypes:
        r_name = self.sanitize_id_to_search(name_to_search)
        # a dict returned by sanitize signals a validation error
        if not name_to_search or isinstance(r_name, dict):
            return objs
        r_name = re.compile(r_name, flags=flags)
        for obj_id in self.get_ids():  # TODO REPLACE ME WITH AN ITERATOR
            res = r_name.search(obj_id)
            if res:
                objs[obj_id] = {}
                if r_pos:
                    objs[obj_id]['hl-start'] = res.start()
                    objs[obj_id]['hl-end'] = res.end()
        return objs

    def sanitize_content_to_search(self, content_to_search):
        """Sanitize a user-supplied content search string (override per type)."""
        return content_to_search

    def search_by_content(self, content_to_search, r_pos=False, case_sensitive=True):
        """Regex-search object contents.

        :param content_to_search: regex pattern to match against contents
        :param r_pos: if True, include match positions and matched content
        :param case_sensitive: if False, search with re.IGNORECASE
        :return: dict of matching obj_id -> (optional) highlight info
        """
        objs = {}
        flags = 0 if case_sensitive else re.IGNORECASE
        # for subtype in subtypes:
        r_search = self.sanitize_content_to_search(content_to_search)
        # a dict returned by sanitize signals a validation error
        if not r_search or isinstance(r_search, dict):
            return objs
        r_search = re.compile(r_search, flags=flags)
        for obj_id in self.get_ids():  # TODO REPLACE ME WITH AN ITERATOR
            obj = self.obj_class(obj_id)
            content = obj.get_content()
            res = r_search.search(content)
            if res:
                objs[obj_id] = {}
                if r_pos:  # TODO ADD CONTENT ????
                    objs[obj_id]['hl-start'] = res.start()
                    objs[obj_id]['hl-end'] = res.end()
                    objs[obj_id]['content'] = content
        return objs

    def api_get_chart_nb_by_daterange(self, date_from, date_to):
        """Return chart data: one {'date': ISO-ish date, <type>: count} per day."""
        date_type = []
        for date in Date.substract_date(date_from, date_to):
            d = {'date': f'{date[0:4]}-{date[4:6]}-{date[6:8]}',
                 self.type: self.get_nb_by_date(date)}
            date_type.append(d)
        return date_type

    def api_get_meta_by_daterange(self, date_from, date_to):
        """Return metas (with sparkline) for all objects seen in the daterange."""
        date = Date.sanitise_date_range(date_from, date_to)
        return self.get_metas(self.get_by_daterange(date['date_from'], date['date_to']), options={'sparkline'})
|