# coding: utf-8
"""
Base Class for AIL Modules
"""

##################################
# Import External packages
##################################
from abc import ABC, abstractmethod
import os
import logging
import logging.config
import sys
import time
import traceback

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from pubsublogger import publisher
from lib import ail_logger
from lib.ail_queues import AILQueue
from lib import regex_helper
from lib.exceptions import ModuleQueueError
from lib.objects.ail_objects import get_obj_from_global_id

logging.config.dictConfig(ail_logger.get_config(name='modules'))


class AbstractModule(ABC):
    """
    Abstract Module class
    """

    def __init__(self, module_name=None, queue=True):
        """
        AIL Module

        :param module_name: str; set the module name if different from the instance ClassName
        :param queue: allow pushing messages to other modules
        """
        self.logger = logging.getLogger(f'{self.__class__.__name__}')

        # Module name if provided else instance className
        self.module_name = module_name if module_name else self._module_name()

        self.pid = os.getpid()

        # Setup the I/O queues
        if queue:
            self.queue = AILQueue(self.module_name, self.pid)
        self.obj = None
        self.sha256_mess = None

        # Init Redis Logger
        self.redis_logger = publisher
        # Port of the redis instance used by pubsublogger
        self.redis_logger.port = 6380
        # Channel name to publish logs
        # # TODO: refactor logging
        # If provided, it could be a namespaced channel like script:<ModuleName>
        self.redis_logger.channel = 'Script'

        # Cache key
        self.r_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
        self.max_execution_time = 30

        # Run module endlessly
        self.proceed = True

        # Waiting time in seconds between two processed messages
        self.pending_seconds = 10

        # Debug Mode
        self.debug = False

    def get_obj(self):
        return self.obj

    def set_obj(self, new_obj):
        if self.obj:
            old_id = self.obj.id
            self.obj = new_obj
            self.queue.rename_message_obj(self.obj.id, old_id)
        else:
            self.obj = new_obj

    def get_message(self):
        """
        Get one message from the input Redis Queue (QueueIn).

        Sets self.obj and self.sha256_mess from the queued entry and returns
        the message payload; its format can change between modules, ex: '<item id>'
        """
        message = self.queue.get_message()
        if message:
            obj_global_id, sha256_mess, mess = message
            if obj_global_id:
                self.sha256_mess = sha256_mess
                self.obj = get_obj_from_global_id(obj_global_id)
            else:
                self.sha256_mess = None
                self.obj = None
            return mess
        self.sha256_mess = None
        self.obj = None
        return None

    # TODO ADD META OBJ ????
    def add_message_to_queue(self, obj=None, message='', queue=None):
        """
        Add a message to an output queue.

        :param obj: AILObject to forward; defaults to the object currently being processed
        :param message: message to send in queue
        :param queue: queue name or module name

        ex: add_message_to_queue(obj=obj, queue='Mail')
        """
        if obj:
            obj_global_id = obj.get_global_id()
        elif self.obj:
            obj_global_id = self.obj.get_global_id()
        else:
            obj_global_id = '::'
        self.queue.send_message(obj_global_id, message, queue)

    def get_available_queues(self):
        return self.queue.get_out_queues()

    def regex_match(self, regex, obj_id, content):
        return regex_helper.regex_match(self.r_cache_key, regex, obj_id, content, max_time=self.max_execution_time)

    def regex_search(self, regex, obj_id, content):
        return regex_helper.regex_search(self.r_cache_key, regex, obj_id, content, max_time=self.max_execution_time)

    def regex_finditer(self, regex, obj_id, content):
        return regex_helper.regex_finditer(self.r_cache_key, regex, obj_id, content, max_time=self.max_execution_time)

    def regex_findall(self, regex, obj_id, content, r_set=False):
        """
        regex findall helper (force timeout)
        :param regex: compiled regex
        :param obj_id: object id
        :param content: object content
        :param r_set: return result as set
        """
        return regex_helper.regex_findall(self.module_name, self.r_cache_key, regex, obj_id, content,
                                          max_time=self.max_execution_time, r_set=r_set)

    def regex_phone_iter(self, country_code, obj_id, content):
        """
        regex phone iter helper (force timeout)
        :param country_code: expected phone country code
        :param obj_id: object id
        :param content: object content
        """
        return regex_helper.regex_phone_iter(self.r_cache_key, country_code, obj_id, content,
                                             max_time=self.max_execution_time)
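
    # Usage sketch for the regex helpers above (illustrative only, from a concrete
    # module's compute(); `self.regex` and `get_content()` are assumptions of the
    # example, not defined by this class):
    #   content = self.obj.get_content()
    #   matches = self.regex_findall(self.regex, self.obj.id, content)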

    def run(self):
        """
        Run Module endless process
        """

        # Endless loop processing messages from the input queue
        while self.proceed:
            # Get one message (ex: item id) from the Redis Queue (QueueIn)
            message = self.get_message()

            if message or self.obj:
                try:
                    # Module processing with the message from the queue
                    self.compute(message)
                except Exception as err:
                    if self.debug:
                        self.queue.error()
                        raise err

                    # LOG ERROR
                    trace = traceback.format_tb(err.__traceback__)
                    trace = ''.join(trace)
                    self.logger.critical(f"Error in module {self.module_name}: {__name__} : {err}")
                    if message:
                        self.logger.critical(f"Module {self.module_name} input message: {message}")
                    if self.obj:
                        self.logger.critical(f"{self.module_name} Obj: {self.obj.get_global_id()}")
                    self.logger.critical(trace)

                    if isinstance(err, ModuleQueueError):
                        self.queue.error()
                        raise err
                    # remove from set_module
                    ## check if item process == completed

                if self.obj:
                    self.queue.end_message(self.obj.get_global_id(), self.sha256_mess)
                    self.obj = None
                    self.sha256_mess = None

            else:
                self.computeNone()
                # Wait before next process
                self.logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
                time.sleep(self.pending_seconds)

    def _module_name(self):
        """
        Returns the instance class name (ie the Module Name)
        """
        return self.__class__.__name__

    @abstractmethod
    def compute(self, message):
        """
        Main method of the Module to implement
        """
        pass

    def compute_manual(self, obj, message=None):
        self.obj = obj
        return self.compute(message)
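
    # e.g. (illustrative): run a single object through the module outside the
    # run() loop, typically from a one-shot script or a test; `module` and `obj`
    # are hypothetical concrete-module and AIL-object instances:
    #   module.compute_manual(obj)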

    def computeNone(self):
        """
        Method of the Module when there is no message
        """
        pass
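
# ----------------------------------------------------------------------------
# Illustrative sketch only (not part of this class): a concrete AIL module is
# expected to subclass AbstractModule, implement compute() and call run().
# The names used below (MyModule, the placeholder regex, get_content() and the
# 'Tags' queue) are assumptions made for the example, not defined by this file.
#
#     import re
#
#     class MyModule(AbstractModule):
#         def __init__(self):
#             super().__init__()
#             self.regex = re.compile(r'example')    # placeholder pattern
#             self.pending_seconds = 5               # idle time when the queue is empty
#
#         def compute(self, message):
#             obj = self.get_obj()                   # object selected by get_message()
#             content = obj.get_content()            # content accessor, assumed here
#             if self.regex_findall(self.regex, obj.id, content):
#                 # forward the current object to the next queue/module
#                 self.add_message_to_queue(message='example-match', queue='Tags')
#
#
#     if __name__ == '__main__':
#         module = MyModule()
#         module.run()
# ----------------------------------------------------------------------------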