#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The Mixer Module
================

This module consumes the Redis list created by the ZMQ_Feed_Q module.

It takes all the feeds provided in the config.

Depending on the configuration, this module processes the feed as follows:

    operation_mode 1: "Avoid any duplicate from any sources"
        - The module maintains a record of each item's content (by hash)
        - If the content is new, process it
        - Else, do not process it but keep track of statistics on duplicates

    DISABLED
    operation_mode 2: "Keep duplicates coming from different sources"
        - The module maintains a record of the name given to the item by the feeder
        - If the name has not yet been seen, process it
        - Else, if the saved content associated with the item is not the same, process it
        - Else, do not process it but keep track of statistics on duplicates

    operation_mode 3: "Don't look for duplicated content"
        - Simply do not bother to check whether the item is a duplicate

Note that the hash of the content is defined as the sha1(gzip64encoded).
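(In Python terms, roughly hashlib.sha1(gzip64encoded.encode('utf8')).hexdigest(),
as used by the commented-out deduplication code below.)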
"""
import os
import sys
import time

# import hashlib

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from modules.abstract_module import AbstractModule
from lib.ConfigLoader import ConfigLoader
from lib import ail_stats


class Mixer(AbstractModule):
    """docstring for Mixer module."""

    def __init__(self):
        super(Mixer, self).__init__()

        config_loader = ConfigLoader()
        self.r_cache = config_loader.get_redis_conn("Redis_Mixer_Cache")
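
        # seconds to sleep between queue polls when no message is pending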
        self.pending_seconds = 1

        self.refresh_time = 30
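        # align last_refresh to the next 30-second boundary so feeder
        # statistics are flushed on fixed wall-clock windows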
        timestamp = int(time.time())
        self.last_refresh = int(timestamp - (timestamp % 30))
        if timestamp > self.last_refresh:
            self.last_refresh += 30

        self.operation_mode = config_loader.get_config_int("Module_Mixer", "operation_mode")
        print(f'Operation mode {self.operation_mode}')

        self.ttl_key = config_loader.get_config_int("Module_Mixer", "ttl_duplicate")
        self.default_feeder_name = config_loader.get_config_str("Module_Mixer", "default_unnamed_feed_name")
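
        # per-feeder message counters, flushed to ail_stats every refresh window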
        self.feeders_processed = {}

        self.logger.info(f"Module: {self.module_name} Launched")

    def increase_stat_processed(self, feeder):
        try:
            self.feeders_processed[feeder] += 1
        except KeyError:
            self.feeders_processed[feeder] = 1

    def refresh_stats(self):
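        # flush per-feeder counters to ail_stats once per refresh window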
        timestamp = int(time.time())
        if timestamp >= self.last_refresh:
            timestamp = timestamp - timestamp % self.refresh_time
            print('update', timestamp)
            print(self.feeders_processed)
            ail_stats.add_feeders(timestamp, self.feeders_processed)
            self.feeders_processed = {}
            self.last_refresh = self.last_refresh + 30

    def computeNone(self):
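        # called by the module loop when no message is pending,
        # so stats keep flushing on schedule even when the feed is idle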
        self.refresh_stats()

    def compute(self, message):
        self.refresh_stats()
        # obj = self.obj
        # TODO CHECK IF NOT self.object -> get object global ID from message

        splitted = message.split()
        # message -> "feeder_name gzip64encoded"
        # or message -> "feeder_name" (object content already saved)
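        # e.g. (hypothetical): "pystemon" alone, or "pystemon H4sIA..." where
        # the second field is the base64 of the gzipped content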
        if len(splitted) == 1:  # feeder_name only, content already saved
            feeder_name = message
            gzip64encoded = None

        # feeder name + gzip64-encoded content in message
        elif len(splitted) == 2:
            feeder_name, gzip64encoded = splitted
        else:
            self.logger.warning(f'Invalid Message: {splitted} not processed')
            return None

        if self.obj.type == 'item':
            # Remove ITEMS_FOLDER from item path (crawled item + submitted)
            # Limit basename length
            obj_id = self.obj.id
            self.obj.sanitize_id()
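            # if sanitize_id() changed the id, rename the queued message so the
            # queue entry keeps pointing at the right object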
            if self.obj.id != obj_id:
                self.queue.rename_message_obj(self.obj.id, obj_id)

        # # TODO only works for item objects
        # # Avoid any duplicate coming from any sources
        # if self.operation_mode == 1:
        #     digest = hashlib.sha1(gzip64encoded.encode('utf8')).hexdigest()
        #     if self.r_cache.exists(digest):  # Content already exists
        #         # STATS
        #         self.increase_stat_duplicate(feeder_name)
        #     else:  # New content
        #         self.r_cache.sadd(digest, feeder_name)
        #         self.r_cache.expire(digest, self.ttl_key)
        #
        #         self.increase_stat_processed(feeder_name)
        #         self.add_message_to_queue(message=relay_message)

        # Needs To Be Fixed: currently doesn't check the source (-> same as operation_mode 1)
        # # Keep duplicates coming from different sources
        # elif self.operation_mode == 2:
        #     digest = hashlib.sha1(gzip64encoded.encode('utf8')).hexdigest()
        #     # Filter to avoid duplicates
        #     older_digest = self.r_cache.get(f'HASH_{item_id}')
        #     if older_digest is None:
        #         # New content
        #         # Store in redis for filtering
        #         self.r_cache.set(f'HASH_{item_id}', digest)
        #         self.r_cache.sadd(item_id, feeder_name)
        #         self.r_cache.expire(item_id, self.ttl_key)
        #         self.r_cache.expire(f'HASH_{item_id}', self.ttl_key)
        #
        #         self.add_message_to_queue(relay_message)
        #
        #     else:
        #         if digest != older_digest:
        #             # Same item name but different content
        #             # STATS
        #             self.increase_stat_duplicate(feeder_name)
        #             self.r_cache.sadd(item_id, feeder_name)
        #             self.r_cache.expire(item_id, self.ttl_key)
        #
        #             self.add_message_to_queue(relay_message)
        #
        #         else:
        #             # Already processed
        #             # Keep track of processed items
        #             # STATS
        #             self.increase_stat_duplicate(feeder_name)
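
        # NOTE: the disabled modes above reference names (relay_message, item_id,
        # increase_stat_duplicate) that are no longer defined in this module and
        # would need to be restored before re-enabling them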

        # No Filtering
        # else:
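        # current behavior: every message is counted and forwarded as-is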
        self.increase_stat_processed(feeder_name)
        self.add_message_to_queue(obj=self.obj, message=gzip64encoded)


if __name__ == "__main__":
    module = Mixer()
    module.run()